mirror of
https://github.com/NixOS/nix.git
synced 2025-11-08 19:46:02 +01:00
Merge branch 'master' into master
This commit is contained in:
commit
494c49c488
484 changed files with 11131 additions and 6194 deletions
14
.coderabbit.yaml
Normal file
14
.coderabbit.yaml
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
# Disable CodeRabbit auto-review to prevent verbose comments on PRs.
|
||||
# When enabled: false, CodeRabbit won't attempt reviews and won't post
|
||||
# "Review skipped" or other automated comments.
|
||||
reviews:
|
||||
auto_review:
|
||||
enabled: false
|
||||
review_status: false
|
||||
high_level_summary: false
|
||||
poem: false
|
||||
sequence_diagrams: false
|
||||
changed_files_summary: false
|
||||
tools:
|
||||
github-checks:
|
||||
enabled: false
|
||||
4
.github/PULL_REQUEST_TEMPLATE.md
vendored
4
.github/PULL_REQUEST_TEMPLATE.md
vendored
|
|
@ -15,6 +15,10 @@ so you understand the process and the expectations.
|
|||
- volunteering contributions effectively
|
||||
- how to get help and our review process.
|
||||
|
||||
PR stuck in review? We have two Nix team meetings per week online that are open for everyone in a jitsi conference:
|
||||
|
||||
- https://calendar.google.com/calendar/u/0/embed?src=b9o52fobqjak8oq8lfkhg3t0qg@group.calendar.google.com
|
||||
|
||||
-->
|
||||
|
||||
## Motivation
|
||||
|
|
|
|||
83
.github/actions/install-nix-action/action.yaml
vendored
83
.github/actions/install-nix-action/action.yaml
vendored
|
|
@ -4,15 +4,29 @@ inputs:
|
|||
dogfood:
|
||||
description: "Whether to use Nix installed from the latest artifact from master branch"
|
||||
required: true # Be explicit about the fact that we are using unreleased artifacts
|
||||
experimental-installer:
|
||||
description: "Whether to use the experimental installer to install Nix"
|
||||
default: false
|
||||
experimental-installer-version:
|
||||
description: "Version of the experimental installer to use. If `latest`, the newest artifact from the default branch is used."
|
||||
# TODO: This should probably be pinned to a release after https://github.com/NixOS/experimental-nix-installer/pull/49 lands in one
|
||||
default: "latest"
|
||||
extra_nix_config:
|
||||
description: "Gets appended to `/etc/nix/nix.conf` if passed."
|
||||
install_url:
|
||||
description: "URL of the Nix installer"
|
||||
required: false
|
||||
default: "https://releases.nixos.org/nix/nix-2.30.2/install"
|
||||
default: "https://releases.nixos.org/nix/nix-2.32.1/install"
|
||||
tarball_url:
|
||||
description: "URL of the Nix tarball to use with the experimental installer"
|
||||
required: false
|
||||
github_token:
|
||||
description: "Github token"
|
||||
required: true
|
||||
use_cache:
|
||||
description: "Whether to setup magic-nix-cache"
|
||||
default: true
|
||||
required: false
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
|
|
@ -37,14 +51,81 @@ runs:
|
|||
|
||||
gh run download "$RUN_ID" --repo "$DOGFOOD_REPO" -n "$INSTALLER_ARTIFACT" -D "$INSTALLER_DOWNLOAD_DIR"
|
||||
echo "installer-path=file://$INSTALLER_DOWNLOAD_DIR" >> "$GITHUB_OUTPUT"
|
||||
TARBALL_PATH="$(find "$INSTALLER_DOWNLOAD_DIR" -name 'nix*.tar.xz' -print | head -n 1)"
|
||||
echo "tarball-path=file://$TARBALL_PATH" >> "$GITHUB_OUTPUT"
|
||||
|
||||
echo "::notice ::Dogfooding Nix installer from master (https://github.com/$DOGFOOD_REPO/actions/runs/$RUN_ID)"
|
||||
env:
|
||||
GH_TOKEN: ${{ inputs.github_token }}
|
||||
DOGFOOD_REPO: "NixOS/nix"
|
||||
- name: "Gather system info for experimental installer"
|
||||
shell: bash
|
||||
if: ${{ inputs.experimental-installer == 'true' }}
|
||||
run: |
|
||||
echo "::notice Using experimental installer from $EXPERIMENTAL_INSTALLER_REPO (https://github.com/$EXPERIMENTAL_INSTALLER_REPO)"
|
||||
|
||||
if [ "$RUNNER_OS" == "Linux" ]; then
|
||||
EXPERIMENTAL_INSTALLER_SYSTEM="linux"
|
||||
echo "EXPERIMENTAL_INSTALLER_SYSTEM=$EXPERIMENTAL_INSTALLER_SYSTEM" >> "$GITHUB_ENV"
|
||||
elif [ "$RUNNER_OS" == "macOS" ]; then
|
||||
EXPERIMENTAL_INSTALLER_SYSTEM="darwin"
|
||||
echo "EXPERIMENTAL_INSTALLER_SYSTEM=$EXPERIMENTAL_INSTALLER_SYSTEM" >> "$GITHUB_ENV"
|
||||
else
|
||||
echo "::error ::Unsupported RUNNER_OS: $RUNNER_OS"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$RUNNER_ARCH" == "X64" ]; then
|
||||
EXPERIMENTAL_INSTALLER_ARCH=x86_64
|
||||
echo "EXPERIMENTAL_INSTALLER_ARCH=$EXPERIMENTAL_INSTALLER_ARCH" >> "$GITHUB_ENV"
|
||||
elif [ "$RUNNER_ARCH" == "ARM64" ]; then
|
||||
EXPERIMENTAL_INSTALLER_ARCH=aarch64
|
||||
echo "EXPERIMENTAL_INSTALLER_ARCH=$EXPERIMENTAL_INSTALLER_ARCH" >> "$GITHUB_ENV"
|
||||
else
|
||||
echo "::error ::Unsupported RUNNER_ARCH: $RUNNER_ARCH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "EXPERIMENTAL_INSTALLER_ARTIFACT=nix-installer-$EXPERIMENTAL_INSTALLER_ARCH-$EXPERIMENTAL_INSTALLER_SYSTEM" >> "$GITHUB_ENV"
|
||||
env:
|
||||
EXPERIMENTAL_INSTALLER_REPO: "NixOS/experimental-nix-installer"
|
||||
- name: "Download latest experimental installer"
|
||||
shell: bash
|
||||
id: download-latest-experimental-installer
|
||||
if: ${{ inputs.experimental-installer == 'true' && inputs.experimental-installer-version == 'latest' }}
|
||||
run: |
|
||||
RUN_ID=$(gh run list --repo "$EXPERIMENTAL_INSTALLER_REPO" --workflow ci.yml --branch main --status success --json databaseId --jq ".[0].databaseId")
|
||||
|
||||
EXPERIMENTAL_INSTALLER_DOWNLOAD_DIR="$GITHUB_WORKSPACE/$EXPERIMENTAL_INSTALLER_ARTIFACT"
|
||||
mkdir -p "$EXPERIMENTAL_INSTALLER_DOWNLOAD_DIR"
|
||||
|
||||
gh run download "$RUN_ID" --repo "$EXPERIMENTAL_INSTALLER_REPO" -n "$EXPERIMENTAL_INSTALLER_ARTIFACT" -D "$EXPERIMENTAL_INSTALLER_DOWNLOAD_DIR"
|
||||
# Executable permissions are lost in artifacts
|
||||
find $EXPERIMENTAL_INSTALLER_DOWNLOAD_DIR -type f -exec chmod +x {} +
|
||||
echo "installer-path=$EXPERIMENTAL_INSTALLER_DOWNLOAD_DIR" >> "$GITHUB_OUTPUT"
|
||||
env:
|
||||
GH_TOKEN: ${{ inputs.github_token }}
|
||||
EXPERIMENTAL_INSTALLER_REPO: "NixOS/experimental-nix-installer"
|
||||
- uses: cachix/install-nix-action@c134e4c9e34bac6cab09cf239815f9339aaaf84e # v31.5.1
|
||||
if: ${{ inputs.experimental-installer != 'true' }}
|
||||
with:
|
||||
# Ternary operator in GHA: https://www.github.com/actions/runner/issues/409#issuecomment-752775072
|
||||
install_url: ${{ inputs.dogfood == 'true' && format('{0}/install', steps.download-nix-installer.outputs.installer-path) || inputs.install_url }}
|
||||
install_options: ${{ inputs.dogfood == 'true' && format('--tarball-url-prefix {0}', steps.download-nix-installer.outputs.installer-path) || '' }}
|
||||
extra_nix_config: ${{ inputs.extra_nix_config }}
|
||||
- uses: DeterminateSystems/nix-installer-action@786fff0690178f1234e4e1fe9b536e94f5433196 # v20
|
||||
if: ${{ inputs.experimental-installer == 'true' }}
|
||||
with:
|
||||
diagnostic-endpoint: ""
|
||||
# TODO: It'd be nice to use `artifacts.nixos.org` for both of these, maybe through an `/experimental-installer/latest` endpoint? or `/commit/<hash>`?
|
||||
local-root: ${{ inputs.experimental-installer-version == 'latest' && steps.download-latest-experimental-installer.outputs.installer-path || '' }}
|
||||
source-url: ${{ inputs.experimental-installer-version != 'latest' && 'https://artifacts.nixos.org/experimental-installer/tag/${{ inputs.experimental-installer-version }}/${{ env.EXPERIMENTAL_INSTALLER_ARTIFACT }}' || '' }}
|
||||
nix-package-url: ${{ inputs.dogfood == 'true' && steps.download-nix-installer.outputs.tarball-path || (inputs.tarball_url || '') }}
|
||||
extra-conf: ${{ inputs.extra_nix_config }}
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@565684385bcd71bad329742eefe8d12f2e765b39 # v13
|
||||
if: ${{ inputs.use_cache == 'true' }}
|
||||
with:
|
||||
diagnostic-endpoint: ''
|
||||
use-flakehub: false
|
||||
use-gha-cache: true
|
||||
source-revision: 92d9581367be2233c2d5714a2640e1339f4087d8 # main
|
||||
|
|
|
|||
37
.github/workflows/backport.yml
vendored
Normal file
37
.github/workflows/backport.yml
vendored
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
name: Backport
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [closed, labeled]
|
||||
permissions:
|
||||
contents: read
|
||||
jobs:
|
||||
backport:
|
||||
name: Backport Pull Request
|
||||
permissions:
|
||||
# for korthout/backport-action
|
||||
contents: write
|
||||
pull-requests: write
|
||||
if: github.repository_owner == 'NixOS' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name))
|
||||
runs-on: ubuntu-24.04-arm
|
||||
steps:
|
||||
- name: Generate GitHub App token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@v2
|
||||
with:
|
||||
app-id: ${{ vars.CI_APP_ID }}
|
||||
private-key: ${{ secrets.CI_APP_PRIVATE_KEY }}
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
# required to find all branches
|
||||
fetch-depth: 0
|
||||
- name: Create backport PRs
|
||||
uses: korthout/backport-action@d07416681cab29bf2661702f925f020aaa962997 # v3.4.1
|
||||
id: backport
|
||||
with:
|
||||
# Config README: https://github.com/korthout/backport-action#backport-action
|
||||
github_token: ${{ steps.generate-token.outputs.token }}
|
||||
github_workspace: ${{ github.workspace }}
|
||||
auto_merge_enabled: true
|
||||
pull_description: |-
|
||||
Automatic backport to `${target_branch}`, triggered by a label in #${pull_number}.
|
||||
77
.github/workflows/ci.yml
vendored
77
.github/workflows/ci.yml
vendored
|
|
@ -4,6 +4,8 @@ on:
|
|||
pull_request:
|
||||
merge_group:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
dogfood:
|
||||
|
|
@ -12,6 +14,10 @@ on:
|
|||
default: true
|
||||
type: boolean
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: read-all
|
||||
|
||||
jobs:
|
||||
|
|
@ -27,6 +33,7 @@ jobs:
|
|||
extra_nix_config:
|
||||
experimental-features = nix-command flakes
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
use_cache: false
|
||||
- run: nix flake show --all-systems --json
|
||||
|
||||
pre-commit-checks:
|
||||
|
|
@ -39,7 +46,6 @@ jobs:
|
|||
dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }}
|
||||
extra_nix_config: experimental-features = nix-command flakes
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
- run: ./ci/gha/tests/pre-commit-checks
|
||||
|
||||
basic-checks:
|
||||
|
|
@ -90,7 +96,6 @@ jobs:
|
|||
dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }}
|
||||
# The sandbox would otherwise be disabled by default on Darwin
|
||||
extra_nix_config: "sandbox = true"
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
# Since ubuntu 22.30, unprivileged usernamespaces are no longer allowed to map to the root user:
|
||||
# https://ubuntu.com/blog/ubuntu-23-10-restricted-unprivileged-user-namespaces
|
||||
- run: sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0
|
||||
|
|
@ -100,6 +105,12 @@ jobs:
|
|||
nix build --file ci/gha/tests/wrapper.nix componentTests -L \
|
||||
--arg withInstrumentation ${{ matrix.instrumented }} \
|
||||
--argstr stdenv "${{ matrix.stdenv }}"
|
||||
- name: Run VM tests
|
||||
run: |
|
||||
nix build --file ci/gha/tests/wrapper.nix vmTests -L \
|
||||
--arg withInstrumentation ${{ matrix.instrumented }} \
|
||||
--argstr stdenv "${{ matrix.stdenv }}"
|
||||
if: ${{ matrix.os == 'linux' }}
|
||||
- name: Run flake checks and prepare the installer tarball
|
||||
run: |
|
||||
ci/gha/tests/build-checks
|
||||
|
|
@ -135,9 +146,19 @@ jobs:
|
|||
- scenario: on ubuntu
|
||||
runs-on: ubuntu-24.04
|
||||
os: linux
|
||||
experimental-installer: false
|
||||
- scenario: on macos
|
||||
runs-on: macos-14
|
||||
os: darwin
|
||||
experimental-installer: false
|
||||
- scenario: on ubuntu (experimental)
|
||||
runs-on: ubuntu-24.04
|
||||
os: linux
|
||||
experimental-installer: true
|
||||
- scenario: on macos (experimental)
|
||||
runs-on: macos-14
|
||||
os: darwin
|
||||
experimental-installer: true
|
||||
name: installer test ${{ matrix.scenario }}
|
||||
runs-on: ${{ matrix.runs-on }}
|
||||
steps:
|
||||
|
|
@ -149,11 +170,22 @@ jobs:
|
|||
path: out
|
||||
- name: Looking up the installer tarball URL
|
||||
id: installer-tarball-url
|
||||
run: echo "installer-url=file://$GITHUB_WORKSPACE/out" >> "$GITHUB_OUTPUT"
|
||||
- uses: cachix/install-nix-action@v31
|
||||
run: |
|
||||
echo "installer-url=file://$GITHUB_WORKSPACE/out" >> "$GITHUB_OUTPUT"
|
||||
TARBALL_PATH="$(find "$GITHUB_WORKSPACE/out" -name 'nix*.tar.xz' -print | head -n 1)"
|
||||
echo "tarball-path=file://$TARBALL_PATH" >> "$GITHUB_OUTPUT"
|
||||
- uses: cachix/install-nix-action@c134e4c9e34bac6cab09cf239815f9339aaaf84e # v31.5.1
|
||||
if: ${{ !matrix.experimental-installer }}
|
||||
with:
|
||||
install_url: ${{ format('{0}/install', steps.installer-tarball-url.outputs.installer-url) }}
|
||||
install_options: ${{ format('--tarball-url-prefix {0}', steps.installer-tarball-url.outputs.installer-url) }}
|
||||
- uses: ./.github/actions/install-nix-action
|
||||
if: ${{ matrix.experimental-installer }}
|
||||
with:
|
||||
dogfood: false
|
||||
experimental-installer: true
|
||||
tarball_url: ${{ steps.installer-tarball-url.outputs.tarball-path }}
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- run: sudo apt install fish zsh
|
||||
if: matrix.os == 'linux'
|
||||
- run: brew install fish
|
||||
|
|
@ -185,7 +217,7 @@ jobs:
|
|||
echo "docker=${{ env._DOCKER_SECRETS != '' }}" >> $GITHUB_OUTPUT
|
||||
|
||||
docker_push_image:
|
||||
needs: [tests, vm_tests, check_secrets]
|
||||
needs: [tests, check_secrets]
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
|
@ -198,12 +230,13 @@ jobs:
|
|||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: cachix/install-nix-action@v31
|
||||
- uses: ./.github/actions/install-nix-action
|
||||
with:
|
||||
install_url: https://releases.nixos.org/nix/nix-2.20.3/install
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
- run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#nix.version | tr -d \")" >> $GITHUB_ENV
|
||||
- run: nix --experimental-features 'nix-command flakes' build .#dockerImage -L
|
||||
dogfood: false
|
||||
extra_nix_config: |
|
||||
experimental-features = flakes nix-command
|
||||
- run: echo NIX_VERSION="$(nix eval .\#nix.version | tr -d \")" >> $GITHUB_ENV
|
||||
- run: nix build .#dockerImage -L
|
||||
- run: docker load -i ./result/image.tar.gz
|
||||
- run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME }}/nix:$NIX_VERSION
|
||||
- run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME }}/nix:master
|
||||
|
|
@ -238,28 +271,8 @@ jobs:
|
|||
docker tag nix:$NIX_VERSION $IMAGE_ID:master
|
||||
docker push $IMAGE_ID:master
|
||||
|
||||
vm_tests:
|
||||
needs: basic-checks
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: ./.github/actions/install-nix-action
|
||||
with:
|
||||
dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }}
|
||||
extra_nix_config:
|
||||
experimental-features = nix-command flakes
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
- run: |
|
||||
nix build -L \
|
||||
.#hydraJobs.tests.functional_user \
|
||||
.#hydraJobs.tests.githubFlakes \
|
||||
.#hydraJobs.tests.nix-docker \
|
||||
.#hydraJobs.tests.tarballFlakes \
|
||||
;
|
||||
|
||||
flake_regressions:
|
||||
needs: vm_tests
|
||||
needs: tests
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Checkout nix
|
||||
|
|
@ -280,7 +293,6 @@ jobs:
|
|||
extra_nix_config:
|
||||
experimental-features = nix-command flakes
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
- run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=25 flake-regressions/eval-all.sh
|
||||
|
||||
profile_build:
|
||||
|
|
@ -301,7 +313,6 @@ jobs:
|
|||
extra_nix_config: |
|
||||
experimental-features = flakes nix-command ca-derivations impure-derivations
|
||||
max-jobs = 1
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
- run: |
|
||||
nix build -L --file ./ci/gha/profile-build buildTimeReport --out-link build-time-report.md
|
||||
cat build-time-report.md >> $GITHUB_STEP_SUMMARY
|
||||
|
|
|
|||
174
.mergify.yml
174
.mergify.yml
|
|
@ -1,174 +0,0 @@
|
|||
queue_rules:
|
||||
- name: default
|
||||
# all required tests need to go here
|
||||
merge_conditions:
|
||||
- check-success=tests on macos
|
||||
- check-success=tests on ubuntu
|
||||
- check-success=installer test on macos
|
||||
- check-success=installer test on ubuntu
|
||||
- check-success=vm_tests
|
||||
batch_size: 5
|
||||
|
||||
pull_request_rules:
|
||||
- name: merge using the merge queue
|
||||
conditions:
|
||||
- base~=master|.+-maintenance
|
||||
- label~=merge-queue|dependencies
|
||||
actions:
|
||||
queue: {}
|
||||
|
||||
# The rules below will first create backport pull requests and put those in a merge queue.
|
||||
|
||||
- name: backport patches to 2.18
|
||||
conditions:
|
||||
- label=backport 2.18-maintenance
|
||||
actions:
|
||||
backport:
|
||||
branches:
|
||||
- 2.18-maintenance
|
||||
labels:
|
||||
- automatic backport
|
||||
- merge-queue
|
||||
|
||||
- name: backport patches to 2.19
|
||||
conditions:
|
||||
- label=backport 2.19-maintenance
|
||||
actions:
|
||||
backport:
|
||||
branches:
|
||||
- 2.19-maintenance
|
||||
labels:
|
||||
- automatic backport
|
||||
- merge-queue
|
||||
|
||||
- name: backport patches to 2.20
|
||||
conditions:
|
||||
- label=backport 2.20-maintenance
|
||||
actions:
|
||||
backport:
|
||||
branches:
|
||||
- 2.20-maintenance
|
||||
labels:
|
||||
- automatic backport
|
||||
- merge-queue
|
||||
|
||||
- name: backport patches to 2.21
|
||||
conditions:
|
||||
- label=backport 2.21-maintenance
|
||||
actions:
|
||||
backport:
|
||||
branches:
|
||||
- 2.21-maintenance
|
||||
labels:
|
||||
- automatic backport
|
||||
- merge-queue
|
||||
|
||||
- name: backport patches to 2.22
|
||||
conditions:
|
||||
- label=backport 2.22-maintenance
|
||||
actions:
|
||||
backport:
|
||||
branches:
|
||||
- 2.22-maintenance
|
||||
labels:
|
||||
- automatic backport
|
||||
- merge-queue
|
||||
|
||||
- name: backport patches to 2.23
|
||||
conditions:
|
||||
- label=backport 2.23-maintenance
|
||||
actions:
|
||||
backport:
|
||||
branches:
|
||||
- 2.23-maintenance
|
||||
labels:
|
||||
- automatic backport
|
||||
- merge-queue
|
||||
|
||||
- name: backport patches to 2.24
|
||||
conditions:
|
||||
- label=backport 2.24-maintenance
|
||||
actions:
|
||||
backport:
|
||||
branches:
|
||||
- "2.24-maintenance"
|
||||
labels:
|
||||
- automatic backport
|
||||
- merge-queue
|
||||
|
||||
- name: backport patches to 2.25
|
||||
conditions:
|
||||
- label=backport 2.25-maintenance
|
||||
actions:
|
||||
backport:
|
||||
branches:
|
||||
- "2.25-maintenance"
|
||||
labels:
|
||||
- automatic backport
|
||||
- merge-queue
|
||||
|
||||
- name: backport patches to 2.26
|
||||
conditions:
|
||||
- label=backport 2.26-maintenance
|
||||
actions:
|
||||
backport:
|
||||
branches:
|
||||
- "2.26-maintenance"
|
||||
labels:
|
||||
- automatic backport
|
||||
- merge-queue
|
||||
|
||||
- name: backport patches to 2.27
|
||||
conditions:
|
||||
- label=backport 2.27-maintenance
|
||||
actions:
|
||||
backport:
|
||||
branches:
|
||||
- "2.27-maintenance"
|
||||
labels:
|
||||
- automatic backport
|
||||
- merge-queue
|
||||
|
||||
- name: backport patches to 2.28
|
||||
conditions:
|
||||
- label=backport 2.28-maintenance
|
||||
actions:
|
||||
backport:
|
||||
branches:
|
||||
- "2.28-maintenance"
|
||||
labels:
|
||||
- automatic backport
|
||||
- merge-queue
|
||||
|
||||
- name: backport patches to 2.29
|
||||
conditions:
|
||||
- label=backport 2.29-maintenance
|
||||
actions:
|
||||
backport:
|
||||
branches:
|
||||
- "2.29-maintenance"
|
||||
labels:
|
||||
- automatic backport
|
||||
- merge-queue
|
||||
|
||||
- name: backport patches to 2.30
|
||||
conditions:
|
||||
- label=backport 2.30-maintenance
|
||||
actions:
|
||||
backport:
|
||||
branches:
|
||||
- "2.30-maintenance"
|
||||
labels:
|
||||
- automatic backport
|
||||
- merge-queue
|
||||
|
||||
- name: backport patches to 2.31
|
||||
conditions:
|
||||
- label=backport 2.31-maintenance
|
||||
actions:
|
||||
backport:
|
||||
branches:
|
||||
- "2.31-maintenance"
|
||||
labels:
|
||||
- automatic backport
|
||||
- merge-queue
|
||||
2
.version
2
.version
|
|
@ -1 +1 @@
|
|||
2.32.0
|
||||
2.33.0
|
||||
|
|
|
|||
|
|
@ -21,16 +21,6 @@ let
|
|||
packages' = nixFlake.packages.${system};
|
||||
stdenv = (getStdenv pkgs);
|
||||
|
||||
enableSanitizersLayer = finalAttrs: prevAttrs: {
|
||||
mesonFlags =
|
||||
(prevAttrs.mesonFlags or [ ])
|
||||
++ [ (lib.mesonOption "b_sanitize" "address,undefined") ]
|
||||
++ (lib.optionals stdenv.cc.isClang [
|
||||
# https://www.github.com/mesonbuild/meson/issues/764
|
||||
(lib.mesonBool "b_lundef" false)
|
||||
]);
|
||||
};
|
||||
|
||||
collectCoverageLayer = finalAttrs: prevAttrs: {
|
||||
env =
|
||||
let
|
||||
|
|
@ -53,14 +43,15 @@ let
|
|||
'';
|
||||
};
|
||||
|
||||
componentOverrides =
|
||||
(lib.optional withSanitizers enableSanitizersLayer)
|
||||
++ (lib.optional withCoverage collectCoverageLayer);
|
||||
componentOverrides = (lib.optional withCoverage collectCoverageLayer);
|
||||
in
|
||||
|
||||
rec {
|
||||
nixComponentsInstrumented = nixComponents.overrideScope (
|
||||
final: prev: {
|
||||
withASan = withSanitizers;
|
||||
withUBSan = withSanitizers;
|
||||
|
||||
nix-store-tests = prev.nix-store-tests.override { withBenchmarks = true; };
|
||||
# Boehm is incompatible with ASAN.
|
||||
nix-expr = prev.nix-expr.override { enableGC = !withSanitizers; };
|
||||
|
|
@ -71,6 +62,14 @@ rec {
|
|||
}
|
||||
);
|
||||
|
||||
# Import NixOS tests using the instrumented components
|
||||
nixosTests = import ../../../tests/nixos {
|
||||
inherit lib pkgs;
|
||||
nixComponents = nixComponentsInstrumented;
|
||||
nixpkgs = nixFlake.inputs.nixpkgs;
|
||||
inherit (nixFlake.inputs) nixpkgs-23-11;
|
||||
};
|
||||
|
||||
/**
|
||||
Top-level tests for the flake outputs, as they would be built by hydra.
|
||||
These tests generally can't be overridden to run with sanitizers.
|
||||
|
|
@ -117,6 +116,7 @@ rec {
|
|||
) nixComponentsInstrumented)
|
||||
// lib.optionalAttrs (pkgs.stdenv.hostPlatform == pkgs.stdenv.buildPlatform) {
|
||||
"${componentTestsPrefix}nix-functional-tests" = nixComponentsInstrumented.nix-functional-tests;
|
||||
"${componentTestsPrefix}nix-json-schema-checks" = nixComponentsInstrumented.nix-json-schema-checks;
|
||||
};
|
||||
|
||||
codeCoverage =
|
||||
|
|
@ -221,4 +221,20 @@ rec {
|
|||
{
|
||||
inherit coverageProfileDrvs mergedProfdata coverageReports;
|
||||
};
|
||||
|
||||
vmTests = {
|
||||
inherit (nixosTests) s3-binary-cache-store;
|
||||
}
|
||||
// lib.optionalAttrs (!withSanitizers && !withCoverage) {
|
||||
# evalNixpkgs uses non-instrumented components from hydraJobs, so only run it
|
||||
# when not testing with sanitizers to avoid rebuilding nix
|
||||
inherit (hydraJobs.tests) evalNixpkgs;
|
||||
# FIXME: CI times out when building vm tests instrumented
|
||||
inherit (nixosTests)
|
||||
functional_user
|
||||
githubFlakes
|
||||
nix-docker
|
||||
tarballFlakes
|
||||
;
|
||||
};
|
||||
}
|
||||
|
|
|
|||
|
|
@ -15,7 +15,6 @@ pymod = import('python')
|
|||
python = pymod.find_installation('python3')
|
||||
|
||||
nix_env_for_docs = {
|
||||
'ASAN_OPTIONS' : 'abort_on_error=1:print_summary=1:detect_leaks=0',
|
||||
'HOME' : '/dummy',
|
||||
'NIX_CONF_DIR' : '/dummy',
|
||||
'NIX_SSL_CERT_FILE' : '/dummy/no-ca-bundle.crt',
|
||||
|
|
@ -89,7 +88,7 @@ manual = custom_target(
|
|||
@0@ @INPUT0@ @CURRENT_SOURCE_DIR@ > @DEPFILE@
|
||||
@0@ @INPUT1@ summary @2@ < @CURRENT_SOURCE_DIR@/source/SUMMARY.md.in > @2@/source/SUMMARY.md
|
||||
sed -e 's|@version@|@3@|g' < @INPUT2@ > @2@/book.toml
|
||||
@4@ -r --include='*.md' @CURRENT_SOURCE_DIR@/ @2@/
|
||||
@4@ -r -L --include='*.md' @CURRENT_SOURCE_DIR@/ @2@/
|
||||
(cd @2@; RUST_LOG=warn @1@ build -d @2@ 3>&2 2>&1 1>&3) | { grep -Fv "because fragment resolution isn't implemented" || :; } 3>&2 2>&1 1>&3
|
||||
rm -rf @2@/manual
|
||||
mv @2@/html @2@/manual
|
||||
|
|
@ -116,6 +115,7 @@ manual = custom_target(
|
|||
builtins_md,
|
||||
rl_next_generated,
|
||||
summary_rl_next,
|
||||
json_schema_generated_files,
|
||||
nix_input,
|
||||
],
|
||||
output : [
|
||||
|
|
|
|||
|
|
@ -12,6 +12,7 @@
|
|||
rsync,
|
||||
nix-cli,
|
||||
changelog-d,
|
||||
json-schema-for-humans,
|
||||
officialRelease,
|
||||
|
||||
# Configuration Options
|
||||
|
|
@ -32,6 +33,13 @@ mkMesonDerivation (finalAttrs: {
|
|||
fileset.difference
|
||||
(fileset.unions [
|
||||
../../.version
|
||||
# For example JSON
|
||||
../../src/libutil-tests/data/hash
|
||||
../../src/libstore-tests/data/content-address
|
||||
../../src/libstore-tests/data/store-path
|
||||
../../src/libstore-tests/data/derived-path
|
||||
../../src/libstore-tests/data/path-info
|
||||
../../src/libstore-tests/data/nar-info
|
||||
# Too many different types of files to filter for now
|
||||
../../doc/manual
|
||||
./.
|
||||
|
|
@ -55,6 +63,7 @@ mkMesonDerivation (finalAttrs: {
|
|||
jq
|
||||
python3
|
||||
rsync
|
||||
json-schema-for-humans
|
||||
changelog-d
|
||||
]
|
||||
++ lib.optionals (!officialRelease) [
|
||||
|
|
|
|||
|
|
@ -1,7 +0,0 @@
|
|||
---
|
||||
synopsis: "C API: `nix_get_attr_name_byidx`, `nix_get_attr_byidx` take a `nix_value *` instead of `const nix_value *`"
|
||||
prs: [13987]
|
||||
---
|
||||
|
||||
In order to accommodate a more optimized internal representation of attribute set merges these functions require
|
||||
a mutable `nix_value *` that might be modified on access. This does *not* break the ABI of these functions.
|
||||
|
|
@ -1,16 +0,0 @@
|
|||
---
|
||||
synopsis: "C API: Add lazy attribute and list item accessors"
|
||||
prs: [14030]
|
||||
---
|
||||
|
||||
The C API now includes lazy accessor functions for retrieving values from lists and attribute sets without forcing evaluation:
|
||||
|
||||
- `nix_get_list_byidx_lazy()` - Get a list element without forcing its evaluation
|
||||
- `nix_get_attr_byname_lazy()` - Get an attribute value by name without forcing evaluation
|
||||
- `nix_get_attr_byidx_lazy()` - Get an attribute by index without forcing evaluation
|
||||
|
||||
These functions are useful when forwarding unevaluated sub-values to other lists, attribute sets, or function calls. They allow more efficient handling of Nix values by deferring evaluation until actually needed.
|
||||
|
||||
Additionally, bounds checking has been improved for all `_byidx` functions to properly validate indices before access, preventing potential out-of-bounds errors.
|
||||
|
||||
The documentation for `NIX_ERR_KEY` error handling has also been clarified to specify when this error code is returned.
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
---
|
||||
synopsis: "Substituted flake inputs are no longer re-copied to the store"
|
||||
prs: [14041]
|
||||
---
|
||||
|
||||
Since 2.25, Nix would fail to store a cache entry for substituted flake inputs,
|
||||
which in turn would cause them to be re-copied to the store on initial
|
||||
evaluation. Caching these inputs results in a near doubling of a performance in
|
||||
some cases — especially on I/O-bound machines and when using commands that
|
||||
fetch many inputs, like `nix flake archive/prefetch-inputs`
|
||||
|
|
@ -1,15 +0,0 @@
|
|||
---
|
||||
synopsis: Derivation JSON format now uses store path basenames (no store dir) only
|
||||
prs: [13980]
|
||||
issues: [13570]
|
||||
---
|
||||
|
||||
Experience with many JSON frameworks (e.g. nlohmann/json in C++, Serde in Rust, and Aeson in Haskell), has shown that the use of the store dir in JSON formats is an impediment to systematic JSON formats,
|
||||
because it requires the serializer/deserializer to take an extra paramater (the store dir).
|
||||
|
||||
We ultimately want to rectify this issue with all (non-stable, able to be changed) JSON formats.
|
||||
To start with, we are changing the JSON format for derivations because the `nix derivation` commands are
|
||||
--- in addition to being formally unstable
|
||||
--- less widely used than other unstable commands.
|
||||
|
||||
See the documentation on the [JSON format for derivations](@docroot@/protocols/json/derivation.md) for further details.
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
---
|
||||
synopsis: "Removed support for daemons and clients older than Nix 2.0"
|
||||
prs: [13951]
|
||||
---
|
||||
|
||||
We have dropped support in the daemon worker protocol for daemons and clients that don't speak at least version 18 of the protocol. This first Nix release that supports this version is Nix 2.0, released in February 2018.
|
||||
|
|
@ -1,9 +0,0 @@
|
|||
---
|
||||
synopsis: "`nix flake check` now skips derivations that can be substituted"
|
||||
prs: [13574]
|
||||
---
|
||||
|
||||
Previously, `nix flake check` would evaluate and build/substitute all
|
||||
derivations. Now, it will skip downloading derivations that can be substituted.
|
||||
This can drastically decrease the time invocations take in environments where
|
||||
checks may already be cached (like in CI).
|
||||
26
doc/manual/rl-next/s3-curl-implementation.md
Normal file
26
doc/manual/rl-next/s3-curl-implementation.md
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
---
|
||||
synopsis: "Improved S3 binary cache support via HTTP"
|
||||
prs: [13823, 14026, 14120, 14131, 14135, 14144, 14170, 14190, 14198, 14206, 14209, 14222, 14223, 13752]
|
||||
issues: [13084, 12671, 11748, 12403]
|
||||
---
|
||||
|
||||
S3 binary cache operations now happen via HTTP, leveraging `libcurl`'s native
|
||||
AWS SigV4 authentication instead of the AWS C++ SDK, providing significant
|
||||
improvements:
|
||||
|
||||
- **Reduced memory usage**: Eliminates memory buffering issues that caused
|
||||
segfaults with large files
|
||||
- **Fixed upload reliability**: Resolves AWS SDK chunking errors
|
||||
(`InvalidChunkSizeError`)
|
||||
- **Lighter dependencies**: Uses lightweight `aws-crt-cpp` instead of full
|
||||
`aws-cpp-sdk`, reducing build complexity
|
||||
|
||||
The new implementation requires curl >= 7.75.0 and `aws-crt-cpp` for credential
|
||||
management.
|
||||
|
||||
All existing S3 URL formats and parameters remain supported, with the notable
|
||||
exception of multi-part uploads, which are no longer supported.
|
||||
|
||||
Note that this change also means Nix now supports S3 binary cache stores even
|
||||
if build without `aws-crt-cpp`, but only for public buckets which do not
|
||||
require auth.
|
||||
14
doc/manual/rl-next/s3-object-versioning.md
Normal file
14
doc/manual/rl-next/s3-object-versioning.md
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
---
|
||||
synopsis: "S3 URLs now support object versioning via versionId parameter"
|
||||
prs: [14274]
|
||||
issues: [13955]
|
||||
---
|
||||
|
||||
S3 URLs now support a `versionId` query parameter to fetch specific versions
|
||||
of objects from S3 buckets with versioning enabled. This allows pinning to
|
||||
exact object versions for reproducibility and protection against unexpected
|
||||
changes:
|
||||
|
||||
```
|
||||
s3://bucket/key?region=us-east-1&versionId=abc123def456
|
||||
```
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
---
|
||||
synopsis: "Temporary build directories no longer include derivation names"
|
||||
prs: [13839]
|
||||
---
|
||||
|
||||
Temporary build directories created during derivation builds no longer include the derivation name in their path to avoid build failures when the derivation name is too long. This change ensures predictable prefix lengths for build directories under `/nix/var/nix/builds`.
|
||||
|
|
@ -117,8 +117,12 @@
|
|||
- [Architecture and Design](architecture/architecture.md)
|
||||
- [Formats and Protocols](protocols/index.md)
|
||||
- [JSON Formats](protocols/json/index.md)
|
||||
- [Hash](protocols/json/hash.md)
|
||||
- [Content Address](protocols/json/content-address.md)
|
||||
- [Store Path](protocols/json/store-path.md)
|
||||
- [Store Object Info](protocols/json/store-object-info.md)
|
||||
- [Derivation](protocols/json/derivation.md)
|
||||
- [Deriving Path](protocols/json/deriving-path.md)
|
||||
- [Serving Tarball Flakes](protocols/tarball-fetcher.md)
|
||||
- [Store Path Specification](protocols/store-path.md)
|
||||
- [Nix Archive (NAR) Format](protocols/nix-archive.md)
|
||||
|
|
@ -138,6 +142,7 @@
|
|||
- [Contributing](development/contributing.md)
|
||||
- [Releases](release-notes/index.md)
|
||||
{{#include ./SUMMARY-rl-next.md}}
|
||||
- [Release 2.32 (2025-10-06)](release-notes/rl-2.32.md)
|
||||
- [Release 2.31 (2025-08-21)](release-notes/rl-2.31.md)
|
||||
- [Release 2.30 (2025-07-07)](release-notes/rl-2.30.md)
|
||||
- [Release 2.29 (2025-05-14)](release-notes/rl-2.29.md)
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
This man page describes the command `nix-shell`, which is distinct from `nix
|
||||
shell`. For documentation on the latter, run `nix shell --help` or see `man
|
||||
nix3-shell`.
|
||||
nix3-env-shell`.
|
||||
|
||||
# Description
|
||||
|
||||
|
|
|
|||
|
|
@ -48,8 +48,7 @@ The behaviour of the collector is also influenced by the
|
|||
configuration file.
|
||||
|
||||
By default, the collector prints the total number of freed bytes when it
|
||||
finishes (or when it is interrupted). With `--print-dead`, it prints the
|
||||
number of bytes that would be freed.
|
||||
finishes (or when it is interrupted).
|
||||
|
||||
{{#include ./opt-common.md}}
|
||||
|
||||
|
|
|
|||
|
|
@ -25,20 +25,31 @@ nix build .#nix-manual
|
|||
and open `./result/share/doc/nix/manual/index.html`.
|
||||
|
||||
|
||||
To build the manual incrementally, [enter the development shell](./building.md) and run:
|
||||
To build the manual incrementally, [enter the development shell](./building.md) and configure with `doc-gen` enabled:
|
||||
|
||||
**If using interactive `nix develop`:**
|
||||
|
||||
```console
|
||||
make manual-html-open -j $NIX_BUILD_CORES
|
||||
$ nix develop
|
||||
$ mesonFlags="$mesonFlags -Ddoc-gen=true" mesonConfigurePhase
|
||||
```
|
||||
|
||||
In order to reflect changes to the [Makefile for the manual], clear all generated files before re-building:
|
||||
|
||||
[Makefile for the manual]: https://github.com/NixOS/nix/blob/master/doc/manual/local.mk
|
||||
**If using direnv:**
|
||||
|
||||
```console
|
||||
rm $(git ls-files doc/manual/ -o | grep -F '.md') && rmdir doc/manual/source/command-ref/new-cli && make manual-html -j $NIX_BUILD_CORES
|
||||
$ direnv allow
|
||||
$ bash -c 'source $stdenv/setup && mesonFlags="$mesonFlags -Ddoc-gen=true" mesonConfigurePhase'
|
||||
```
|
||||
|
||||
Then build the manual:
|
||||
|
||||
```console
|
||||
$ cd build
|
||||
$ meson compile manual
|
||||
```
|
||||
|
||||
The HTML manual will be generated at `build/src/nix-manual/manual/index.html`.
|
||||
|
||||
## Style guide
|
||||
|
||||
The goal of this style guide is to make it such that
|
||||
|
|
@ -229,3 +240,9 @@ $ configurePhase
|
|||
$ ninja src/external-api-docs/html
|
||||
$ xdg-open src/external-api-docs/html/index.html
|
||||
```
|
||||
|
||||
If you use direnv, or otherwise want to run `configurePhase` in a transient shell, use:
|
||||
|
||||
```bash
|
||||
nix-shell -A devShells.x86_64-linux.native-clangStdenv --command 'appendToVar mesonFlags "-Ddoc-gen=true"; mesonConfigurePhase'
|
||||
```
|
||||
|
|
|
|||
|
|
@ -3,19 +3,21 @@
|
|||
To run the latest stable release of Nix with Docker run the following command:
|
||||
|
||||
```console
|
||||
$ docker run -ti ghcr.io/nixos/nix
|
||||
Unable to find image 'ghcr.io/nixos/nix:latest' locally
|
||||
latest: Pulling from ghcr.io/nixos/nix
|
||||
$ docker run -ti docker.io/nixos/nix
|
||||
Unable to find image 'docker.io/nixos/nix:latest' locally
|
||||
latest: Pulling from docker.io/nixos/nix
|
||||
5843afab3874: Pull complete
|
||||
b52bf13f109c: Pull complete
|
||||
1e2415612aa3: Pull complete
|
||||
Digest: sha256:27f6e7f60227e959ee7ece361f75d4844a40e1cc6878b6868fe30140420031ff
|
||||
Status: Downloaded newer image for ghcr.io/nixos/nix:latest
|
||||
Status: Downloaded newer image for docker.io/nixos/nix:latest
|
||||
35ca4ada6e96:/# nix --version
|
||||
nix (Nix) 2.3.12
|
||||
35ca4ada6e96:/# exit
|
||||
```
|
||||
|
||||
> If you want the latest pre-release you can use ghcr.io/nixos/nix and view them at https://github.com/nixos/nix/pkgs/container/nix
|
||||
|
||||
# What is included in Nix's Docker image?
|
||||
|
||||
The official Docker image is created using `pkgs.dockerTools.buildLayeredImage`
|
||||
|
|
|
|||
|
|
@ -1,3 +1,6 @@
|
|||
# Process JSON schema documentation
|
||||
subdir('protocols')
|
||||
|
||||
summary_rl_next = custom_target(
|
||||
command : [
|
||||
bash,
|
||||
|
|
|
|||
21
doc/manual/source/protocols/json/content-address.md
Normal file
21
doc/manual/source/protocols/json/content-address.md
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
{{#include content-address-v1-fixed.md}}
|
||||
|
||||
## Examples
|
||||
|
||||
### [Text](@docroot@/store/store-object/content-address.html#method-text) method
|
||||
|
||||
```json
|
||||
{{#include schema/content-address-v1/text.json}}
|
||||
```
|
||||
|
||||
### [Nix Archive](@docroot@/store/store-object/content-address.html#method-nix-archive) method
|
||||
|
||||
```json
|
||||
{{#include schema/content-address-v1/nar.json}}
|
||||
```
|
||||
|
||||
<!-- need to convert YAML to JSON first
|
||||
## Raw Schema
|
||||
|
||||
[JSON Schema for Hash v1](schema/content-address-v1.json)
|
||||
-->
|
||||
|
|
@ -1,120 +1,7 @@
|
|||
# Derivation JSON Format
|
||||
{{#include derivation-v3-fixed.md}}
|
||||
|
||||
> **Warning**
|
||||
>
|
||||
> This JSON format is currently
|
||||
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
|
||||
> and subject to change.
|
||||
<!-- need to convert YAML to JSON first
|
||||
## Raw Schema
|
||||
|
||||
The JSON serialization of a
|
||||
[derivations](@docroot@/glossary.md#gloss-store-derivation)
|
||||
is a JSON object with the following fields:
|
||||
|
||||
* `name`:
|
||||
The name of the derivation.
|
||||
This is used when calculating the store paths of the derivation's outputs.
|
||||
|
||||
* `version`:
|
||||
Must be `3`.
|
||||
This is a guard that allows us to continue evolving this format.
|
||||
The choice of `3` is fairly arbitrary, but corresponds to this informal version:
|
||||
|
||||
- Version 0: A-Term format
|
||||
|
||||
- Version 1: Original JSON format, with ugly `"r:sha256"` inherited from A-Term format.
|
||||
|
||||
- Version 2: Separate `method` and `hashAlgo` fields in output specs
|
||||
|
||||
- Verison 3: Drop store dir from store paths, just include base name.
|
||||
|
||||
Note that while this format is experimental, the maintenance of versions is best-effort, and not promised to identify every change.
|
||||
|
||||
* `outputs`:
|
||||
Information about the output paths of the derivation.
|
||||
This is a JSON object with one member per output, where the key is the output name and the value is a JSON object with these fields:
|
||||
|
||||
* `path`:
|
||||
The output path, if it is known in advanced.
|
||||
Otherwise, `null`.
|
||||
|
||||
|
||||
* `method`:
|
||||
For an output which will be [content addressed], a string representing the [method](@docroot@/store/store-object/content-address.md) of content addressing that is chosen.
|
||||
Valid method strings are:
|
||||
|
||||
- [`flat`](@docroot@/store/store-object/content-address.md#method-flat)
|
||||
- [`nar`](@docroot@/store/store-object/content-address.md#method-nix-archive)
|
||||
- [`text`](@docroot@/store/store-object/content-address.md#method-text)
|
||||
- [`git`](@docroot@/store/store-object/content-address.md#method-git)
|
||||
|
||||
Otherwise, `null`.
|
||||
|
||||
* `hashAlgo`:
|
||||
For an output which will be [content addressed], the name of the hash algorithm used.
|
||||
Valid algorithm strings are:
|
||||
|
||||
- `blake3`
|
||||
- `md5`
|
||||
- `sha1`
|
||||
- `sha256`
|
||||
- `sha512`
|
||||
|
||||
* `hash`:
|
||||
For fixed-output derivations, the expected content hash in base-16.
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> ```json
|
||||
> "outputs": {
|
||||
> "out": {
|
||||
> "method": "nar",
|
||||
> "hashAlgo": "sha256",
|
||||
> "hash": "6fc80dcc62179dbc12fc0b5881275898f93444833d21b89dfe5f7fbcbb1d0d62"
|
||||
> }
|
||||
> }
|
||||
> ```
|
||||
|
||||
* `inputSrcs`:
|
||||
A list of store paths on which this derivation depends.
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> ```json
|
||||
> "inputSrcs": [
|
||||
> "47y241wqdhac3jm5l7nv0x4975mb1975-separate-debug-info.sh",
|
||||
> "56d0w71pjj9bdr363ym3wj1zkwyqq97j-fix-pop-var-context-error.patch"
|
||||
> ]
|
||||
> ```
|
||||
|
||||
* `inputDrvs`:
|
||||
A JSON object specifying the derivations on which this derivation depends, and what outputs of those derivations.
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> ```json
|
||||
> "inputDrvs": {
|
||||
> "6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"],
|
||||
> "fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"]
|
||||
> }
|
||||
> ```
|
||||
|
||||
specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`.
|
||||
|
||||
* `system`:
|
||||
The system type on which this derivation is to be built
|
||||
(e.g. `x86_64-linux`).
|
||||
|
||||
* `builder`:
|
||||
The absolute path of the program to be executed to run the build.
|
||||
Typically this is the `bash` shell
|
||||
(e.g. `/nix/store/r3j288vpmczbl500w6zz89gyfa4nr0b1-bash-4.4-p23/bin/bash`).
|
||||
|
||||
* `args`:
|
||||
The command-line arguments passed to the `builder`.
|
||||
|
||||
* `env`:
|
||||
The environment passed to the `builder`.
|
||||
|
||||
* `structuredAttrs`:
|
||||
[Strucutured Attributes](@docroot@/store/derivation/index.md#structured-attrs), only defined if the derivation contains them.
|
||||
Structured attributes are JSON, and thus embedded as-is.
|
||||
[JSON Schema for Derivation v3](schema/derivation-v3.json)
|
||||
-->
|
||||
|
|
|
|||
21
doc/manual/source/protocols/json/deriving-path.md
Normal file
21
doc/manual/source/protocols/json/deriving-path.md
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
{{#include deriving-path-v1-fixed.md}}
|
||||
|
||||
## Examples
|
||||
|
||||
### Constant
|
||||
|
||||
```json
|
||||
{{#include schema/deriving-path-v1/single_opaque.json}}
|
||||
```
|
||||
|
||||
### Output of static derivation
|
||||
|
||||
```json
|
||||
{{#include schema/deriving-path-v1/single_built.json}}
|
||||
```
|
||||
|
||||
### Output of dynamic derivation
|
||||
|
||||
```json
|
||||
{{#include schema/deriving-path-v1/single_built_built.json}}
|
||||
```
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
# For some reason, backticks in the JSON schema are being escaped rather
|
||||
# than being kept as intentional code spans. This removes all backtick
|
||||
# escaping, which is an ugly solution, but one that is fine, because we
|
||||
# are not using backticks for any other purpose.
|
||||
s/\\`/`/g
|
||||
|
||||
# The way that semi-external references are rendered (i.e. ones to
|
||||
# sibling schema files, as opposed to separate website ones, is not nice
|
||||
# for humans. Replace it with a nice relative link within the manual
|
||||
# instead.
|
||||
#
|
||||
# As we have more such relative links, more replacements of this nature
|
||||
# should appear below.
|
||||
s^\(./hash-v1.yaml\)\?#/$defs/algorithm^[JSON format for `Hash`](./hash.html#algorithm)^g
|
||||
s^\(./hash-v1.yaml\)^[JSON format for `Hash`](./hash.html)^g
|
||||
s^\(./content-address-v1.yaml\)\?#/$defs/method^[JSON format for `ContentAddress`](./content-address.html#method)^g
|
||||
s^\(./content-address-v1.yaml\)^[JSON format for `ContentAddress`](./content-address.html)^g
|
||||
33
doc/manual/source/protocols/json/hash.md
Normal file
33
doc/manual/source/protocols/json/hash.md
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
{{#include hash-v1-fixed.md}}
|
||||
|
||||
## Examples
|
||||
|
||||
### SHA-256 with Base64 encoding
|
||||
|
||||
```json
|
||||
{{#include schema/hash-v1/sha256-base64.json}}
|
||||
```
|
||||
|
||||
### SHA-256 with Base16 (hexadecimal) encoding
|
||||
|
||||
```json
|
||||
{{#include schema/hash-v1/sha256-base16.json}}
|
||||
```
|
||||
|
||||
### SHA-256 with Nix32 encoding
|
||||
|
||||
```json
|
||||
{{#include schema/hash-v1/sha256-nix32.json}}
|
||||
```
|
||||
|
||||
### BLAKE3 with Base64 encoding
|
||||
|
||||
```json
|
||||
{{#include schema/hash-v1/blake3-base64.json}}
|
||||
```
|
||||
|
||||
<!-- need to convert YAML to JSON first
|
||||
## Raw Schema
|
||||
|
||||
[JSON Schema for Hash v1](schema/hash-v1.json)
|
||||
-->
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
# Configuration file for json-schema-for-humans
|
||||
#
|
||||
# https://github.com/coveooss/json-schema-for-humans/blob/main/docs/examples/examples_md_default/Configuration.md
|
||||
|
||||
template_name: md
|
||||
show_toc: true
|
||||
# impure timestamp and distracting
|
||||
with_footer: false
|
||||
recursive_detection_depth: 3
|
||||
show_breadcrumbs: false
|
||||
description_is_markdown: true
|
||||
template_md_options:
|
||||
properties_table_columns:
|
||||
- Property
|
||||
- Type
|
||||
- Pattern
|
||||
- Title/Description
|
||||
78
doc/manual/source/protocols/json/meson.build
Normal file
78
doc/manual/source/protocols/json/meson.build
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
# Tests in: ../../../../src/json-schema-checks
|
||||
|
||||
fs = import('fs')
|
||||
|
||||
# Find json-schema-for-humans if available
|
||||
json_schema_for_humans = find_program('generate-schema-doc', required : false)
|
||||
|
||||
# Configuration for json-schema-for-humans
|
||||
json_schema_config = files('json-schema-for-humans-config.yaml')
|
||||
|
||||
schemas = [
|
||||
'hash-v1',
|
||||
'content-address-v1',
|
||||
'store-path-v1',
|
||||
'store-object-info-v1',
|
||||
'derivation-v3',
|
||||
'deriving-path-v1',
|
||||
]
|
||||
|
||||
schema_files = files()
|
||||
foreach schema_name : schemas
|
||||
schema_files += files('schema' / schema_name + '.yaml')
|
||||
endforeach
|
||||
|
||||
|
||||
schema_outputs = []
|
||||
foreach schema_name : schemas
|
||||
schema_outputs += schema_name + '.md'
|
||||
endforeach
|
||||
|
||||
json_schema_generated_files = []
|
||||
|
||||
# Generate markdown documentation from JSON schema
|
||||
# Note: output must be just a filename, not a path
|
||||
gen_file = custom_target(
|
||||
schema_name + '-schema-docs.tmp',
|
||||
command : [
|
||||
json_schema_for_humans,
|
||||
'--config-file',
|
||||
json_schema_config,
|
||||
meson.current_source_dir() / 'schema',
|
||||
meson.current_build_dir(),
|
||||
],
|
||||
input : schema_files + [
|
||||
json_schema_config,
|
||||
],
|
||||
output : schema_outputs,
|
||||
capture : false,
|
||||
build_by_default : true,
|
||||
)
|
||||
|
||||
idx = 0
|
||||
if json_schema_for_humans.found()
|
||||
foreach schema_name : schemas
|
||||
#schema_file = 'schema' / schema_name + '.yaml'
|
||||
|
||||
# There is one so-so hack, and one horrible hack being done here.
|
||||
sedded_file = custom_target(
|
||||
schema_name + '-schema-docs',
|
||||
command : [
|
||||
'sed',
|
||||
'-f',
|
||||
# Out of line to avoid https://github.com/mesonbuild/meson/issues/1564
|
||||
files('fixup-json-schema-generated-doc.sed'),
|
||||
'@INPUT@',
|
||||
],
|
||||
capture : true,
|
||||
input : gen_file[idx],
|
||||
output : schema_name + '-fixed.md',
|
||||
)
|
||||
idx += 1
|
||||
json_schema_generated_files += [ sedded_file ]
|
||||
endforeach
|
||||
else
|
||||
warning(
|
||||
'json-schema-for-humans not found, skipping JSON schema documentation generation',
|
||||
)
|
||||
endif
|
||||
1
doc/manual/source/protocols/json/schema/content-address-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/content-address-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../../../../../src/libstore-tests/data/content-address
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
"$schema": "http://json-schema.org/draft-04/schema"
|
||||
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/content-address-v1.json"
|
||||
title: Content Address
|
||||
description: |
|
||||
This schema describes the JSON representation of Nix's `ContentAddress` type, which conveys information about [content-addressing store objects](@docroot@/store/store-object/content-address.md).
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> For current methods of content addressing, this data type is a bit suspicious, because it is neither simply a content address of a file system object (the `method` is richer), nor simply a content address of a store object (the `hash` doesn't account for the references).
|
||||
> It should thus only be used in contexts where the references are also known / otherwise made tamper-resistant.
|
||||
|
||||
<!--
|
||||
TODO currently `ContentAddress` is used in both of these, and so same rationale applies, but actually in both cases the JSON is currently ad-hoc.
|
||||
That will be fixed, and as each is fixed, the example (along with a more precise link to the field in question) should become part of the above note, so that what it is saying is more clear.
|
||||
|
||||
> For example:
|
||||
|
||||
> - Fixed outputs of derivations are not allowed to have any references, so an empty reference set is statically known by assumption.
|
||||
|
||||
> - [Store object info](./store-object-info.md) includes the set of references along side the (optional) content address.
|
||||
|
||||
> This data type is thus safely used in both of these contexts.
|
||||
|
||||
-->
|
||||
|
||||
type: object
|
||||
properties:
|
||||
method:
|
||||
"$ref": "#/$defs/method"
|
||||
hash:
|
||||
title: Content Address
|
||||
description: |
|
||||
This would be the content-address itself.
|
||||
|
||||
For all current methods, this is just a content address of the file system object of the store object, [as described in the store chapter](@docroot@/store/file-system-object/content-address.md), and not of the store object as a whole.
|
||||
In particular, the references of the store object are *not* taken into account with this hash (and currently-supported methods).
|
||||
"$ref": "./hash-v1.yaml"
|
||||
required:
|
||||
- method
|
||||
- hash
|
||||
additionalProperties: false
|
||||
"$defs":
|
||||
method:
|
||||
type: string
|
||||
enum: [flat, nar, text, git]
|
||||
title: Content-Addressing Method
|
||||
description: |
|
||||
A string representing the [method](@docroot@/store/store-object/content-address.md) of content addressing that is chosen.
|
||||
|
||||
Valid method strings are:
|
||||
|
||||
- [`flat`](@docroot@/store/store-object/content-address.md#method-flat) (provided the contents are a single file)
|
||||
- [`nar`](@docroot@/store/store-object/content-address.md#method-nix-archive)
|
||||
- [`text`](@docroot@/store/store-object/content-address.md#method-text)
|
||||
- [`git`](@docroot@/store/store-object/content-address.md#method-git)
|
||||
203
doc/manual/source/protocols/json/schema/derivation-v3.yaml
Normal file
203
doc/manual/source/protocols/json/schema/derivation-v3.yaml
Normal file
|
|
@ -0,0 +1,203 @@
|
|||
"$schema": "http://json-schema.org/draft-04/schema"
|
||||
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/derivation-v3.json"
|
||||
title: Derivation
|
||||
description: |
|
||||
Experimental JSON representation of a Nix derivation (version 3).
|
||||
|
||||
This schema describes the JSON representation of Nix's `Derivation` type.
|
||||
|
||||
> **Warning**
|
||||
>
|
||||
> This JSON format is currently
|
||||
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
|
||||
> and subject to change.
|
||||
|
||||
type: object
|
||||
required:
|
||||
- name
|
||||
- version
|
||||
- outputs
|
||||
- inputSrcs
|
||||
- inputDrvs
|
||||
- system
|
||||
- builder
|
||||
- args
|
||||
- env
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
title: Derivation name
|
||||
description: |
|
||||
The name of the derivation.
|
||||
Used when calculating store paths for the derivation’s outputs.
|
||||
|
||||
version:
|
||||
const: 3
|
||||
title: Format version (must be 3)
|
||||
description: |
|
||||
Must be `3`.
|
||||
This is a guard that allows us to continue evolving this format.
|
||||
The choice of `3` is fairly arbitrary, but corresponds to this informal version:
|
||||
|
||||
- Version 0: A-Term format
|
||||
|
||||
- Version 1: Original JSON format, with ugly `"r:sha256"` inherited from A-Term format.
|
||||
|
||||
- Version 2: Separate `method` and `hashAlgo` fields in output specs
|
||||
|
||||
- Version 3: Drop store dir from store paths, just include base name.
|
||||
|
||||
Note that while this format is experimental, the maintenance of versions is best-effort, and not promised to identify every change.
|
||||
|
||||
outputs:
|
||||
type: object
|
||||
title: Output specifications
|
||||
description: |
|
||||
Information about the output paths of the derivation.
|
||||
This is a JSON object with one member per output, where the key is the output name and the value is a JSON object as described.
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> ```json
|
||||
> "outputs": {
|
||||
> "out": {
|
||||
> "method": "nar",
|
||||
> "hashAlgo": "sha256",
|
||||
> "hash": "6fc80dcc62179dbc12fc0b5881275898f93444833d21b89dfe5f7fbcbb1d0d62"
|
||||
> }
|
||||
> }
|
||||
> ```
|
||||
additionalProperties:
|
||||
"$ref": "#/$defs/output"
|
||||
|
||||
inputSrcs:
|
||||
type: array
|
||||
title: Input source paths
|
||||
description: |
|
||||
List of store paths on which this derivation depends.
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> ```json
|
||||
> "inputSrcs": [
|
||||
> "47y241wqdhac3jm5l7nv0x4975mb1975-separate-debug-info.sh",
|
||||
> "56d0w71pjj9bdr363ym3wj1zkwyqq97j-fix-pop-var-context-error.patch"
|
||||
> ]
|
||||
> ```
|
||||
items:
|
||||
$ref: "store-path-v1.yaml"
|
||||
|
||||
inputDrvs:
|
||||
type: object
|
||||
title: Input derivations
|
||||
description: |
|
||||
Mapping of derivation paths to lists of output names they provide.
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> ```json
|
||||
> "inputDrvs": {
|
||||
> "6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"],
|
||||
> "fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"]
|
||||
> }
|
||||
> ```
|
||||
>
|
||||
> specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`.
|
||||
patternProperties:
|
||||
"^[0123456789abcdfghijklmnpqrsvwxyz]{32}-.+\\.drv$":
|
||||
title: Store Path
|
||||
description: |
|
||||
A store path to a derivation, mapped to the outputs of that derivation.
|
||||
oneOf:
|
||||
- "$ref": "#/$defs/outputNames"
|
||||
- "$ref": "#/$defs/dynamicOutputs"
|
||||
additionalProperties: false
|
||||
|
||||
system:
|
||||
type: string
|
||||
title: Build system type
|
||||
description: |
|
||||
The system type on which this derivation is to be built
|
||||
(e.g. `x86_64-linux`).
|
||||
|
||||
builder:
|
||||
type: string
|
||||
title: Build program path
|
||||
description: |
|
||||
Absolute path of the program used to perform the build.
|
||||
Typically this is the `bash` shell
|
||||
(e.g. `/nix/store/r3j288vpmczbl500w6zz89gyfa4nr0b1-bash-4.4-p23/bin/bash`).
|
||||
|
||||
args:
|
||||
type: array
|
||||
title: Builder arguments
|
||||
description: |
|
||||
Command-line arguments passed to the `builder`.
|
||||
items:
|
||||
type: string
|
||||
|
||||
env:
|
||||
type: object
|
||||
title: Environment variables
|
||||
description: |
|
||||
Environment variables passed to the `builder`.
|
||||
additionalProperties:
|
||||
type: string
|
||||
|
||||
structuredAttrs:
|
||||
title: Structured attributes
|
||||
description: |
|
||||
[Structured Attributes](@docroot@/store/derivation/index.md#structured-attrs), only defined if the derivation contains them.
|
||||
Structured attributes are JSON, and thus embedded as-is.
|
||||
type: object
|
||||
additionalProperties: true
|
||||
|
||||
"$defs":
|
||||
output:
|
||||
type: object
|
||||
properties:
|
||||
path:
|
||||
$ref: "store-path-v1.yaml"
|
||||
title: Output path
|
||||
description: |
|
||||
The output path, if known in advance.
|
||||
|
||||
method:
|
||||
"$ref": "./content-address-v1.yaml#/$defs/method"
|
||||
description: |
|
||||
For an output which will be [content addressed](@docroot@/store/derivation/outputs/content-address.md), a string representing the [method](@docroot@/store/store-object/content-address.md) of content addressing that is chosen.
|
||||
See the linked original definition for further details.
|
||||
hashAlgo:
|
||||
title: Hash algorithm
|
||||
"$ref": "./hash-v1.yaml#/$defs/algorithm"
|
||||
|
||||
hash:
|
||||
type: string
|
||||
title: Expected hash value
|
||||
description: |
|
||||
For fixed-output derivations, the expected content hash in base-16.
|
||||
|
||||
outputName:
|
||||
type: string
|
||||
title: Output name
|
||||
description: Name of the derivation output to depend on
|
||||
|
||||
outputNames:
|
||||
type: array
|
||||
title: Output Names
|
||||
description: Set of names of derivation outputs to depend on
|
||||
items:
|
||||
"$ref": "#/$defs/outputName"
|
||||
|
||||
dynamicOutputs:
|
||||
type: object
|
||||
title: Dynamic Outputs
|
||||
description: |
|
||||
**Experimental feature**: [`dynamic-derivations`](@docroot@/development/experimental-features.md#xp-feature-dynamic-derivations)
|
||||
|
||||
This recursive data type allows for depending on outputs of outputs.
|
||||
properties:
|
||||
outputs:
|
||||
"$ref": "#/$defs/outputNames"
|
||||
dynamicOutputs:
|
||||
"$ref": "#/$defs/dynamicOutputs"
|
||||
1
doc/manual/source/protocols/json/schema/deriving-path-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/deriving-path-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../../../../../src/libstore-tests/data/derived-path
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
"$schema": "http://json-schema.org/draft-04/schema"
|
||||
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/deriving-path-v1.json"
|
||||
title: Deriving Path
|
||||
description: |
|
||||
This schema describes the JSON representation of Nix's [Deriving Path](@docroot@/store/derivation/index.md#deriving-path).
|
||||
oneOf:
|
||||
- title: Constant
|
||||
description: |
|
||||
See [Constant](@docroot@/store/derivation/index.md#deriving-path-constant) deriving path.
|
||||
$ref: "store-path-v1.yaml"
|
||||
- title: Output
|
||||
description: |
|
||||
See [Output](@docroot@/store/derivation/index.md#deriving-path-output) deriving path.
|
||||
type: object
|
||||
properties:
|
||||
drvPath:
|
||||
"$ref": "#"
|
||||
description: |
|
||||
A deriving path to a [Derivation](@docroot@/store/derivation/index.md#store-derivation), whose output is being referred to.
|
||||
output:
|
||||
type: string
|
||||
description: |
|
||||
The name of an output produced by that derivation (e.g. "out", "doc", etc.).
|
||||
required:
|
||||
- drvPath
|
||||
- output
|
||||
additionalProperties: false
|
||||
1
doc/manual/source/protocols/json/schema/hash-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/hash-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../../../../../src/libutil-tests/data/hash/
|
||||
54
doc/manual/source/protocols/json/schema/hash-v1.yaml
Normal file
54
doc/manual/source/protocols/json/schema/hash-v1.yaml
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
"$schema": "http://json-schema.org/draft-04/schema"
|
||||
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/hash-v1.json"
|
||||
title: Hash
|
||||
description: |
|
||||
A cryptographic hash value used throughout Nix for content addressing and integrity verification.
|
||||
|
||||
This schema describes the JSON representation of Nix's `Hash` type.
|
||||
type: object
|
||||
properties:
|
||||
algorithm:
|
||||
"$ref": "#/$defs/algorithm"
|
||||
format:
|
||||
type: string
|
||||
enum:
|
||||
- base64
|
||||
- nix32
|
||||
- base16
|
||||
- sri
|
||||
title: Hash format
|
||||
description: |
|
||||
The encoding format of the hash value.
|
||||
|
||||
- `base64` uses standard Base64 encoding [RFC 4648, section 4](https://datatracker.ietf.org/doc/html/rfc4648#section-4)
|
||||
- `nix32` is Nix-specific base-32 encoding
|
||||
- `base16` is lowercase hexadecimal
|
||||
- `sri` is the [Subresource Integrity format](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity).
|
||||
hash:
|
||||
type: string
|
||||
title: Hash
|
||||
description: |
|
||||
The encoded hash value, itself.
|
||||
|
||||
It is specified in the format specified by the `format` field.
|
||||
It must be the right length for the hash algorithm specified in the `algorithm` field, also.
|
||||
The hash value does not include any algorithm prefix.
|
||||
required:
|
||||
- algorithm
|
||||
- format
|
||||
- hash
|
||||
additionalProperties: false
|
||||
"$defs":
|
||||
algorithm:
|
||||
type: string
|
||||
enum:
|
||||
- blake3
|
||||
- md5
|
||||
- sha1
|
||||
- sha256
|
||||
- sha512
|
||||
title: Hash algorithm
|
||||
description: |
|
||||
The hash algorithm used to compute the hash value.
|
||||
|
||||
`blake3` is currently experimental and requires the [`blake-hashing`](@docroot@/development/experimental-features.md#xp-feature-blake-hashing) experimental feature.
|
||||
1
doc/manual/source/protocols/json/schema/nar-info-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/nar-info-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../../../../../src/libstore-tests/data/nar-info
|
||||
1
doc/manual/source/protocols/json/schema/store-object-info-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/store-object-info-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../../../../../src/libstore-tests/data/path-info
|
||||
|
|
@ -0,0 +1,235 @@
|
|||
"$schema": "http://json-schema.org/draft-07/schema"
|
||||
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/store-object-info-v1.json"
|
||||
title: Store Object Info
|
||||
description: |
|
||||
Information about a [store object](@docroot@/store/store-object.md).
|
||||
|
||||
This schema describes the JSON representation of store object metadata as returned by commands like [`nix path-info --json`](@docroot@/command-ref/new-cli/nix3-path-info.md).
|
||||
|
||||
> **Warning**
|
||||
>
|
||||
> This JSON format is currently
|
||||
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
|
||||
> and subject to change.
|
||||
|
||||
### Field Categories
|
||||
|
||||
Store object information can come in a few different variations.
|
||||
|
||||
Firstly, "impure" fields, which contain non-intrinsic information about the store object, may or may not be included.
|
||||
|
||||
Second, binary cache stores have extra non-intrinsic information about the store objects they contain.
|
||||
|
||||
Thirdly, [`nix path-info --json --closure-size`](@docroot@/command-ref/new-cli/nix3-path-info.html#opt-closure-size) can compute some extra information about not just the single store object in question, but the store object and its [closure](@docroot@/glossary.md#gloss-closure).
|
||||
|
||||
The impure and NAR fields are grouped into separate variants below.
|
||||
See their descriptions for additional information.
|
||||
The closure fields, however, are just included as optional fields, to avoid a combinatorial explosion of variants.
|
||||
|
||||
oneOf:
|
||||
- $ref: "#/$defs/base"
|
||||
|
||||
- $ref: "#/$defs/impure"
|
||||
|
||||
- $ref: "#/$defs/narInfo"
|
||||
|
||||
$defs:
|
||||
base:
|
||||
title: Store Object Info
|
||||
description: |
|
||||
Basic store object metadata containing only intrinsic properties.
|
||||
This is the minimal set of fields that describe what a store object contains.
|
||||
type: object
|
||||
required:
|
||||
- narHash
|
||||
- narSize
|
||||
- references
|
||||
- ca
|
||||
properties:
|
||||
path:
|
||||
type: string
|
||||
title: Store Path
|
||||
description: |
|
||||
[Store path](@docroot@/store/store-path.md) to the given store object.
|
||||
|
||||
Note: This field may not be present in all contexts, such as when the path is used as the key and the store object info is the value in a map.
|
||||
|
||||
narHash:
|
||||
type: string
|
||||
title: NAR Hash
|
||||
description: |
|
||||
Hash of the [file system object](@docroot@/store/file-system-object.md) part of the store object when serialized as a [Nix Archive](@docroot@/store/file-system-object/content-address.md#serial-nix-archive).
|
||||
|
||||
narSize:
|
||||
type: integer
|
||||
minimum: 0
|
||||
title: NAR Size
|
||||
description: |
|
||||
Size of the [file system object](@docroot@/store/file-system-object.md) part of the store object when serialized as a [Nix Archive](@docroot@/store/file-system-object/content-address.md#serial-nix-archive).
|
||||
|
||||
references:
|
||||
type: array
|
||||
title: References
|
||||
description: |
|
||||
An array of [store paths](@docroot@/store/store-path.md), possibly including this one.
|
||||
items:
|
||||
type: string
|
||||
|
||||
ca:
|
||||
type: ["string", "null"]
|
||||
title: Content Address
|
||||
description: |
|
||||
If the store object is [content-addressed](@docroot@/store/store-object/content-address.md),
|
||||
this is the content address of this store object's file system object, used to compute its store path.
|
||||
Otherwise (i.e. if it is [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object)), this is `null`.
|
||||
additionalProperties: false
|
||||
|
||||
impure:
|
||||
title: Store Object Info with Impure Fields
|
||||
description: |
|
||||
Store object metadata including impure fields that are not *intrinsic* properties.
|
||||
In other words, the same store object in different stores could have different values for these impure fields.
|
||||
type: object
|
||||
required:
|
||||
- narHash
|
||||
- narSize
|
||||
- references
|
||||
- ca
|
||||
# impure
|
||||
- deriver
|
||||
- registrationTime
|
||||
- ultimate
|
||||
- signatures
|
||||
properties:
|
||||
path: { $ref: "#/$defs/base/properties/path" }
|
||||
narHash: { $ref: "#/$defs/base/properties/narHash" }
|
||||
narSize: { $ref: "#/$defs/base/properties/narSize" }
|
||||
references: { $ref: "#/$defs/base/properties/references" }
|
||||
ca: { $ref: "#/$defs/base/properties/ca" }
|
||||
deriver:
|
||||
type: ["string", "null"]
|
||||
title: Deriver
|
||||
description: |
|
||||
If known, the path to the [store derivation](@docroot@/glossary.md#gloss-store-derivation) from which this store object was produced.
|
||||
Otherwise `null`.
|
||||
|
||||
> This is an "impure" field that may not be included in certain contexts.
|
||||
|
||||
registrationTime:
|
||||
type: ["integer", "null"]
|
||||
title: Registration Time
|
||||
description: |
|
||||
If known, when this derivation was added to the store (Unix timestamp).
|
||||
Otherwise `null`.
|
||||
|
||||
> This is an "impure" field that may not be included in certain contexts.
|
||||
|
||||
ultimate:
|
||||
type: boolean
|
||||
title: Ultimate
|
||||
description: |
|
||||
Whether this store object is trusted because we built it ourselves, rather than substituted a build product from elsewhere.
|
||||
|
||||
> This is an "impure" field that may not be included in certain contexts.
|
||||
|
||||
signatures:
|
||||
type: array
|
||||
title: Signatures
|
||||
description: |
|
||||
Signatures claiming that this store object is what it claims to be.
|
||||
Not relevant for [content-addressed](@docroot@/store/store-object/content-address.md) store objects,
|
||||
but useful for [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object) store objects.
|
||||
|
||||
> This is an "impure" field that may not be included in certain contexts.
|
||||
items:
|
||||
type: string
|
||||
|
||||
# Computed closure fields
|
||||
closureSize:
|
||||
type: integer
|
||||
minimum: 0
|
||||
title: Closure Size
|
||||
description: |
|
||||
The total size of this store object and every other object in its [closure](@docroot@/glossary.md#gloss-closure).
|
||||
|
||||
> This field is not stored at all, but computed by traversing the other fields across all the store objects in a closure.
|
||||
additionalProperties: false
|
||||
|
||||
narInfo:
|
||||
title: Store Object Info with Impure fields and NAR Info
|
||||
description: |
|
||||
The store object info in the "binary cache" family of Nix store type contain extra information pertaining to *downloads* of the store object in question.
|
||||
(This store info is called "NAR info", since the downloads take the form of [Nix Archives](@docroot@/store/file-system-object/content-address.md#serial-nix-archive), and the metadata is served in a file with a `.narinfo` extension.)
|
||||
|
||||
This download information, being specific to how the store object happens to be stored and transferred, is also considered to be non-intrinsic / impure.
|
||||
type: object
|
||||
required:
|
||||
- narHash
|
||||
- narSize
|
||||
- references
|
||||
- ca
|
||||
# impure
|
||||
- deriver
|
||||
- registrationTime
|
||||
- ultimate
|
||||
- signatures
|
||||
# nar
|
||||
- url
|
||||
- compression
|
||||
- downloadHash
|
||||
- downloadSize
|
||||
properties:
|
||||
path: { $ref: "#/$defs/base/properties/path" }
|
||||
narHash: { $ref: "#/$defs/base/properties/narHash" }
|
||||
narSize: { $ref: "#/$defs/base/properties/narSize" }
|
||||
references: { $ref: "#/$defs/base/properties/references" }
|
||||
ca: { $ref: "#/$defs/base/properties/ca" }
|
||||
deriver: { $ref: "#/$defs/impure/properties/deriver" }
|
||||
registrationTime: { $ref: "#/$defs/impure/properties/registrationTime" }
|
||||
ultimate: { $ref: "#/$defs/impure/properties/ultimate" }
|
||||
signatures: { $ref: "#/$defs/impure/properties/signatures" }
|
||||
closureSize: { $ref: "#/$defs/impure/properties/closureSize" }
|
||||
url:
|
||||
type: string
|
||||
title: URL
|
||||
description: |
|
||||
Where to download a compressed archive of the file system objects of this store object.
|
||||
|
||||
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
|
||||
|
||||
compression:
|
||||
type: string
|
||||
title: Compression
|
||||
description: |
|
||||
The compression format that the archive is in.
|
||||
|
||||
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
|
||||
|
||||
downloadHash:
|
||||
type: string
|
||||
title: Download Hash
|
||||
description: |
|
||||
A digest for the compressed archive itself, as opposed to the data contained within.
|
||||
|
||||
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
|
||||
|
||||
downloadSize:
|
||||
type: integer
|
||||
minimum: 0
|
||||
title: Download Size
|
||||
description: |
|
||||
The size of the compressed archive itself.
|
||||
|
||||
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
|
||||
|
||||
closureDownloadSize:
|
||||
type: integer
|
||||
minimum: 0
|
||||
title: Closure Download Size
|
||||
description: |
|
||||
The total size of the compressed archive itself for this object, and the compressed archive of every object in this object's [closure](@docroot@/glossary.md#gloss-closure).
|
||||
|
||||
> This is an impure "`.narinfo`" field that may not be included in certain contexts.
|
||||
|
||||
> This field is not stored at all, but computed by traversing the other fields across all the store objects in a closure.
|
||||
additionalProperties: false
|
||||
1
doc/manual/source/protocols/json/schema/store-path-v1
Symbolic link
1
doc/manual/source/protocols/json/schema/store-path-v1
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../../../../../src/libstore-tests/data/store-path
|
||||
32
doc/manual/source/protocols/json/schema/store-path-v1.yaml
Normal file
32
doc/manual/source/protocols/json/schema/store-path-v1.yaml
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
"$schema": "http://json-schema.org/draft-07/schema"
|
||||
"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/store-path-v1.json"
|
||||
title: Store Path
|
||||
description: |
|
||||
A [store path](@docroot@/store/store-path.md) identifying a store object.
|
||||
|
||||
This schema describes the JSON representation of store paths as used in various Nix JSON APIs.
|
||||
|
||||
> **Warning**
|
||||
>
|
||||
> This JSON format is currently
|
||||
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
|
||||
> and subject to change.
|
||||
|
||||
## Format
|
||||
|
||||
Store paths in JSON are represented as strings containing just the hash and name portion, without the store directory prefix.
|
||||
|
||||
For example: `"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"`
|
||||
|
||||
(If the store dir is `/nix/store`, then this corresponds to the path `/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv`.)
|
||||
|
||||
## Structure
|
||||
|
||||
The format follows this pattern: `${digest}-${name}`
|
||||
|
||||
- **hash**: Digest rendered in a custom variant of [Base32](https://en.wikipedia.org/wiki/Base32) (20 arbitrary bytes become 32 ASCII characters)
|
||||
- **name**: The package name and optional version/suffix information
|
||||
|
||||
type: string
|
||||
pattern: "^[0123456789abcdfghijklmnpqrsvwxyz]{32}-.+$"
|
||||
minLength: 34
|
||||
|
|
@ -1,102 +1,45 @@
|
|||
# Store object info JSON format
|
||||
{{#include store-object-info-v1-fixed.md}}
|
||||
|
||||
> **Warning**
|
||||
>
|
||||
> This JSON format is currently
|
||||
> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command)
|
||||
> and subject to change.
|
||||
## Examples
|
||||
|
||||
Info about a [store object].
|
||||
### Minimal store object (content-addressed)
|
||||
|
||||
* `path`:
|
||||
```json
|
||||
{{#include schema/store-object-info-v1/pure.json}}
|
||||
```
|
||||
|
||||
[Store path][store path] to the given store object.
|
||||
### Store object with impure fields
|
||||
|
||||
* `narHash`:
|
||||
```json
|
||||
{{#include schema/store-object-info-v1/impure.json}}
|
||||
```
|
||||
|
||||
Hash of the [file system object] part of the store object when serialized as a [Nix Archive].
|
||||
### Minimal store object (empty)
|
||||
|
||||
* `narSize`:
|
||||
```json
|
||||
{{#include schema/store-object-info-v1/empty_pure.json}}
|
||||
```
|
||||
|
||||
Size of the [file system object] part of the store object when serialized as a [Nix Archive].
|
||||
### Store object with all impure fields
|
||||
|
||||
* `references`:
|
||||
```json
|
||||
{{#include schema/store-object-info-v1/empty_impure.json}}
|
||||
```
|
||||
|
||||
An array of [store paths][store path], possibly including this one.
|
||||
### NAR info (minimal)
|
||||
|
||||
* `ca`:
|
||||
```json
|
||||
{{#include schema/nar-info-v1/pure.json}}
|
||||
```
|
||||
|
||||
If the store object is [content-addressed],
|
||||
this is the content address of this store object's file system object, used to compute its store path.
|
||||
Otherwise (i.e. if it is [input-addressed]), this is `null`.
|
||||
### NAR info (with binary cache fields)
|
||||
|
||||
[store path]: @docroot@/store/store-path.md
|
||||
[file system object]: @docroot@/store/file-system-object.md
|
||||
[Nix Archive]: @docroot@/store/file-system-object/content-address.md#serial-nix-archive
|
||||
```json
|
||||
{{#include schema/nar-info-v1/impure.json}}
|
||||
```
|
||||
|
||||
## Impure fields
|
||||
<!-- need to convert YAML to JSON first
|
||||
## Raw Schema
|
||||
|
||||
These are not intrinsic properties of the store object.
|
||||
In other words, the same store object residing in different store could have different values for these properties.
|
||||
|
||||
* `deriver`:
|
||||
|
||||
If known, the path to the [store derivation] from which this store object was produced.
|
||||
Otherwise `null`.
|
||||
|
||||
[store derivation]: @docroot@/glossary.md#gloss-store-derivation
|
||||
|
||||
* `registrationTime` (optional):
|
||||
|
||||
If known, when this derivation was added to the store.
|
||||
Otherwise `null`.
|
||||
|
||||
* `ultimate`:
|
||||
|
||||
Whether this store object is trusted because we built it ourselves, rather than substituted a build product from elsewhere.
|
||||
|
||||
* `signatures`:
|
||||
|
||||
Signatures claiming that this store object is what it claims to be.
|
||||
Not relevant for [content-addressed] store objects,
|
||||
but useful for [input-addressed] store objects.
|
||||
|
||||
[content-addressed]: @docroot@/store/store-object/content-address.md
|
||||
[input-addressed]: @docroot@/glossary.md#gloss-input-addressed-store-object
|
||||
|
||||
### `.narinfo` extra fields
|
||||
|
||||
This meta data is specific to the "binary cache" family of Nix store types.
|
||||
This information is not intrinsic to the store object, but about how it is stored.
|
||||
|
||||
* `url`:
|
||||
|
||||
Where to download a compressed archive of the file system objects of this store object.
|
||||
|
||||
* `compression`:
|
||||
|
||||
The compression format that the archive is in.
|
||||
|
||||
* `fileHash`:
|
||||
|
||||
A digest for the compressed archive itself, as opposed to the data contained within.
|
||||
|
||||
* `fileSize`:
|
||||
|
||||
The size of the compressed archive itself.
|
||||
|
||||
## Computed closure fields
|
||||
|
||||
These fields are not stored at all, but computed by traversing the other fields across all the store objects in a [closure].
|
||||
|
||||
* `closureSize`:
|
||||
|
||||
The total size of the compressed archive itself for this object, and the compressed archive of every object in this object's [closure].
|
||||
|
||||
### `.narinfo` extra fields
|
||||
|
||||
* `closureSize`:
|
||||
|
||||
The total size of this store object and every other object in its [closure].
|
||||
|
||||
[closure]: @docroot@/glossary.md#gloss-closure
|
||||
[JSON Schema for Store Object Info v1](schema/store-object-info-v1.json)
|
||||
-->
|
||||
|
|
|
|||
15
doc/manual/source/protocols/json/store-path.md
Normal file
15
doc/manual/source/protocols/json/store-path.md
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
{{#include store-path-v1-fixed.md}}
|
||||
|
||||
## Examples
|
||||
|
||||
### Simple store path
|
||||
|
||||
```json
|
||||
{{#include schema/store-path-v1/simple.json}}
|
||||
```
|
||||
|
||||
<!-- need to convert YAML to JSON first
|
||||
## Raw Schema
|
||||
|
||||
[JSON Schema for Store Path v1](schema/store-path-v1.json)
|
||||
-->
|
||||
2
doc/manual/source/protocols/meson.build
Normal file
2
doc/manual/source/protocols/meson.build
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
# Process JSON schema documentation
|
||||
subdir('json')
|
||||
130
doc/manual/source/release-notes/rl-2.32.md
Normal file
130
doc/manual/source/release-notes/rl-2.32.md
Normal file
|
|
@ -0,0 +1,130 @@
|
|||
# Release 2.32.0 (2025-10-06)
|
||||
|
||||
## Incompatible changes
|
||||
|
||||
- Removed support for daemons and clients older than Nix 2.0 [#13951](https://github.com/NixOS/nix/pull/13951)
|
||||
|
||||
We have dropped support in the daemon worker protocol for daemons and clients that don't speak at least version 18 of the protocol. This first Nix release that supports this version is Nix 2.0, released in February 2018.
|
||||
|
||||
- Derivation JSON format now uses store path basenames only [#13570](https://github.com/NixOS/nix/issues/13570) [#13980](https://github.com/NixOS/nix/pull/13980)
|
||||
|
||||
Experience with many JSON frameworks (e.g. nlohmann/json in C++, Serde in Rust, and Aeson in Haskell) has shown that the use of the store directory in JSON formats is an impediment to systematic JSON formats, because it requires the serializer/deserializer to take an extra parameter (the store directory).
|
||||
|
||||
We ultimately want to rectify this issue with all JSON formats to the extent allowed by our stability promises. To start with, we are changing the JSON format for derivations because the `nix derivation` commands are — in addition to being formally unstable — less widely used than other unstable commands.
|
||||
|
||||
See the documentation on the [JSON format for derivations](@docroot@/protocols/json/derivation.md) for further details.
|
||||
|
||||
- C API: `nix_get_attr_name_byidx`, `nix_get_attr_byidx` take a `nix_value *` instead of `const nix_value *` [#13987](https://github.com/NixOS/nix/pull/13987)
|
||||
|
||||
In order to accommodate a more optimized internal representation of attribute set merges these functions require
|
||||
a mutable `nix_value *` that might be modified on access. This does *not* break the ABI of these functions.
|
||||
|
||||
## New features
|
||||
|
||||
- C API: Add lazy attribute and list item accessors [#14030](https://github.com/NixOS/nix/pull/14030)
|
||||
|
||||
The C API now includes lazy accessor functions for retrieving values from lists and attribute sets without forcing evaluation:
|
||||
|
||||
- `nix_get_list_byidx_lazy()` - Get a list element without forcing its evaluation
|
||||
- `nix_get_attr_byname_lazy()` - Get an attribute value by name without forcing evaluation
|
||||
- `nix_get_attr_byidx_lazy()` - Get an attribute by index without forcing evaluation
|
||||
|
||||
These functions are useful when forwarding unevaluated sub-values to other lists, attribute sets, or function calls. They allow more efficient handling of Nix values by deferring evaluation until actually needed.
|
||||
|
||||
Additionally, bounds checking has been improved for all `_byidx` functions to properly validate indices before access, preventing potential out-of-bounds errors.
|
||||
|
||||
The documentation for `NIX_ERR_KEY` error handling has also been clarified to specify when this error code is returned.
|
||||
|
||||
- HTTP binary caches now support transparent compression for metadata
|
||||
|
||||
HTTP binary cache stores can now compress `.narinfo`, `.ls`, and build log files before uploading them,
|
||||
reducing bandwidth usage and storage requirements. The compression is applied transparently using the
|
||||
`Content-Encoding` header, allowing compatible clients to automatically decompress the files.
|
||||
|
||||
Three new configuration options control this behavior:
|
||||
- `narinfo-compression`: Compression method for `.narinfo` files
|
||||
- `ls-compression`: Compression method for `.ls` files
|
||||
- `log-compression`: Compression method for build logs in `log/` directory
|
||||
|
||||
Example usage:
|
||||
```
|
||||
nix copy --to 'http://cache.example.com?narinfo-compression=gzip&ls-compression=gzip' /nix/store/...
|
||||
nix store copy-log --to 'http://cache.example.com?log-compression=br' /nix/store/...
|
||||
```
|
||||
|
||||
- Temporary build directories no longer include derivation names [#13839](https://github.com/NixOS/nix/pull/13839)
|
||||
|
||||
Temporary build directories created during derivation builds no longer include the derivation name in their path to avoid build failures when the derivation name is too long. This change ensures predictable prefix lengths for build directories under `/nix/var/nix/builds`.
|
||||
|
||||
- External derivation builders [#14145](https://github.com/NixOS/nix/pull/14145)
|
||||
|
||||
These are helper programs that Nix calls to perform derivations for specified system types, e.g. by using QEMU to emulate a different type of platform. For more information, see the [`external-builders` setting](../command-ref/conf-file.md#conf-external-builders).
|
||||
|
||||
This is currently an experimental feature.
|
||||
|
||||
## Performance improvements
|
||||
|
||||
- Optimize memory usage of attribute set merges [#13987](https://github.com/NixOS/nix/pull/13987)
|
||||
|
||||
[Attribute set update operations](@docroot@/language/operators.md#update) have been optimized to
|
||||
reduce reallocations in cases when the second operand is small.
|
||||
|
||||
For typical evaluations of nixpkgs this optimization leads to ~20% less memory allocated in total
|
||||
without significantly affecting evaluation performance.
|
||||
|
||||
See [eval-attrset-update-layer-rhs-threshold](@docroot@/command-ref/conf-file.md#conf-eval-attrset-update-layer-rhs-threshold)
|
||||
|
||||
- Substituted flake inputs are no longer re-copied to the store [#14041](https://github.com/NixOS/nix/pull/14041)
|
||||
|
||||
Since 2.25, Nix would fail to store a cache entry for substituted flake inputs, which in turn would cause them to be re-copied to the store on initial evaluation. Caching these inputs results in a near doubling of performance in some cases — especially on I/O-bound machines and when using commands that fetch many inputs, like `nix flake [archive|prefetch-inputs]`.
|
||||
|
||||
- `nix flake check` now skips derivations that can be substituted [#13574](https://github.com/NixOS/nix/pull/13574)
|
||||
|
||||
Previously, `nix flake check` would evaluate and build/substitute all
|
||||
derivations. Now, it will skip downloading derivations that can be substituted.
|
||||
This can drastically decrease the time invocations take in environments where
|
||||
checks may already be cached (like in CI).
|
||||
|
||||
- `fetchTarball` and `fetchurl` now correctly substitute (#14138)
|
||||
|
||||
At some point we stopped substituting calls to `fetchTarball` and `fetchurl` with a set `narHash` to avoid incorrectly substituting things in `fetchTree`, even though it would be safe to substitute when calling the legacy `fetch{Tarball,url}`. This fixes that regression where it is safe.
|
||||
- Started moving AST allocations into a bump allocator [#14088](https://github.com/NixOS/nix/issues/14088)
|
||||
|
||||
This leaves smaller, immutable structures in the AST. So far this saves about 2% memory on a NixOS config evaluation.
|
||||
## Contributors
|
||||
|
||||
This release was made possible by the following 32 contributors:
|
||||
|
||||
- Farid Zakaria [**(@fzakaria)**](https://github.com/fzakaria)
|
||||
- dram [**(@dramforever)**](https://github.com/dramforever)
|
||||
- Ephraim Siegfried [**(@EphraimSiegfried)**](https://github.com/EphraimSiegfried)
|
||||
- Robert Hensing [**(@roberth)**](https://github.com/roberth)
|
||||
- Taeer Bar-Yam [**(@Radvendii)**](https://github.com/Radvendii)
|
||||
- Emily [**(@emilazy)**](https://github.com/emilazy)
|
||||
- Jens Petersen [**(@juhp)**](https://github.com/juhp)
|
||||
- Bernardo Meurer [**(@lovesegfault)**](https://github.com/lovesegfault)
|
||||
- Jörg Thalheim [**(@Mic92)**](https://github.com/Mic92)
|
||||
- Leandro Emmanuel Reina Kiperman [**(@kip93)**](https://github.com/kip93)
|
||||
- Marie [**(@NyCodeGHG)**](https://github.com/NyCodeGHG)
|
||||
- Ethan Evans [**(@ethanavatar)**](https://github.com/ethanavatar)
|
||||
- Yaroslav Bolyukin [**(@CertainLach)**](https://github.com/CertainLach)
|
||||
- Matej Urbas [**(@urbas)**](https://github.com/urbas)
|
||||
- Jami Kettunen [**(@JamiKettunen)**](https://github.com/JamiKettunen)
|
||||
- Clayton [**(@netadr)**](https://github.com/netadr)
|
||||
- Grégory Marti [**(@gmarti)**](https://github.com/gmarti)
|
||||
- Eelco Dolstra [**(@edolstra)**](https://github.com/edolstra)
|
||||
- rszyma [**(@rszyma)**](https://github.com/rszyma)
|
||||
- Philip Wilk [**(@philipwilk)**](https://github.com/philipwilk)
|
||||
- John Ericson [**(@Ericson2314)**](https://github.com/Ericson2314)
|
||||
- Tom Westerhout [**(@twesterhout)**](https://github.com/twesterhout)
|
||||
- Tristan Ross [**(@RossComputerGuy)**](https://github.com/RossComputerGuy)
|
||||
- Sergei Zimmerman [**(@xokdvium)**](https://github.com/xokdvium)
|
||||
- Jean-François Roche [**(@jfroche)**](https://github.com/jfroche)
|
||||
- Seth Flynn [**(@getchoo)**](https://github.com/getchoo)
|
||||
- éclairevoyant [**(@eclairevoyant)**](https://github.com/eclairevoyant)
|
||||
- Glen Huang [**(@hgl)**](https://github.com/hgl)
|
||||
- osman - オスマン [**(@osbm)**](https://github.com/osbm)
|
||||
- David McFarland [**(@corngood)**](https://github.com/corngood)
|
||||
- Cole Helbling [**(@cole-h)**](https://github.com/cole-h)
|
||||
- Sinan Mohd [**(@sinanmohd)**](https://github.com/sinanmohd)
|
||||
- Philipp Otterbein
|
||||
|
|
@ -106,7 +106,7 @@ The system type on which the [`builder`](#attr-builder) executable is meant to b
|
|||
|
||||
A necessary condition for Nix to schedule a given derivation on some [Nix instance] is for the "system" of that derivation to match that instance's [`system` configuration option] or [`extra-platforms` configuration option].
|
||||
|
||||
By putting the `system` in each derivation, Nix allows *heterogenous* build plans, where not all steps can be run on the same machine or same sort of machine.
|
||||
By putting the `system` in each derivation, Nix allows *heterogeneous* build plans, where not all steps can be run on the same machine or same sort of machine.
|
||||
Nix can schedule builds such that it automatically builds on other platforms by [forwarding build requests](@docroot@/advanced-topics/distributed-builds.md) to other Nix instances.
|
||||
|
||||
[`system` configuration option]: @docroot@/command-ref/conf-file.md#conf-system
|
||||
|
|
|
|||
25
flake.nix
25
flake.nix
|
|
@ -413,6 +413,10 @@
|
|||
supportsCross = false;
|
||||
};
|
||||
|
||||
"nix-json-schema-checks" = {
|
||||
supportsCross = false;
|
||||
};
|
||||
|
||||
"nix-perl-bindings" = {
|
||||
supportsCross = false;
|
||||
};
|
||||
|
|
@ -467,6 +471,27 @@
|
|||
}
|
||||
);
|
||||
|
||||
apps = forAllSystems (
|
||||
system:
|
||||
let
|
||||
pkgs = nixpkgsFor.${system}.native;
|
||||
opener = if pkgs.stdenv.isDarwin then "open" else "xdg-open";
|
||||
in
|
||||
{
|
||||
open-manual = {
|
||||
type = "app";
|
||||
program = "${pkgs.writeShellScript "open-nix-manual" ''
|
||||
manual_path="${self.packages.${system}.nix-manual}/share/doc/nix/manual/index.html"
|
||||
if ! ${opener} "$manual_path"; then
|
||||
echo "Failed to open manual with ${opener}. Manual is located at:"
|
||||
echo "$manual_path"
|
||||
fi
|
||||
''}";
|
||||
meta.description = "Open the Nix manual in your browser";
|
||||
};
|
||||
}
|
||||
);
|
||||
|
||||
devShells =
|
||||
let
|
||||
makeShell = import ./packaging/dev-shell.nix { inherit lib devFlake; };
|
||||
|
|
|
|||
|
|
@ -203,5 +203,26 @@
|
|||
"ConnorBaker01@Gmail.com": "ConnorBaker",
|
||||
"jsoo1@asu.edu": "jsoo1",
|
||||
"hsngrmpf+github@gmail.com": "DavHau",
|
||||
"matthew@floxdev.com": "mkenigs"
|
||||
"matthew@floxdev.com": "mkenigs",
|
||||
"taeer@bar-yam.me": "Radvendii",
|
||||
"beme@anthropic.com": "lovesegfault",
|
||||
"osbm@osbm.dev": "osbm",
|
||||
"jami.kettunen@protonmail.com": "JamiKettunen",
|
||||
"ephraim.siegfried@hotmail.com": "EphraimSiegfried",
|
||||
"rszyma.dev@gmail.com": "rszyma",
|
||||
"tristan.ross@determinate.systems": "RossComputerGuy",
|
||||
"corngood@gmail.com": "corngood",
|
||||
"jfroche@pyxel.be": "jfroche",
|
||||
"848000+eclairevoyant@users.noreply.github.com": "eclairevoyant",
|
||||
"petersen@redhat.com": "juhp",
|
||||
"dramforever@live.com": "dramforever",
|
||||
"me@glenhuang.com": "hgl",
|
||||
"philip.wilk@fivium.co.uk": "philipwilk",
|
||||
"me@nycode.dev": "NyCodeGHG",
|
||||
"14264576+twesterhout@users.noreply.github.com": "twesterhout",
|
||||
"sinan@sinanmohd.com": "sinanmohd",
|
||||
"42688647+netadr@users.noreply.github.com": "netadr",
|
||||
"matej.urbas@gmail.com": "urbas",
|
||||
"ethanalexevans@gmail.com": "ethanavatar",
|
||||
"greg.marti@gmail.com": "gmarti"
|
||||
}
|
||||
|
|
@ -177,5 +177,24 @@
|
|||
"avnik": "Alexander V. Nikolaev",
|
||||
"DavHau": null,
|
||||
"aln730": "AGawas",
|
||||
"vog": "Volker Diels-Grabsch"
|
||||
"vog": "Volker Diels-Grabsch",
|
||||
"corngood": "David McFarland",
|
||||
"twesterhout": "Tom Westerhout",
|
||||
"JamiKettunen": "Jami Kettunen",
|
||||
"dramforever": "dram",
|
||||
"philipwilk": "Philip Wilk",
|
||||
"netadr": "Clayton",
|
||||
"NyCodeGHG": "Marie",
|
||||
"jfroche": "Jean-Fran\u00e7ois Roche",
|
||||
"urbas": "Matej Urbas",
|
||||
"osbm": "osman - \u30aa\u30b9\u30de\u30f3",
|
||||
"rszyma": null,
|
||||
"eclairevoyant": "\u00e9clairevoyant",
|
||||
"Radvendii": "Taeer Bar-Yam",
|
||||
"sinanmohd": "Sinan Mohd",
|
||||
"ethanavatar": "Ethan Evans",
|
||||
"gmarti": "Gr\u00e9gory Marti",
|
||||
"lovesegfault": "Bernardo Meurer",
|
||||
"EphraimSiegfried": "Ephraim Siegfried",
|
||||
"hgl": "Glen Huang"
|
||||
}
|
||||
|
|
@ -104,88 +104,6 @@
|
|||
};
|
||||
shellcheck = {
|
||||
enable = true;
|
||||
excludes = [
|
||||
# We haven't linted these files yet
|
||||
''^tests/functional/flakes/prefetch\.sh$''
|
||||
''^tests/functional/flakes/run\.sh$''
|
||||
''^tests/functional/flakes/show\.sh$''
|
||||
''^tests/functional/formatter\.sh$''
|
||||
''^tests/functional/formatter\.simple\.sh$''
|
||||
''^tests/functional/gc-auto\.sh$''
|
||||
''^tests/functional/gc-concurrent\.builder\.sh$''
|
||||
''^tests/functional/gc-concurrent\.sh$''
|
||||
''^tests/functional/gc-concurrent2\.builder\.sh$''
|
||||
''^tests/functional/gc-non-blocking\.sh$''
|
||||
''^tests/functional/hash-convert\.sh$''
|
||||
''^tests/functional/impure-derivations\.sh$''
|
||||
''^tests/functional/impure-eval\.sh$''
|
||||
''^tests/functional/install-darwin\.sh$''
|
||||
''^tests/functional/legacy-ssh-store\.sh$''
|
||||
''^tests/functional/linux-sandbox\.sh$''
|
||||
''^tests/functional/logging\.sh$''
|
||||
''^tests/functional/misc\.sh$''
|
||||
''^tests/functional/multiple-outputs\.sh$''
|
||||
''^tests/functional/nested-sandboxing\.sh$''
|
||||
''^tests/functional/nested-sandboxing/command\.sh$''
|
||||
''^tests/functional/nix-build\.sh$''
|
||||
''^tests/functional/nix-channel\.sh$''
|
||||
''^tests/functional/nix-collect-garbage-d\.sh$''
|
||||
''^tests/functional/nix-copy-ssh-common\.sh$''
|
||||
''^tests/functional/nix-copy-ssh-ng\.sh$''
|
||||
''^tests/functional/nix-copy-ssh\.sh$''
|
||||
''^tests/functional/nix-daemon-untrusting\.sh$''
|
||||
''^tests/functional/nix-profile\.sh$''
|
||||
''^tests/functional/nix-shell\.sh$''
|
||||
''^tests/functional/nix_path\.sh$''
|
||||
''^tests/functional/optimise-store\.sh$''
|
||||
''^tests/functional/output-normalization\.sh$''
|
||||
''^tests/functional/parallel\.builder\.sh$''
|
||||
''^tests/functional/parallel\.sh$''
|
||||
''^tests/functional/pass-as-file\.sh$''
|
||||
''^tests/functional/path-from-hash-part\.sh$''
|
||||
''^tests/functional/path-info\.sh$''
|
||||
''^tests/functional/placeholders\.sh$''
|
||||
''^tests/functional/post-hook\.sh$''
|
||||
''^tests/functional/pure-eval\.sh$''
|
||||
''^tests/functional/push-to-store-old\.sh$''
|
||||
''^tests/functional/push-to-store\.sh$''
|
||||
''^tests/functional/read-only-store\.sh$''
|
||||
''^tests/functional/readfile-context\.sh$''
|
||||
''^tests/functional/recursive\.sh$''
|
||||
''^tests/functional/referrers\.sh$''
|
||||
''^tests/functional/remote-store\.sh$''
|
||||
''^tests/functional/repair\.sh$''
|
||||
''^tests/functional/restricted\.sh$''
|
||||
''^tests/functional/search\.sh$''
|
||||
''^tests/functional/secure-drv-outputs\.sh$''
|
||||
''^tests/functional/selfref-gc\.sh$''
|
||||
''^tests/functional/shell\.shebang\.sh$''
|
||||
''^tests/functional/simple\.builder\.sh$''
|
||||
''^tests/functional/supplementary-groups\.sh$''
|
||||
''^tests/functional/toString-path\.sh$''
|
||||
''^tests/functional/user-envs-migration\.sh$''
|
||||
''^tests/functional/user-envs-test-case\.sh$''
|
||||
''^tests/functional/user-envs\.builder\.sh$''
|
||||
''^tests/functional/user-envs\.sh$''
|
||||
''^tests/functional/why-depends\.sh$''
|
||||
|
||||
# Content-addressed test files that use recursive-*looking* sourcing
|
||||
# (cd .. && source <self>), causing shellcheck to loop
|
||||
# They're small wrapper scripts with not a lot going on
|
||||
''^tests/functional/ca/build-delete\.sh$''
|
||||
''^tests/functional/ca/build-dry\.sh$''
|
||||
''^tests/functional/ca/eval-store\.sh$''
|
||||
''^tests/functional/ca/gc\.sh$''
|
||||
''^tests/functional/ca/import-from-derivation\.sh$''
|
||||
''^tests/functional/ca/multiple-outputs\.sh$''
|
||||
''^tests/functional/ca/new-build-cmd\.sh$''
|
||||
''^tests/functional/ca/nix-shell\.sh$''
|
||||
''^tests/functional/ca/post-hook\.sh$''
|
||||
''^tests/functional/ca/recursive\.sh$''
|
||||
''^tests/functional/ca/repl\.sh$''
|
||||
''^tests/functional/ca/selfref-gc\.sh$''
|
||||
''^tests/functional/ca/why-depends\.sh$''
|
||||
];
|
||||
};
|
||||
};
|
||||
};
|
||||
|
|
|
|||
|
|
@ -142,7 +142,6 @@ release:
|
|||
$ git pull
|
||||
$ NEW_VERSION=2.13.0
|
||||
$ echo $NEW_VERSION > .version
|
||||
$ ... edit .mergify.yml to add the previous version ...
|
||||
$ git checkout -b bump-$NEW_VERSION
|
||||
$ git commit -a -m 'Bump version'
|
||||
$ git push --set-upstream origin bump-$NEW_VERSION
|
||||
|
|
|
|||
|
|
@ -60,3 +60,4 @@ if get_option('unit-tests')
|
|||
subproject('libflake-tests')
|
||||
endif
|
||||
subproject('nix-functional-tests')
|
||||
subproject('json-schema-checks')
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
# shellcheck disable=all
|
||||
#compdef nix
|
||||
# shellcheck disable=all
|
||||
|
||||
function _nix() {
|
||||
local ifs_bk="$IFS"
|
||||
|
|
|
|||
|
|
@ -0,0 +1,6 @@
|
|||
extern "C" [[gnu::retain, gnu::weak]] const char * __asan_default_options()
|
||||
{
|
||||
// We leak a bunch of memory knowingly on purpose. It's not worthwhile to
|
||||
// diagnose that memory being leaked for now.
|
||||
return "abort_on_error=1:print_summary=1:detect_leaks=0:detect_odr_violation=0";
|
||||
}
|
||||
|
|
@ -1,7 +1,3 @@
|
|||
asan_test_options_env = {
|
||||
'ASAN_OPTIONS' : 'abort_on_error=1:print_summary=1:detect_leaks=0',
|
||||
}
|
||||
|
||||
# Clang gets grumpy about missing libasan symbols if -shared-libasan is not
|
||||
# passed when building shared libs, at least on Linux
|
||||
if cxx.get_id() == 'clang' and ('address' in get_option('b_sanitize') or 'undefined' in get_option(
|
||||
|
|
@ -10,3 +6,6 @@ if cxx.get_id() == 'clang' and ('address' in get_option('b_sanitize') or 'undefi
|
|||
add_project_link_arguments('-shared-libasan', language : 'cpp')
|
||||
endif
|
||||
|
||||
if 'address' in get_option('b_sanitize')
|
||||
deps_other += declare_dependency(sources : 'asan-options.cc')
|
||||
endif
|
||||
32
nix-meson-build-support/common/assert-fail/meson.build
Normal file
32
nix-meson-build-support/common/assert-fail/meson.build
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
can_wrap_assert_fail_test_code = '''
|
||||
#include <cstdlib>
|
||||
#include <cassert>
|
||||
|
||||
int main()
|
||||
{
|
||||
assert(0);
|
||||
}
|
||||
|
||||
extern "C" void * __real___assert_fail(const char *, const char *, unsigned int, const char *);
|
||||
|
||||
extern "C" void *
|
||||
__wrap___assert_fail(const char *, const char *, unsigned int, const char *)
|
||||
{
|
||||
return __real___assert_fail(nullptr, nullptr, 0, nullptr);
|
||||
}
|
||||
'''
|
||||
|
||||
wrap_assert_fail_args = [ '-Wl,--wrap=__assert_fail' ]
|
||||
|
||||
can_wrap_assert_fail = cxx.links(
|
||||
can_wrap_assert_fail_test_code,
|
||||
args : wrap_assert_fail_args,
|
||||
name : 'linker can wrap __assert_fail',
|
||||
)
|
||||
|
||||
if can_wrap_assert_fail
|
||||
deps_other += declare_dependency(
|
||||
sources : 'wrap-assert-fail.cc',
|
||||
link_args : wrap_assert_fail_args,
|
||||
)
|
||||
endif
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
#include "nix/util/error.hh"
|
||||
|
||||
#include <cstdio>
|
||||
#include <cstdlib>
|
||||
#include <cinttypes>
|
||||
#include <string_view>
|
||||
|
||||
extern "C" [[noreturn]] void __attribute__((weak))
|
||||
__wrap___assert_fail(const char * assertion, const char * file, unsigned int line, const char * function)
|
||||
{
|
||||
char buf[512];
|
||||
int n =
|
||||
snprintf(buf, sizeof(buf), "Assertion '%s' failed in %s at %s:%" PRIuLEAST32, assertion, function, file, line);
|
||||
if (n < 0)
|
||||
nix::panic("Assertion failed and could not format error message");
|
||||
nix::panic(std::string_view(buf, std::min(static_cast<int>(sizeof(buf)), n)));
|
||||
}
|
||||
|
|
@ -42,5 +42,8 @@ if cxx.get_id() == 'clang'
|
|||
add_project_arguments('-fpch-instantiate-templates', language : 'cpp')
|
||||
endif
|
||||
|
||||
# Darwin ld doesn't like "X.Y.Zpre"
|
||||
nix_soversion = meson.project_version().split('pre')[0]
|
||||
# Darwin ld doesn't like "X.Y.ZpreABCD+W"
|
||||
nix_soversion = meson.project_version().split('+')[0].split('pre')[0]
|
||||
|
||||
subdir('assert-fail')
|
||||
subdir('asan-options')
|
||||
|
|
|
|||
|
|
@ -204,6 +204,25 @@ let
|
|||
mesonFlags = [ (lib.mesonBool "b_asneeded" false) ] ++ prevAttrs.mesonFlags or [ ];
|
||||
};
|
||||
|
||||
enableSanitizersLayer =
|
||||
finalAttrs: prevAttrs:
|
||||
let
|
||||
sanitizers = lib.optional scope.withASan "address" ++ lib.optional scope.withUBSan "undefined";
|
||||
in
|
||||
{
|
||||
mesonFlags =
|
||||
(prevAttrs.mesonFlags or [ ])
|
||||
++ lib.optionals (lib.length sanitizers > 0) (
|
||||
[
|
||||
(lib.mesonOption "b_sanitize" (lib.concatStringsSep "," sanitizers))
|
||||
]
|
||||
++ (lib.optionals stdenv.cc.isClang [
|
||||
# https://www.github.com/mesonbuild/meson/issues/764
|
||||
(lib.mesonBool "b_lundef" false)
|
||||
])
|
||||
);
|
||||
};
|
||||
|
||||
nixDefaultsLayer = finalAttrs: prevAttrs: {
|
||||
strictDeps = prevAttrs.strictDeps or true;
|
||||
enableParallelBuilding = true;
|
||||
|
|
@ -246,6 +265,16 @@ in
|
|||
|
||||
inherit filesetToSource;
|
||||
|
||||
/**
|
||||
Whether meson components are built with [AddressSanitizer](https://clang.llvm.org/docs/AddressSanitizer.html).
|
||||
*/
|
||||
withASan = false;
|
||||
|
||||
/**
|
||||
Whether meson components are built with [UndefinedBehaviorSanitizer](https://clang.llvm.org/docs/UndefinedBehaviorSanitizer.html).
|
||||
*/
|
||||
withUBSan = false;
|
||||
|
||||
/**
|
||||
A user-provided extension function to apply to each component derivation.
|
||||
*/
|
||||
|
|
@ -332,6 +361,7 @@ in
|
|||
setVersionLayer
|
||||
mesonLayer
|
||||
fixupStaticLayer
|
||||
enableSanitizersLayer
|
||||
scope.mesonComponentOverrides
|
||||
];
|
||||
mkMesonExecutable = mkPackageBuilder [
|
||||
|
|
@ -342,6 +372,7 @@ in
|
|||
mesonLayer
|
||||
mesonBuildLayer
|
||||
fixupStaticLayer
|
||||
enableSanitizersLayer
|
||||
scope.mesonComponentOverrides
|
||||
];
|
||||
mkMesonLibrary = mkPackageBuilder [
|
||||
|
|
@ -353,6 +384,7 @@ in
|
|||
mesonBuildLayer
|
||||
mesonLibraryLayer
|
||||
fixupStaticLayer
|
||||
enableSanitizersLayer
|
||||
scope.mesonComponentOverrides
|
||||
];
|
||||
|
||||
|
|
@ -406,6 +438,11 @@ in
|
|||
*/
|
||||
nix-external-api-docs = callPackage ../src/external-api-docs/package.nix { version = fineVersion; };
|
||||
|
||||
/**
|
||||
JSON schema validation checks
|
||||
*/
|
||||
nix-json-schema-checks = callPackage ../src/json-schema-checks/package.nix { };
|
||||
|
||||
nix-perl-bindings = callPackage ../src/perl/package.nix { };
|
||||
|
||||
/**
|
||||
|
|
@ -458,7 +495,7 @@ in
|
|||
|
||||
Example:
|
||||
```
|
||||
overrideScope (finalScope: prevScope: { aws-sdk-cpp = null; })
|
||||
overrideScope (finalScope: prevScope: { aws-crt-cpp = null; })
|
||||
```
|
||||
*/
|
||||
overrideScope = f: (scope.overrideScope f).nix-everything;
|
||||
|
|
|
|||
|
|
@ -16,21 +16,6 @@ in
|
|||
scope: {
|
||||
inherit stdenv;
|
||||
|
||||
aws-sdk-cpp =
|
||||
(pkgs.aws-sdk-cpp.override {
|
||||
apis = [
|
||||
"identity-management"
|
||||
"s3"
|
||||
"transfer"
|
||||
];
|
||||
customMemoryManagement = false;
|
||||
}).overrideAttrs
|
||||
{
|
||||
# only a stripped down version is built, which takes a lot less resources
|
||||
# to build, so we don't need a "big-parallel" machine.
|
||||
requiredSystemFeatures = [ ];
|
||||
};
|
||||
|
||||
boehmgc =
|
||||
(pkgs.boehmgc.override {
|
||||
enableLargeConfig = true;
|
||||
|
|
@ -57,15 +42,20 @@ scope: {
|
|||
prevAttrs.postInstall;
|
||||
});
|
||||
|
||||
toml11 = pkgs.toml11.overrideAttrs rec {
|
||||
version = "4.4.0";
|
||||
src = pkgs.fetchFromGitHub {
|
||||
owner = "ToruNiina";
|
||||
repo = "toml11";
|
||||
tag = "v${version}";
|
||||
hash = "sha256-sgWKYxNT22nw376ttGsTdg0AMzOwp8QH3E8mx0BZJTQ=";
|
||||
};
|
||||
};
|
||||
# TODO: Remove this when https://github.com/NixOS/nixpkgs/pull/442682 is included in a stable release
|
||||
toml11 =
|
||||
if lib.versionAtLeast pkgs.toml11.version "4.4.0" then
|
||||
pkgs.toml11
|
||||
else
|
||||
pkgs.toml11.overrideAttrs rec {
|
||||
version = "4.4.0";
|
||||
src = pkgs.fetchFromGitHub {
|
||||
owner = "ToruNiina";
|
||||
repo = "toml11";
|
||||
tag = "v${version}";
|
||||
hash = "sha256-sgWKYxNT22nw376ttGsTdg0AMzOwp8QH3E8mx0BZJTQ=";
|
||||
};
|
||||
};
|
||||
|
||||
# TODO Hack until https://github.com/NixOS/nixpkgs/issues/45462 is fixed.
|
||||
boost =
|
||||
|
|
@ -84,38 +74,4 @@ scope: {
|
|||
buildPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.buildPhase;
|
||||
installPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.installPhase;
|
||||
});
|
||||
|
||||
libgit2 =
|
||||
if lib.versionAtLeast pkgs.libgit2.version "1.9.0" then
|
||||
pkgs.libgit2
|
||||
else
|
||||
pkgs.libgit2.overrideAttrs (attrs: {
|
||||
# libgit2: Nixpkgs 24.11 has < 1.9.0, which needs our patches
|
||||
nativeBuildInputs =
|
||||
attrs.nativeBuildInputs or [ ]
|
||||
# gitMinimal does not build on Windows. See packbuilder patch.
|
||||
++ lib.optionals (!stdenv.hostPlatform.isWindows) [
|
||||
# Needed for `git apply`; see `prePatch`
|
||||
pkgs.buildPackages.gitMinimal
|
||||
];
|
||||
# Only `git apply` can handle git binary patches
|
||||
prePatch =
|
||||
attrs.prePatch or ""
|
||||
+ lib.optionalString (!stdenv.hostPlatform.isWindows) ''
|
||||
patch() {
|
||||
git apply
|
||||
}
|
||||
'';
|
||||
patches =
|
||||
attrs.patches or [ ]
|
||||
++ [
|
||||
./patches/libgit2-mempack-thin-packfile.patch
|
||||
]
|
||||
# gitMinimal does not build on Windows, but fortunately this patch only
|
||||
# impacts interruptibility
|
||||
++ lib.optionals (!stdenv.hostPlatform.isWindows) [
|
||||
# binary patch; see `prePatch`
|
||||
./patches/libgit2-packbuilder-callback-interruptible.patch
|
||||
];
|
||||
});
|
||||
}
|
||||
|
|
|
|||
|
|
@ -70,6 +70,9 @@ pkgs.nixComponents2.nix-util.overrideAttrs (
|
|||
|
||||
# We use this shell with the local checkout, not unpackPhase.
|
||||
src = null;
|
||||
# Workaround https://sourceware.org/pipermail/gdb-patches/2025-October/221398.html
|
||||
# Remove when gdb fix is rolled out everywhere.
|
||||
separateDebugInfo = false;
|
||||
|
||||
env = {
|
||||
# For `make format`, to work without installing pre-commit
|
||||
|
|
@ -93,38 +96,45 @@ pkgs.nixComponents2.nix-util.overrideAttrs (
|
|||
++ map (transformFlag "libcmd") (ignoreCrossFile pkgs.nixComponents2.nix-cmd.mesonFlags);
|
||||
|
||||
nativeBuildInputs =
|
||||
attrs.nativeBuildInputs or [ ]
|
||||
++ pkgs.nixComponents2.nix-util.nativeBuildInputs
|
||||
++ pkgs.nixComponents2.nix-store.nativeBuildInputs
|
||||
++ pkgs.nixComponents2.nix-fetchers.nativeBuildInputs
|
||||
++ pkgs.nixComponents2.nix-expr.nativeBuildInputs
|
||||
++ lib.optionals havePerl pkgs.nixComponents2.nix-perl-bindings.nativeBuildInputs
|
||||
++ lib.optionals buildCanExecuteHost pkgs.nixComponents2.nix-manual.externalNativeBuildInputs
|
||||
++ pkgs.nixComponents2.nix-internal-api-docs.nativeBuildInputs
|
||||
++ pkgs.nixComponents2.nix-external-api-docs.nativeBuildInputs
|
||||
++ pkgs.nixComponents2.nix-functional-tests.externalNativeBuildInputs
|
||||
++ lib.optional (
|
||||
!buildCanExecuteHost
|
||||
# Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479
|
||||
&& !(stdenv.hostPlatform.isWindows && stdenv.buildPlatform.isDarwin)
|
||||
&& stdenv.hostPlatform.emulatorAvailable pkgs.buildPackages
|
||||
&& lib.meta.availableOn stdenv.buildPlatform (stdenv.hostPlatform.emulator pkgs.buildPackages)
|
||||
) pkgs.buildPackages.mesonEmulatorHook
|
||||
++ [
|
||||
pkgs.buildPackages.cmake
|
||||
pkgs.buildPackages.gnused
|
||||
pkgs.buildPackages.shellcheck
|
||||
pkgs.buildPackages.changelog-d
|
||||
modular.pre-commit.settings.package
|
||||
(pkgs.writeScriptBin "pre-commit-hooks-install" modular.pre-commit.settings.installationScript)
|
||||
pkgs.buildPackages.nixfmt-rfc-style
|
||||
pkgs.buildPackages.shellcheck
|
||||
pkgs.buildPackages.gdb
|
||||
]
|
||||
++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) (
|
||||
lib.hiPrio pkgs.buildPackages.clang-tools
|
||||
)
|
||||
++ lib.optional stdenv.hostPlatform.isLinux pkgs.buildPackages.mold-wrapped;
|
||||
let
|
||||
inputs =
|
||||
attrs.nativeBuildInputs or [ ]
|
||||
++ pkgs.nixComponents2.nix-util.nativeBuildInputs
|
||||
++ pkgs.nixComponents2.nix-store.nativeBuildInputs
|
||||
++ pkgs.nixComponents2.nix-fetchers.nativeBuildInputs
|
||||
++ pkgs.nixComponents2.nix-expr.nativeBuildInputs
|
||||
++ lib.optionals havePerl pkgs.nixComponents2.nix-perl-bindings.nativeBuildInputs
|
||||
++ lib.optionals buildCanExecuteHost pkgs.nixComponents2.nix-manual.externalNativeBuildInputs
|
||||
++ pkgs.nixComponents2.nix-internal-api-docs.nativeBuildInputs
|
||||
++ pkgs.nixComponents2.nix-external-api-docs.nativeBuildInputs
|
||||
++ pkgs.nixComponents2.nix-functional-tests.externalNativeBuildInputs
|
||||
++ pkgs.nixComponents2.nix-json-schema-checks.externalNativeBuildInputs
|
||||
++ lib.optional (
|
||||
!buildCanExecuteHost
|
||||
# Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479
|
||||
&& !(stdenv.hostPlatform.isWindows && stdenv.buildPlatform.isDarwin)
|
||||
&& stdenv.hostPlatform.emulatorAvailable pkgs.buildPackages
|
||||
&& lib.meta.availableOn stdenv.buildPlatform (stdenv.hostPlatform.emulator pkgs.buildPackages)
|
||||
) pkgs.buildPackages.mesonEmulatorHook
|
||||
++ [
|
||||
pkgs.buildPackages.cmake
|
||||
pkgs.buildPackages.gnused
|
||||
pkgs.buildPackages.changelog-d
|
||||
modular.pre-commit.settings.package
|
||||
(pkgs.writeScriptBin "pre-commit-hooks-install" modular.pre-commit.settings.installationScript)
|
||||
pkgs.buildPackages.nixfmt-rfc-style
|
||||
pkgs.buildPackages.shellcheck
|
||||
pkgs.buildPackages.include-what-you-use
|
||||
pkgs.buildPackages.gdb
|
||||
]
|
||||
++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) (
|
||||
lib.hiPrio pkgs.buildPackages.clang-tools
|
||||
)
|
||||
++ lib.optional stdenv.hostPlatform.isLinux pkgs.buildPackages.mold-wrapped;
|
||||
in
|
||||
# FIXME: separateDebugInfo = false doesn't actually prevent -Wa,--compress-debug-sections
|
||||
# from making its way into NIX_CFLAGS_COMPILE.
|
||||
lib.filter (p: !lib.hasInfix "separate-debug-info" p) inputs;
|
||||
|
||||
buildInputs = [
|
||||
pkgs.gbenchmark
|
||||
|
|
|
|||
|
|
@ -62,6 +62,7 @@ let
|
|||
"nix-cmd"
|
||||
"nix-cli"
|
||||
"nix-functional-tests"
|
||||
"nix-json-schema-checks"
|
||||
]
|
||||
++ lib.optionals enableBindings [
|
||||
"nix-perl-bindings"
|
||||
|
|
@ -73,7 +74,7 @@ let
|
|||
]
|
||||
);
|
||||
in
|
||||
{
|
||||
rec {
|
||||
/**
|
||||
An internal check to make sure our package listing is complete.
|
||||
*/
|
||||
|
|
@ -145,13 +146,25 @@ in
|
|||
)
|
||||
);
|
||||
|
||||
buildNoGc =
|
||||
# Builds with sanitizers already have GC disabled, so this buildNoGc can just
|
||||
# point to buildWithSanitizers in order to reduce the load on hydra.
|
||||
buildNoGc = buildWithSanitizers;
|
||||
|
||||
buildWithSanitizers =
|
||||
let
|
||||
components = forAllSystems (
|
||||
system:
|
||||
nixpkgsFor.${system}.native.nixComponents2.overrideScope (
|
||||
let
|
||||
pkgs = nixpkgsFor.${system}.native;
|
||||
in
|
||||
pkgs.nixComponents2.overrideScope (
|
||||
self: super: {
|
||||
# Boost coroutines fail with ASAN on darwin.
|
||||
withASan = !pkgs.stdenv.buildPlatform.isDarwin;
|
||||
withUBSan = true;
|
||||
nix-expr = super.nix-expr.override { enableGC = false; };
|
||||
# Unclear how to make Perl bindings work with a dynamically linked ASAN.
|
||||
nix-perl-bindings = null;
|
||||
}
|
||||
)
|
||||
);
|
||||
|
|
|
|||
|
|
@ -1,282 +0,0 @@
|
|||
commit 9bacade4a3ef4b6b26e2c02f549eef0e9eb9eaa2
|
||||
Author: Robert Hensing <robert@roberthensing.nl>
|
||||
Date: Sun Aug 18 20:20:36 2024 +0200
|
||||
|
||||
Add unoptimized git_mempack_write_thin_pack
|
||||
|
||||
diff --git a/include/git2/sys/mempack.h b/include/git2/sys/mempack.h
|
||||
index 17da590a3..3688bdd50 100644
|
||||
--- a/include/git2/sys/mempack.h
|
||||
+++ b/include/git2/sys/mempack.h
|
||||
@@ -44,6 +44,29 @@ GIT_BEGIN_DECL
|
||||
*/
|
||||
GIT_EXTERN(int) git_mempack_new(git_odb_backend **out);
|
||||
|
||||
+/**
|
||||
+ * Write a thin packfile with the objects in the memory store.
|
||||
+ *
|
||||
+ * A thin packfile is a packfile that does not contain its transitive closure of
|
||||
+ * references. This is useful for efficiently distributing additions to a
|
||||
+ * repository over the network, but also finds use in the efficient bulk
|
||||
+ * addition of objects to a repository, locally.
|
||||
+ *
|
||||
+ * This operation performs the (shallow) insert operations into the
|
||||
+ * `git_packbuilder`, but does not write the packfile to disk;
|
||||
+ * see `git_packbuilder_write_buf`.
|
||||
+ *
|
||||
+ * It also does not reset the memory store; see `git_mempack_reset`.
|
||||
+ *
|
||||
+ * @note This function may or may not write trees and blobs that are not
|
||||
+ * referenced by commits. Currently everything is written, but this
|
||||
+ * behavior may change in the future as the packer is optimized.
|
||||
+ *
|
||||
+ * @param backend The mempack backend
|
||||
+ * @param pb The packbuilder to use to write the packfile
|
||||
+ */
|
||||
+GIT_EXTERN(int) git_mempack_write_thin_pack(git_odb_backend *backend, git_packbuilder *pb);
|
||||
+
|
||||
/**
|
||||
* Dump all the queued in-memory writes to a packfile.
|
||||
*
|
||||
diff --git a/src/libgit2/odb_mempack.c b/src/libgit2/odb_mempack.c
|
||||
index 6f27f45f8..0b61e2b66 100644
|
||||
--- a/src/libgit2/odb_mempack.c
|
||||
+++ b/src/libgit2/odb_mempack.c
|
||||
@@ -132,6 +132,35 @@ cleanup:
|
||||
return err;
|
||||
}
|
||||
|
||||
+int git_mempack_write_thin_pack(git_odb_backend *backend, git_packbuilder *pb)
|
||||
+{
|
||||
+ struct memory_packer_db *db = (struct memory_packer_db *)backend;
|
||||
+ const git_oid *oid;
|
||||
+ size_t iter = 0;
|
||||
+ int err = -1;
|
||||
+
|
||||
+ /* TODO: Implement the recency heuristics.
|
||||
+ For this it probably makes sense to only write what's referenced
|
||||
+ through commits, an option I've carved out for you in the docs.
|
||||
+ wrt heuristics: ask your favorite LLM to translate https://git-scm.com/docs/pack-heuristics/en
|
||||
+ to actual normal reference documentation. */
|
||||
+ while (true) {
|
||||
+ err = git_oidmap_iterate(NULL, db->objects, &iter, &oid);
|
||||
+ if (err == GIT_ITEROVER) {
|
||||
+ err = 0;
|
||||
+ break;
|
||||
+ }
|
||||
+ if (err != 0)
|
||||
+ return err;
|
||||
+
|
||||
+ err = git_packbuilder_insert(pb, oid, NULL);
|
||||
+ if (err != 0)
|
||||
+ return err;
|
||||
+ }
|
||||
+
|
||||
+ return 0;
|
||||
+}
|
||||
+
|
||||
int git_mempack_dump(
|
||||
git_buf *pack,
|
||||
git_repository *repo,
|
||||
diff --git a/tests/libgit2/mempack/thinpack.c b/tests/libgit2/mempack/thinpack.c
|
||||
new file mode 100644
|
||||
index 000000000..604a4dda2
|
||||
--- /dev/null
|
||||
+++ b/tests/libgit2/mempack/thinpack.c
|
||||
@@ -0,0 +1,196 @@
|
||||
+#include "clar_libgit2.h"
|
||||
+#include "git2/indexer.h"
|
||||
+#include "git2/odb_backend.h"
|
||||
+#include "git2/tree.h"
|
||||
+#include "git2/types.h"
|
||||
+#include "git2/sys/mempack.h"
|
||||
+#include "git2/sys/odb_backend.h"
|
||||
+#include "util.h"
|
||||
+
|
||||
+static git_repository *_repo;
|
||||
+static git_odb_backend * _mempack_backend;
|
||||
+
|
||||
+void test_mempack_thinpack__initialize(void)
|
||||
+{
|
||||
+ git_odb *odb;
|
||||
+
|
||||
+ _repo = cl_git_sandbox_init_new("mempack_thinpack_repo");
|
||||
+
|
||||
+ cl_git_pass(git_mempack_new(&_mempack_backend));
|
||||
+ cl_git_pass(git_repository_odb(&odb, _repo));
|
||||
+ cl_git_pass(git_odb_add_backend(odb, _mempack_backend, 999));
|
||||
+ git_odb_free(odb);
|
||||
+}
|
||||
+
|
||||
+void _mempack_thinpack__cleanup(void)
|
||||
+{
|
||||
+ cl_git_sandbox_cleanup();
|
||||
+}
|
||||
+
|
||||
+/*
|
||||
+ Generating a packfile for an unchanged repo works and produces an empty packfile.
|
||||
+ Even if we allow this scenario to be detected, it shouldn't misbehave if the
|
||||
+ application is unaware of it.
|
||||
+*/
|
||||
+void test_mempack_thinpack__empty(void)
|
||||
+{
|
||||
+ git_packbuilder *pb;
|
||||
+ int version;
|
||||
+ int n;
|
||||
+ git_buf buf = GIT_BUF_INIT;
|
||||
+
|
||||
+ git_packbuilder_new(&pb, _repo);
|
||||
+
|
||||
+ cl_git_pass(git_mempack_write_thin_pack(_mempack_backend, pb));
|
||||
+ cl_git_pass(git_packbuilder_write_buf(&buf, pb));
|
||||
+ cl_assert_in_range(12, buf.size, 1024 /* empty packfile is >0 bytes, but certainly not that big */);
|
||||
+ cl_assert(buf.ptr[0] == 'P');
|
||||
+ cl_assert(buf.ptr[1] == 'A');
|
||||
+ cl_assert(buf.ptr[2] == 'C');
|
||||
+ cl_assert(buf.ptr[3] == 'K');
|
||||
+ version = (buf.ptr[4] << 24) | (buf.ptr[5] << 16) | (buf.ptr[6] << 8) | buf.ptr[7];
|
||||
+ /* Subject to change. https://git-scm.com/docs/pack-format: Git currently accepts version number 2 or 3 but generates version 2 only.*/
|
||||
+ cl_assert_equal_i(2, version);
|
||||
+ n = (buf.ptr[8] << 24) | (buf.ptr[9] << 16) | (buf.ptr[10] << 8) | buf.ptr[11];
|
||||
+ cl_assert_equal_i(0, n);
|
||||
+ git_buf_dispose(&buf);
|
||||
+
|
||||
+ git_packbuilder_free(pb);
|
||||
+}
|
||||
+
|
||||
+#define LIT_LEN(x) x, sizeof(x) - 1
|
||||
+
|
||||
+/*
|
||||
+ Check that git_mempack_write_thin_pack produces a thin packfile.
|
||||
+*/
|
||||
+void test_mempack_thinpack__thin(void)
|
||||
+{
|
||||
+ /* Outline:
|
||||
+ - Create tree 1
|
||||
+ - Flush to packfile A
|
||||
+ - Create tree 2
|
||||
+ - Flush to packfile B
|
||||
+
|
||||
+ Tree 2 has a new blob and a reference to a blob from tree 1.
|
||||
+
|
||||
+ Expectation:
|
||||
+ - Packfile B is thin and does not contain the objects from packfile A
|
||||
+ */
|
||||
+
|
||||
+
|
||||
+ git_oid oid_blob_1;
|
||||
+ git_oid oid_blob_2;
|
||||
+ git_oid oid_blob_3;
|
||||
+ git_oid oid_tree_1;
|
||||
+ git_oid oid_tree_2;
|
||||
+ git_treebuilder *tb;
|
||||
+
|
||||
+ git_packbuilder *pb;
|
||||
+ git_buf buf = GIT_BUF_INIT;
|
||||
+ git_indexer *indexer;
|
||||
+ git_indexer_progress stats;
|
||||
+ char pack_dir_path[1024];
|
||||
+
|
||||
+ char sbuf[1024];
|
||||
+ const char * repo_path;
|
||||
+ const char * pack_name_1;
|
||||
+ const char * pack_name_2;
|
||||
+ git_str pack_path_1 = GIT_STR_INIT;
|
||||
+ git_str pack_path_2 = GIT_STR_INIT;
|
||||
+ git_odb_backend * pack_odb_backend_1;
|
||||
+ git_odb_backend * pack_odb_backend_2;
|
||||
+
|
||||
+
|
||||
+ cl_assert_in_range(0, snprintf(pack_dir_path, sizeof(pack_dir_path), "%s/objects/pack", git_repository_path(_repo)), sizeof(pack_dir_path));
|
||||
+
|
||||
+ /* Create tree 1 */
|
||||
+
|
||||
+ cl_git_pass(git_blob_create_from_buffer(&oid_blob_1, _repo, LIT_LEN("thinpack blob 1")));
|
||||
+ cl_git_pass(git_blob_create_from_buffer(&oid_blob_2, _repo, LIT_LEN("thinpack blob 2")));
|
||||
+
|
||||
+
|
||||
+ cl_git_pass(git_treebuilder_new(&tb, _repo, NULL));
|
||||
+ cl_git_pass(git_treebuilder_insert(NULL, tb, "blob1", &oid_blob_1, GIT_FILEMODE_BLOB));
|
||||
+ cl_git_pass(git_treebuilder_insert(NULL, tb, "blob2", &oid_blob_2, GIT_FILEMODE_BLOB));
|
||||
+ cl_git_pass(git_treebuilder_write(&oid_tree_1, tb));
|
||||
+
|
||||
+ /* Flush */
|
||||
+
|
||||
+ cl_git_pass(git_packbuilder_new(&pb, _repo));
|
||||
+ cl_git_pass(git_mempack_write_thin_pack(_mempack_backend, pb));
|
||||
+ cl_git_pass(git_packbuilder_write_buf(&buf, pb));
|
||||
+ cl_git_pass(git_indexer_new(&indexer, pack_dir_path, 0, NULL, NULL));
|
||||
+ cl_git_pass(git_indexer_append(indexer, buf.ptr, buf.size, &stats));
|
||||
+ cl_git_pass(git_indexer_commit(indexer, &stats));
|
||||
+ pack_name_1 = strdup(git_indexer_name(indexer));
|
||||
+ cl_assert(pack_name_1);
|
||||
+ git_buf_dispose(&buf);
|
||||
+ git_mempack_reset(_mempack_backend);
|
||||
+ git_indexer_free(indexer);
|
||||
+ git_packbuilder_free(pb);
|
||||
+
|
||||
+ /* Create tree 2 */
|
||||
+
|
||||
+ cl_git_pass(git_treebuilder_clear(tb));
|
||||
+ /* blob 1 won't be used, but we add it anyway to test that just "declaring" an object doesn't
|
||||
+ necessarily cause its inclusion in the next thin packfile. It must only be included if new. */
|
||||
+ cl_git_pass(git_blob_create_from_buffer(&oid_blob_1, _repo, LIT_LEN("thinpack blob 1")));
|
||||
+ cl_git_pass(git_blob_create_from_buffer(&oid_blob_3, _repo, LIT_LEN("thinpack blob 3")));
|
||||
+ cl_git_pass(git_treebuilder_insert(NULL, tb, "blob1", &oid_blob_1, GIT_FILEMODE_BLOB));
|
||||
+ cl_git_pass(git_treebuilder_insert(NULL, tb, "blob3", &oid_blob_3, GIT_FILEMODE_BLOB));
|
||||
+ cl_git_pass(git_treebuilder_write(&oid_tree_2, tb));
|
||||
+
|
||||
+ /* Flush */
|
||||
+
|
||||
+ cl_git_pass(git_packbuilder_new(&pb, _repo));
|
||||
+ cl_git_pass(git_mempack_write_thin_pack(_mempack_backend, pb));
|
||||
+ cl_git_pass(git_packbuilder_write_buf(&buf, pb));
|
||||
+ cl_git_pass(git_indexer_new(&indexer, pack_dir_path, 0, NULL, NULL));
|
||||
+ cl_git_pass(git_indexer_append(indexer, buf.ptr, buf.size, &stats));
|
||||
+ cl_git_pass(git_indexer_commit(indexer, &stats));
|
||||
+ pack_name_2 = strdup(git_indexer_name(indexer));
|
||||
+ cl_assert(pack_name_2);
|
||||
+ git_buf_dispose(&buf);
|
||||
+ git_mempack_reset(_mempack_backend);
|
||||
+ git_indexer_free(indexer);
|
||||
+ git_packbuilder_free(pb);
|
||||
+ git_treebuilder_free(tb);
|
||||
+
|
||||
+ /* Assertions */
|
||||
+
|
||||
+ assert(pack_name_1);
|
||||
+ assert(pack_name_2);
|
||||
+
|
||||
+ repo_path = git_repository_path(_repo);
|
||||
+
|
||||
+ snprintf(sbuf, sizeof(sbuf), "objects/pack/pack-%s.pack", pack_name_1);
|
||||
+ git_str_joinpath(&pack_path_1, repo_path, sbuf);
|
||||
+ snprintf(sbuf, sizeof(sbuf), "objects/pack/pack-%s.pack", pack_name_2);
|
||||
+ git_str_joinpath(&pack_path_2, repo_path, sbuf);
|
||||
+
|
||||
+ /* If they're the same, something definitely went wrong. */
|
||||
+ cl_assert(strcmp(pack_name_1, pack_name_2) != 0);
|
||||
+
|
||||
+ cl_git_pass(git_odb_backend_one_pack(&pack_odb_backend_1, pack_path_1.ptr));
|
||||
+ cl_assert(pack_odb_backend_1->exists(pack_odb_backend_1, &oid_blob_1));
|
||||
+ cl_assert(pack_odb_backend_1->exists(pack_odb_backend_1, &oid_blob_2));
|
||||
+ cl_assert(!pack_odb_backend_1->exists(pack_odb_backend_1, &oid_blob_3));
|
||||
+ cl_assert(pack_odb_backend_1->exists(pack_odb_backend_1, &oid_tree_1));
|
||||
+ cl_assert(!pack_odb_backend_1->exists(pack_odb_backend_1, &oid_tree_2));
|
||||
+
|
||||
+ cl_git_pass(git_odb_backend_one_pack(&pack_odb_backend_2, pack_path_2.ptr));
|
||||
+ /* blob 1 is already in the packfile 1, so packfile 2 must not include it, in order to be _thin_. */
|
||||
+ cl_assert(!pack_odb_backend_2->exists(pack_odb_backend_2, &oid_blob_1));
|
||||
+ cl_assert(!pack_odb_backend_2->exists(pack_odb_backend_2, &oid_blob_2));
|
||||
+ cl_assert(pack_odb_backend_2->exists(pack_odb_backend_2, &oid_blob_3));
|
||||
+ cl_assert(!pack_odb_backend_2->exists(pack_odb_backend_2, &oid_tree_1));
|
||||
+ cl_assert(pack_odb_backend_2->exists(pack_odb_backend_2, &oid_tree_2));
|
||||
+
|
||||
+ pack_odb_backend_1->free(pack_odb_backend_1);
|
||||
+ pack_odb_backend_2->free(pack_odb_backend_2);
|
||||
+ free((void *)pack_name_1);
|
||||
+ free((void *)pack_name_2);
|
||||
+ git_str_dispose(&pack_path_1);
|
||||
+ git_str_dispose(&pack_path_2);
|
||||
+
|
||||
+}
|
||||
|
|
@ -1,930 +0,0 @@
|
|||
commit e9823c5da4fa977c46bcb97167fbdd0d70adb5ff
|
||||
Author: Robert Hensing <robert@roberthensing.nl>
|
||||
Date: Mon Aug 26 20:07:04 2024 +0200
|
||||
|
||||
Make packbuilder interruptible using progress callback
|
||||
|
||||
Forward errors from packbuilder->progress_cb
|
||||
|
||||
This allows the callback to terminate long-running operations when
|
||||
the application is interrupted.
|
||||
|
||||
diff --git a/include/git2/pack.h b/include/git2/pack.h
|
||||
index 0f6bd2ab9..bee72a6c0 100644
|
||||
--- a/include/git2/pack.h
|
||||
+++ b/include/git2/pack.h
|
||||
@@ -247,6 +247,9 @@ typedef int GIT_CALLBACK(git_packbuilder_progress)(
|
||||
* @param progress_cb Function to call with progress information during
|
||||
* pack building. Be aware that this is called inline with pack building
|
||||
* operations, so performance may be affected.
|
||||
+ * When progress_cb returns an error, the pack building process will be
|
||||
+ * aborted and the error will be returned from the invoked function.
|
||||
+ * `pb` must then be freed.
|
||||
* @param progress_cb_payload Payload for progress callback.
|
||||
* @return 0 or an error code
|
||||
*/
|
||||
diff --git a/src/libgit2/pack-objects.c b/src/libgit2/pack-objects.c
|
||||
index b2d80cba9..7c331c2d5 100644
|
||||
--- a/src/libgit2/pack-objects.c
|
||||
+++ b/src/libgit2/pack-objects.c
|
||||
@@ -932,6 +932,9 @@ static int report_delta_progress(
|
||||
{
|
||||
int ret;
|
||||
|
||||
+ if (pb->failure)
|
||||
+ return pb->failure;
|
||||
+
|
||||
if (pb->progress_cb) {
|
||||
uint64_t current_time = git_time_monotonic();
|
||||
uint64_t elapsed = current_time - pb->last_progress_report_time;
|
||||
@@ -943,8 +946,10 @@ static int report_delta_progress(
|
||||
GIT_PACKBUILDER_DELTAFICATION,
|
||||
count, pb->nr_objects, pb->progress_cb_payload);
|
||||
|
||||
- if (ret)
|
||||
+ if (ret) {
|
||||
+ pb->failure = ret;
|
||||
return git_error_set_after_callback(ret);
|
||||
+ }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -976,7 +981,10 @@ static int find_deltas(git_packbuilder *pb, git_pobject **list,
|
||||
}
|
||||
|
||||
pb->nr_deltified += 1;
|
||||
- report_delta_progress(pb, pb->nr_deltified, false);
|
||||
+ if ((error = report_delta_progress(pb, pb->nr_deltified, false)) < 0) {
|
||||
+ GIT_ASSERT(git_packbuilder__progress_unlock(pb) == 0);
|
||||
+ goto on_error;
|
||||
+ }
|
||||
|
||||
po = *list++;
|
||||
(*list_size)--;
|
||||
@@ -1124,6 +1132,10 @@ struct thread_params {
|
||||
size_t depth;
|
||||
size_t working;
|
||||
size_t data_ready;
|
||||
+
|
||||
+ /* A pb->progress_cb can stop the packing process by returning an error.
|
||||
+ When that happens, all threads observe the error and stop voluntarily. */
|
||||
+ bool stopped;
|
||||
};
|
||||
|
||||
static void *threaded_find_deltas(void *arg)
|
||||
@@ -1133,7 +1145,12 @@ static void *threaded_find_deltas(void *arg)
|
||||
while (me->remaining) {
|
||||
if (find_deltas(me->pb, me->list, &me->remaining,
|
||||
me->window, me->depth) < 0) {
|
||||
- ; /* TODO */
|
||||
+ me->stopped = true;
|
||||
+ GIT_ASSERT_WITH_RETVAL(git_packbuilder__progress_lock(me->pb) == 0, NULL);
|
||||
+ me->working = false;
|
||||
+ git_cond_signal(&me->pb->progress_cond);
|
||||
+ GIT_ASSERT_WITH_RETVAL(git_packbuilder__progress_unlock(me->pb) == 0, NULL);
|
||||
+ return NULL;
|
||||
}
|
||||
|
||||
GIT_ASSERT_WITH_RETVAL(git_packbuilder__progress_lock(me->pb) == 0, NULL);
|
||||
@@ -1175,8 +1192,7 @@ static int ll_find_deltas(git_packbuilder *pb, git_pobject **list,
|
||||
pb->nr_threads = git__online_cpus();
|
||||
|
||||
if (pb->nr_threads <= 1) {
|
||||
- find_deltas(pb, list, &list_size, window, depth);
|
||||
- return 0;
|
||||
+ return find_deltas(pb, list, &list_size, window, depth);
|
||||
}
|
||||
|
||||
p = git__mallocarray(pb->nr_threads, sizeof(*p));
|
||||
@@ -1195,6 +1211,7 @@ static int ll_find_deltas(git_packbuilder *pb, git_pobject **list,
|
||||
p[i].depth = depth;
|
||||
p[i].working = 1;
|
||||
p[i].data_ready = 0;
|
||||
+ p[i].stopped = 0;
|
||||
|
||||
/* try to split chunks on "path" boundaries */
|
||||
while (sub_size && sub_size < list_size &&
|
||||
@@ -1262,7 +1279,7 @@ static int ll_find_deltas(git_packbuilder *pb, git_pobject **list,
|
||||
(!victim || victim->remaining < p[i].remaining))
|
||||
victim = &p[i];
|
||||
|
||||
- if (victim) {
|
||||
+ if (victim && !target->stopped) {
|
||||
sub_size = victim->remaining / 2;
|
||||
list = victim->list + victim->list_size - sub_size;
|
||||
while (sub_size && list[0]->hash &&
|
||||
@@ -1286,7 +1303,7 @@ static int ll_find_deltas(git_packbuilder *pb, git_pobject **list,
|
||||
}
|
||||
target->list_size = sub_size;
|
||||
target->remaining = sub_size;
|
||||
- target->working = 1;
|
||||
+ target->working = 1; /* even when target->stopped, so that we don't process this thread again */
|
||||
GIT_ASSERT(git_packbuilder__progress_unlock(pb) == 0);
|
||||
|
||||
if (git_mutex_lock(&target->mutex)) {
|
||||
@@ -1299,7 +1316,7 @@ static int ll_find_deltas(git_packbuilder *pb, git_pobject **list,
|
||||
git_cond_signal(&target->cond);
|
||||
git_mutex_unlock(&target->mutex);
|
||||
|
||||
- if (!sub_size) {
|
||||
+ if (target->stopped || !sub_size) {
|
||||
git_thread_join(&target->thread, NULL);
|
||||
git_cond_free(&target->cond);
|
||||
git_mutex_free(&target->mutex);
|
||||
@@ -1308,7 +1325,7 @@ static int ll_find_deltas(git_packbuilder *pb, git_pobject **list,
|
||||
}
|
||||
|
||||
git__free(p);
|
||||
- return 0;
|
||||
+ return pb->failure;
|
||||
}
|
||||
|
||||
#else
|
||||
@@ -1319,6 +1336,7 @@ int git_packbuilder__prepare(git_packbuilder *pb)
|
||||
{
|
||||
git_pobject **delta_list;
|
||||
size_t i, n = 0;
|
||||
+ int error;
|
||||
|
||||
if (pb->nr_objects == 0 || pb->done)
|
||||
return 0; /* nothing to do */
|
||||
@@ -1327,8 +1345,10 @@ int git_packbuilder__prepare(git_packbuilder *pb)
|
||||
* Although we do not report progress during deltafication, we
|
||||
* at least report that we are in the deltafication stage
|
||||
*/
|
||||
- if (pb->progress_cb)
|
||||
- pb->progress_cb(GIT_PACKBUILDER_DELTAFICATION, 0, pb->nr_objects, pb->progress_cb_payload);
|
||||
+ if (pb->progress_cb) {
|
||||
+ if ((error = pb->progress_cb(GIT_PACKBUILDER_DELTAFICATION, 0, pb->nr_objects, pb->progress_cb_payload)) < 0)
|
||||
+ return git_error_set_after_callback(error);
|
||||
+ }
|
||||
|
||||
delta_list = git__mallocarray(pb->nr_objects, sizeof(*delta_list));
|
||||
GIT_ERROR_CHECK_ALLOC(delta_list);
|
||||
@@ -1345,31 +1365,33 @@ int git_packbuilder__prepare(git_packbuilder *pb)
|
||||
|
||||
if (n > 1) {
|
||||
git__tsort((void **)delta_list, n, type_size_sort);
|
||||
- if (ll_find_deltas(pb, delta_list, n,
|
||||
+ if ((error = ll_find_deltas(pb, delta_list, n,
|
||||
GIT_PACK_WINDOW + 1,
|
||||
- GIT_PACK_DEPTH) < 0) {
|
||||
+ GIT_PACK_DEPTH)) < 0) {
|
||||
git__free(delta_list);
|
||||
- return -1;
|
||||
+ return error;
|
||||
}
|
||||
}
|
||||
|
||||
- report_delta_progress(pb, pb->nr_objects, true);
|
||||
+ error = report_delta_progress(pb, pb->nr_objects, true);
|
||||
|
||||
pb->done = true;
|
||||
git__free(delta_list);
|
||||
- return 0;
|
||||
+ return error;
|
||||
}
|
||||
|
||||
-#define PREPARE_PACK if (git_packbuilder__prepare(pb) < 0) { return -1; }
|
||||
+#define PREPARE_PACK error = git_packbuilder__prepare(pb); if (error < 0) { return error; }
|
||||
|
||||
int git_packbuilder_foreach(git_packbuilder *pb, int (*cb)(void *buf, size_t size, void *payload), void *payload)
|
||||
{
|
||||
+ int error;
|
||||
PREPARE_PACK;
|
||||
return write_pack(pb, cb, payload);
|
||||
}
|
||||
|
||||
int git_packbuilder__write_buf(git_str *buf, git_packbuilder *pb)
|
||||
{
|
||||
+ int error;
|
||||
PREPARE_PACK;
|
||||
|
||||
return write_pack(pb, &write_pack_buf, buf);
|
||||
diff --git a/src/libgit2/pack-objects.h b/src/libgit2/pack-objects.h
|
||||
index bbc8b9430..380a28ebe 100644
|
||||
--- a/src/libgit2/pack-objects.h
|
||||
+++ b/src/libgit2/pack-objects.h
|
||||
@@ -100,6 +100,10 @@ struct git_packbuilder {
|
||||
uint64_t last_progress_report_time;
|
||||
|
||||
bool done;
|
||||
+
|
||||
+ /* A non-zero error code in failure causes all threads to shut themselves
|
||||
+ down. Some functions will return this error code. */
|
||||
+ volatile int failure;
|
||||
};
|
||||
|
||||
int git_packbuilder__write_buf(git_str *buf, git_packbuilder *pb);
|
||||
diff --git a/tests/libgit2/pack/cancel.c b/tests/libgit2/pack/cancel.c
|
||||
new file mode 100644
|
||||
index 000000000..a0aa9716a
|
||||
--- /dev/null
|
||||
+++ b/tests/libgit2/pack/cancel.c
|
||||
@@ -0,0 +1,240 @@
|
||||
+#include "clar_libgit2.h"
|
||||
+#include "futils.h"
|
||||
+#include "pack.h"
|
||||
+#include "hash.h"
|
||||
+#include "iterator.h"
|
||||
+#include "vector.h"
|
||||
+#include "posix.h"
|
||||
+#include "hash.h"
|
||||
+#include "pack-objects.h"
|
||||
+
|
||||
+static git_repository *_repo;
|
||||
+static git_revwalk *_revwalker;
|
||||
+static git_packbuilder *_packbuilder;
|
||||
+static git_indexer *_indexer;
|
||||
+static git_vector _commits;
|
||||
+static int _commits_is_initialized;
|
||||
+static git_indexer_progress _stats;
|
||||
+
|
||||
+extern bool git_disable_pack_keep_file_checks;
|
||||
+
|
||||
+static void pack_packbuilder_init(const char *sandbox) {
|
||||
+ _repo = cl_git_sandbox_init(sandbox);
|
||||
+ /* cl_git_pass(p_chdir(sandbox)); */
|
||||
+ cl_git_pass(git_revwalk_new(&_revwalker, _repo));
|
||||
+ cl_git_pass(git_packbuilder_new(&_packbuilder, _repo));
|
||||
+ cl_git_pass(git_vector_init(&_commits, 0, NULL));
|
||||
+ _commits_is_initialized = 1;
|
||||
+ memset(&_stats, 0, sizeof(_stats));
|
||||
+ p_fsync__cnt = 0;
|
||||
+}
|
||||
+
|
||||
+void test_pack_cancel__initialize(void)
|
||||
+{
|
||||
+ pack_packbuilder_init("small.git");
|
||||
+}
|
||||
+
|
||||
+void test_pack_cancel__cleanup(void)
|
||||
+{
|
||||
+ git_oid *o;
|
||||
+ unsigned int i;
|
||||
+
|
||||
+ cl_git_pass(git_libgit2_opts(GIT_OPT_ENABLE_FSYNC_GITDIR, 0));
|
||||
+ cl_git_pass(git_libgit2_opts(GIT_OPT_DISABLE_PACK_KEEP_FILE_CHECKS, false));
|
||||
+
|
||||
+ if (_commits_is_initialized) {
|
||||
+ _commits_is_initialized = 0;
|
||||
+ git_vector_foreach(&_commits, i, o) {
|
||||
+ git__free(o);
|
||||
+ }
|
||||
+ git_vector_free(&_commits);
|
||||
+ }
|
||||
+
|
||||
+ git_packbuilder_free(_packbuilder);
|
||||
+ _packbuilder = NULL;
|
||||
+
|
||||
+ git_revwalk_free(_revwalker);
|
||||
+ _revwalker = NULL;
|
||||
+
|
||||
+ git_indexer_free(_indexer);
|
||||
+ _indexer = NULL;
|
||||
+
|
||||
+ /* cl_git_pass(p_chdir("..")); */
|
||||
+ cl_git_sandbox_cleanup();
|
||||
+ _repo = NULL;
|
||||
+}
|
||||
+
|
||||
+static int seed_packbuilder(void)
|
||||
+{
|
||||
+ int error;
|
||||
+ git_oid oid, *o;
|
||||
+ unsigned int i;
|
||||
+
|
||||
+ git_revwalk_sorting(_revwalker, GIT_SORT_TIME);
|
||||
+ cl_git_pass(git_revwalk_push_ref(_revwalker, "HEAD"));
|
||||
+
|
||||
+ while (git_revwalk_next(&oid, _revwalker) == 0) {
|
||||
+ o = git__malloc(sizeof(git_oid));
|
||||
+ cl_assert(o != NULL);
|
||||
+ git_oid_cpy(o, &oid);
|
||||
+ cl_git_pass(git_vector_insert(&_commits, o));
|
||||
+ }
|
||||
+
|
||||
+ git_vector_foreach(&_commits, i, o) {
|
||||
+ if((error = git_packbuilder_insert(_packbuilder, o, NULL)) < 0)
|
||||
+ return error;
|
||||
+ }
|
||||
+
|
||||
+ git_vector_foreach(&_commits, i, o) {
|
||||
+ git_object *obj;
|
||||
+ cl_git_pass(git_object_lookup(&obj, _repo, o, GIT_OBJECT_COMMIT));
|
||||
+ error = git_packbuilder_insert_tree(_packbuilder,
|
||||
+ git_commit_tree_id((git_commit *)obj));
|
||||
+ git_object_free(obj);
|
||||
+ if (error < 0)
|
||||
+ return error;
|
||||
+ }
|
||||
+
|
||||
+ return 0;
|
||||
+}
|
||||
+
|
||||
+static int fail_stage;
|
||||
+
|
||||
+static int packbuilder_cancel_after_n_calls_cb(int stage, uint32_t current, uint32_t total, void *payload)
|
||||
+{
|
||||
+
|
||||
+ /* Force the callback to run again on the next opportunity regardless
|
||||
+ of how fast we're running. */
|
||||
+ _packbuilder->last_progress_report_time = 0;
|
||||
+
|
||||
+ if (stage == fail_stage) {
|
||||
+ int *calls = (int *)payload;
|
||||
+ int n = *calls;
|
||||
+ /* Always decrement, including past zero. This way the error is only
|
||||
+ triggered once, making sure it is picked up immediately. */
|
||||
+ --*calls;
|
||||
+ if (n == 0)
|
||||
+ return GIT_EUSER;
|
||||
+ }
|
||||
+
|
||||
+ return 0;
|
||||
+}
|
||||
+
|
||||
+static void test_cancel(int n)
|
||||
+{
|
||||
+
|
||||
+ int calls_remaining = n;
|
||||
+ int err;
|
||||
+ git_buf buf = GIT_BUF_INIT;
|
||||
+
|
||||
+ /* Switch to a small repository, so that `packbuilder_cancel_after_n_calls_cb`
|
||||
+ can hack the time to call the callback on every opportunity. */
|
||||
+
|
||||
+ cl_git_pass(git_packbuilder_set_callbacks(_packbuilder, &packbuilder_cancel_after_n_calls_cb, &calls_remaining));
|
||||
+ err = seed_packbuilder();
|
||||
+ if (!err)
|
||||
+ err = git_packbuilder_write_buf(&buf, _packbuilder);
|
||||
+
|
||||
+ cl_assert_equal_i(GIT_EUSER, err);
|
||||
+}
|
||||
+void test_pack_cancel__cancel_after_add_0(void)
|
||||
+{
|
||||
+ fail_stage = GIT_PACKBUILDER_ADDING_OBJECTS;
|
||||
+ test_cancel(0);
|
||||
+}
|
||||
+
|
||||
+void test_pack_cancel__cancel_after_add_1(void)
|
||||
+{
|
||||
+ cl_skip();
|
||||
+ fail_stage = GIT_PACKBUILDER_ADDING_OBJECTS;
|
||||
+ test_cancel(1);
|
||||
+}
|
||||
+
|
||||
+void test_pack_cancel__cancel_after_delta_0(void)
|
||||
+{
|
||||
+ fail_stage = GIT_PACKBUILDER_DELTAFICATION;
|
||||
+ test_cancel(0);
|
||||
+}
|
||||
+
|
||||
+void test_pack_cancel__cancel_after_delta_1(void)
|
||||
+{
|
||||
+ fail_stage = GIT_PACKBUILDER_DELTAFICATION;
|
||||
+ test_cancel(1);
|
||||
+}
|
||||
+
|
||||
+void test_pack_cancel__cancel_after_delta_0_threaded(void)
|
||||
+{
|
||||
+#ifdef GIT_THREADS
|
||||
+ git_packbuilder_set_threads(_packbuilder, 8);
|
||||
+ fail_stage = GIT_PACKBUILDER_DELTAFICATION;
|
||||
+ test_cancel(0);
|
||||
+#else
|
||||
+ cl_skip();
|
||||
+#endif
|
||||
+}
|
||||
+
|
||||
+void test_pack_cancel__cancel_after_delta_1_threaded(void)
|
||||
+{
|
||||
+#ifdef GIT_THREADS
|
||||
+ git_packbuilder_set_threads(_packbuilder, 8);
|
||||
+ fail_stage = GIT_PACKBUILDER_DELTAFICATION;
|
||||
+ test_cancel(1);
|
||||
+#else
|
||||
+ cl_skip();
|
||||
+#endif
|
||||
+}
|
||||
+
|
||||
+static int foreach_cb(void *buf, size_t len, void *payload)
|
||||
+{
|
||||
+ git_indexer *idx = (git_indexer *) payload;
|
||||
+ cl_git_pass(git_indexer_append(idx, buf, len, &_stats));
|
||||
+ return 0;
|
||||
+}
|
||||
+
|
||||
+void test_pack_cancel__foreach(void)
|
||||
+{
|
||||
+ git_indexer *idx;
|
||||
+
|
||||
+ seed_packbuilder();
|
||||
+
|
||||
+#ifdef GIT_EXPERIMENTAL_SHA256
|
||||
+ cl_git_pass(git_indexer_new(&idx, ".", GIT_OID_SHA1, NULL));
|
||||
+#else
|
||||
+ cl_git_pass(git_indexer_new(&idx, ".", 0, NULL, NULL));
|
||||
+#endif
|
||||
+
|
||||
+ cl_git_pass(git_packbuilder_foreach(_packbuilder, foreach_cb, idx));
|
||||
+ cl_git_pass(git_indexer_commit(idx, &_stats));
|
||||
+ git_indexer_free(idx);
|
||||
+}
|
||||
+
|
||||
+static int foreach_cancel_cb(void *buf, size_t len, void *payload)
|
||||
+{
|
||||
+ git_indexer *idx = (git_indexer *)payload;
|
||||
+ cl_git_pass(git_indexer_append(idx, buf, len, &_stats));
|
||||
+ return (_stats.total_objects > 2) ? -1111 : 0;
|
||||
+}
|
||||
+
|
||||
+void test_pack_cancel__foreach_with_cancel(void)
|
||||
+{
|
||||
+ git_indexer *idx;
|
||||
+
|
||||
+ seed_packbuilder();
|
||||
+
|
||||
+#ifdef GIT_EXPERIMENTAL_SHA256
|
||||
+ cl_git_pass(git_indexer_new(&idx, ".", GIT_OID_SHA1, NULL));
|
||||
+#else
|
||||
+ cl_git_pass(git_indexer_new(&idx, ".", 0, NULL, NULL));
|
||||
+#endif
|
||||
+
|
||||
+ cl_git_fail_with(
|
||||
+ git_packbuilder_foreach(_packbuilder, foreach_cancel_cb, idx), -1111);
|
||||
+ git_indexer_free(idx);
|
||||
+}
|
||||
+
|
||||
+void test_pack_cancel__keep_file_check(void)
|
||||
+{
|
||||
+ assert(!git_disable_pack_keep_file_checks);
|
||||
+ cl_git_pass(git_libgit2_opts(GIT_OPT_DISABLE_PACK_KEEP_FILE_CHECKS, true));
|
||||
+ assert(git_disable_pack_keep_file_checks);
|
||||
+}
|
||||
diff --git a/tests/resources/small.git/HEAD b/tests/resources/small.git/HEAD
|
||||
new file mode 100644
|
||||
index 0000000000000000000000000000000000000000..cb089cd89a7d7686d284d8761201649346b5aa1c
|
||||
GIT binary patch
|
||||
literal 23
|
||||
ecmXR)O|w!cN=+-)&qz&7Db~+TEG|hc;sO9;xClW2
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/config b/tests/resources/small.git/config
|
||||
new file mode 100644
|
||||
index 0000000000000000000000000000000000000000..07d359d07cf1ed0c0074fdad71ffff5942f0adfa
|
||||
GIT binary patch
|
||||
literal 66
|
||||
zcmaz}&M!)h<>D+#Eyyp<EXgmbOv^9IO)M!(Eh^5;&r`5fFyP`$%gjm5%}+@M@=A(I
|
||||
MQ@J>k5{uv*03B5png9R*
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/description b/tests/resources/small.git/description
|
||||
new file mode 100644
|
||||
index 0000000000000000000000000000000000000000..498b267a8c7812490d6479839c5577eaaec79d62
|
||||
GIT binary patch
|
||||
literal 73
|
||||
zcmWH|%S+5nO;IRHEyyp<EXgmbv{pz>$t+PQ$;d2LNXyJgRZve!Elw`VEGWs$&r??@
|
||||
Q$yWgB0LrH#Y0~2Y0PnOK(EtDd
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/hooks/applypatch-msg.sample b/tests/resources/small.git/hooks/applypatch-msg.sample
|
||||
new file mode 100755
|
||||
index 0000000000000000000000000000000000000000..dcbf8167fa503f96ff6a39c68409007eadc9b1f3
|
||||
GIT binary patch
|
||||
literal 535
|
||||
zcmY+AX;Q;542A#a6e8^~FyI8r&I~hf2QJ{GO6(?HuvEG*+#R{4EI%zhfA8r{j%sh$
|
||||
zHE~E-UtQd8{bq4@*S%jq3@bmxwQDXGv#o!N`o3AHMw3xD)hy0#>&E&zzl%vRffo<B
|
||||
z)-H|+CWHZ~O*S%cfYx9;02_ohIA<Bg(1SxF-6OCb&_lBkf{t<AM9r;%E(Hf#h{|a@
|
||||
z9>mqo=v6>_2NRa#TwDdYvTVQyueO*15Nlo%=#DXgC0bhF3vTa`LQGaO9;jeD$OP?~
|
||||
za$G4Q{z+Q_{5V?5h;a-noM$P{<>Q~j4o7u%#P6^o^16{y*jU=-K8GYD_dUtdj4FSx
|
||||
zSC0C!DvAnv%S!4d<Yg@O<;m`;oSw)=Fz+hrL<mY{rBr8j4pi^88FX3}jKrYUP)>gk
|
||||
XB^)11aoGMJPCqWs%IS0YSv(eBT&%T6
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/hooks/commit-msg.sample b/tests/resources/small.git/hooks/commit-msg.sample
|
||||
new file mode 100755
|
||||
index 0000000000000000000000000000000000000000..f3780f92349638ebe32f6baf24c7c3027675d7c9
|
||||
GIT binary patch
|
||||
literal 953
|
||||
zcmaJ<TW`}a6n<`g#Ya}rZCaYOzy?UGG&Tf#VG`?}7@eHtB=MTqneFUS%75oLS;atz
|
||||
zm&iUo=ey->y@-{3h^^Cx;#d0zEA@DDc$nY4ez&|=%jTg@_HU*ub=!!y$xW09TSjlj
|
||||
z(`I@QCsM`!9&80$I98wsQ8yK#)Orb<8re8FjkKh630D$QUDwi~(gkX=RunYm$rDjk
|
||||
zlp%RUSnzA#6yjdG5?T?2DcYKp+v_lts0ljn&bh3J0bD5@N@1UKZ190O6ZeWr-BuZ^
|
||||
zWRebCX%(%=Xoj#(xYk1Cjtr!=tyBesf@m6}8zY6Ijbz9i9ziI_jG9Mv<Cz(ymp*>R
|
||||
zDH*e>^ga9IR?2wrSrAVm;eButj4<aWB@zzNl|1Wp@4;}1O?MUF>Y>7(E2?b~jsu>&
|
||||
zRKCJ7bp#19sqYh627wD%D9R$8=Ml$TNlumDypl~$jBu*G>5fIR^FB0h0Ex&TGZNr>
|
||||
zL5hs1_K>taRb!|ThN9ns7^@4MXKP+6aGI_UK)T-M#rcP$;kN(Vcf#P)+5GzWa{l@J
|
||||
z>-E{`$1iiNVYxq27}<DnwLRXQUG0o_hw&da-s5T#H=`Y9D_8=eTZ?cpWatp#a1vs@
|
||||
z2BjrO)z@aTuI#g#`)oJcnhM7oYLT@~CHX@CNXv4>j;uo%;)r3kJI2xCFF~Ux;$Q%)
|
||||
wjbk6JlDCM`jU&P+UVOvg`|iYl<7~9k>HHB4I;pdlQ=I-^$DrHaN$@lH1?P!0U;qFB
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/hooks/fsmonitor-watchman.sample b/tests/resources/small.git/hooks/fsmonitor-watchman.sample
|
||||
new file mode 100755
|
||||
index 0000000000000000000000000000000000000000..41184ebc318c159f51cd1ebe2290559805df89d8
|
||||
GIT binary patch
|
||||
literal 4777
|
||||
zcmbtYYi}F368$Xwipg4lq(BeHMvzvH-4;n7DGJBPqq#tw3aed8+IU5-m)yvL>;Cqh
|
||||
z8FFRGj$`9CA8ao<GaSz2oMCnz4Rv-gw9b@j_$0GQT1?XYi|;S??Y`Z6`Z;}C6#27N
|
||||
z8ShS>J?j^$%==FV``-=rhLcPW`McSytRm~mEO7_&_cAVZrf1fFy*ha@8oe%*-aBYE
|
||||
zcjzZg>LOkgxuUr-XJnHyD;zmPnRaSc#!k_P*d_BttRdc+J6G7za5#+<HG#rlmbrN~
|
||||
z8DwU-3}VABEwM=0VLP@^Dy6ERR5_J6cmg|GEh*M1EliqCGwe^ZT-iZ$2Yc`4!I#WZ
|
||||
z5nGGhn7*jeW=2ydsmfAmm#=8AD<<;TI+#z{Q)kW;yE!%GB6f~7EtEMLdM47Qaz*8=
|
||||
zIObA(VVj-VG{Ax|66d*hi`+bRG>^Y1nkc2Oowk`ya47uUR3Feu?B<phm31&PQB<lt
|
||||
zb{W(W4wf#Bab%|Q_tKPS?3^o=5)Z8^Vh(#slNI}pO(f^|{U0GZhLnycSaNd&h?CaC
|
||||
z0XklU6^<ky6rES9T=na$M8P<_)aKMAMo+UDewAu4wF{#&6diFshiudixAoh|&0<BJ
|
||||
zR>(w;S{(VYzxh}q-=#zP@uxSx{wbyPUMFU;K(06)$o{07&3yI?q{GqMcQ1c_^M<0<
|
||||
zF4acAV)Il-V(rCTC1(;bsZ*}bl8dmejAk~yb`B}!^0;g^(o9kGUfZfDOvyp@x4OQt
|
||||
zSgWh6T|3eq;9MFs8-#z+FDM1h(IjRUP|``PxupgJ7CUHOH90gbgl^2~97`?_X{P))
|
||||
zB*$r1cDlF-%azKND}?Gv`2K8-9v5e`gQoft=j?T<&a13c^!wY_$D`5z-X1g?ty&6-
|
||||
zQN50{8?bUk9AI->^W@~~nkOghHIC2YN+<JiT_ob7ttND1oh`HH28Y+OV~HedG&uB`
|
||||
zy}rA*r_xT#bR`Q7-*)3t*(!Hf?jKzyxk=8hdi3L^d<p<uU9q_<4k&xEr4@YWv_vsW
|
||||
zp(#32bYtA5u|s#1+}h`0kwpz4kUD&+>AXkLQG_2-{Pq3%{`3KUMeG$iIn%%^6*NYb
|
||||
zn|_BdV#C)n4565Vcc<EWC-nglECZGy!j9I4&;hUCzr(?6TftH=0^@!mI^E@y5HZw8
|
||||
ztH&kaSNyg=O6riqR^MPOX6oD__Jz@K;*icS)?m$!p{S$n;*EwlO<L!d7;utu(W9D!
|
||||
zaF!B~xV^2i?wH0s?Lw%Q)(`aPkajs1XojlPv@Y-d5#YFg#KG+!r7A(dJLnkiJMs`p
|
||||
zV|_=d!upN{TsxH1?sZNdzxeHsmtzTV`1{pykJ_~+^*>X;uT8&z3vSi!HXGbUj2B!R
|
||||
zdz~&#<?<tHJql=W&((YpzS06y-Z6Cn^H!*9qH?pOrD~(GV=JJ~z{tpLnGK|xp&C1`
|
||||
zsbn7O86xjF<~G*S1l*;;Bh%6><Up=oKy99?62P^?M&22U6QFRBXLb&u%=Ur<74wRy
|
||||
zMRG!f{AvZ>fk#L-&k$fLwo$4?>12g@AXOKFekuo#6EHB%gmpD?1eyh%N8s{2wGoTu
|
||||
z*@6cEZ^ZW!FAF_|JL`NkV7k}0ow|-2jHwbgH0;c@Dq*o?@&c*HnGdyx6^su8Qk%2{
|
||||
z*ye(dxO*6-&>qn1+zw}tc6;=sOX{4WB=VqjTS^))y1jlX2Q;=e!qMmFA5lC$#;BxC
|
||||
z=Y%tRpWxb+_uQAvAw7Q{HGV#R$xb&udLCzZ+HN?kTyB};1EJ8UlQ5!>5eGW@)RX0n
|
||||
zkjj>EF!3=0Gl^8dzv$B^NMGRxJoqN4A`xq-@wCbrx*u2NmIJ1<fUDQ=*^)h6`vzco
|
||||
z3F+ro$F!F6pc<B;<;isobIgbVGKUBByoQ4#CO({S7g?<Dh0^!7uJ3gxS=6F;+^gQc
|
||||
zeKi4`4`Fm3p|BU2v{M|-u!#QGCXiM=k=%np0<ZOPQqEjP_nneyOdgEuG9IH&YYPtp
|
||||
zKB_dvcYCcyhyT#<uhUEL$o~!TUz;cb&|`uSM{Dkv%&W2lcpYL&kv)tUvVv?&>xZ%H
|
||||
zh;{|4T3(!E9sY#Ni(wUJYs1MmIc9bl)(4Nl3_wD_BWB>i<1S(LX7m*{Q7PU$muMS*
|
||||
zM!%0EZx-Vw=Zey;erC?SNxF;pY@^A%-krqzfLV2meBp1vWdyArFYn`DD19T)Hw(?n
|
||||
z)}{NP(Lk(o*?gl#B@pP7^*r|=;PIDT4|F#{2Hzh-AL0Rv$6uT;<CP7qxbYms@WU7}
|
||||
z%}TsHJ!V_56ZFF{BfI=8jTBxMEATG376pS6a;u1@c?{~sL<N52`U6fuLkz4P@Mb^b
|
||||
z2`Q48y&!C0&A+WRCJAdmo3u2#?eI=si9Vm47$|`m**x<wKkM=QR&g?C63@P5X@xP8
|
||||
zi5QG2b-Fdz%S0%1{kKvL%^RTdpK|SU^VJ7JCmKdwp1`;HRoGM7ef^k_s_}2An=cql
|
||||
za|{IaxQWpdq<ae&x3BOJq+M5QNIk#C{Nv@u>n|WzE4=slK?on@(fZeGhRgQCu56qB
|
||||
z{+n81Az96qnQjMY*-*r-KV*7;Z#4Q<xfjbcBx_6JAN-HP@bq+eI%HhAB9&vLyOap{
|
||||
bw<Ywj(b#kdcCk7dCBY;|DBNpPjIa1F6*dXN
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/hooks/post-update.sample b/tests/resources/small.git/hooks/post-update.sample
|
||||
new file mode 100755
|
||||
index 0000000000000000000000000000000000000000..9f95e41a39cf974958277c8881ac6cce405ebb20
|
||||
GIT binary patch
|
||||
literal 246
|
||||
zcmXZVO?HDY3<Ti4Poaiwjq^yGw9DKf7mz^&Ly=V5q$H=W^Rt|p_r9s#9Ea7VERo!9
|
||||
zyT9>uJRJJV$M^KdldiMhj?ImK6~FvwJ*L5a){QoM=L5TYHkGO1$UrO3`a>{?Opw|b
|
||||
zG(#59NQ#jFL9v~vgOVkM@^^(^A}onOE))yWEwhIlk&{ZyseZ^O0b=w8&O=BK{k<5B
|
||||
k^Q-B@eG}LeHrquz%(SVEp_N)VhYZikCW__82JXfD17`J9Qvd(}
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/hooks/pre-applypatch.sample b/tests/resources/small.git/hooks/pre-applypatch.sample
|
||||
new file mode 100755
|
||||
index 0000000000000000000000000000000000000000..625837e25f91421b8809a097f4a3103dd387ef31
|
||||
GIT binary patch
|
||||
literal 481
|
||||
zcmY+ATTa6;5Jms9iouO45IBJXEg&Jm9@v1LPHMM_ZR|;#6tQ<EeSrA%_2`_rGr1_8
|
||||
z?aM?yVtIc%-@9SGSk&8x=grP-Lf`7!^=$7xgL=|ysZ}!av6zL~ywui}<2##V6L@!k
|
||||
zy=p^)V7%Wzs-g`9<Y9}^)&uN}BCrXR_T3@Z2$gSJON2`X=mAs+%@7n-2I}ZrP|TFA
|
||||
zvJJGDl3HPLP<@!Q!}zXQvey#qEE#a#$vs97i4=A0stF@YQ)k_ZajaoS^dVYBc&37_
|
||||
zVI(L=X<V335r9~7T<;|HfKF+yM}}LB9d96V)Si;sj(;9Rh$#P>h$71hSXq*MxP;V&
|
||||
zj0cY7SCL=x4`a46sF)C>94Gk%=3q$W2s;j6iHtB2$R0%gix4oK@&T~=ALd_o*CKxt
|
||||
I-`Pv{1Bpzc>;M1&
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/hooks/pre-commit.sample b/tests/resources/small.git/hooks/pre-commit.sample
|
||||
new file mode 100755
|
||||
index 0000000000000000000000000000000000000000..10b39b2e26981b8f87ea424e735ef87359066dbb
|
||||
GIT binary patch
|
||||
literal 1706
|
||||
zcmZuxU2ohr5PY_N#pZ0-F<{-<Zkx1j&cMNSQ3FN`GzR*R1O+9oPV`BnOj7q@1pV!u
|
||||
zrF0Hl^i33(yR)-1d-!H%&2|=|^E~_R{N1zNJ-&Zmt-t?iwXv&i+ZN}Km(TX8Q$H4u
|
||||
zd7(m`|1iDmF5k@xV`p;C4zojASmLc}yN0QDZbhN=ri&CEt=XGuN1IwjGJ#a#`t-kG
|
||||
zDqY)}7+Ft|;YKwLYbtg$S(-TBO=x3cP1cd}%f4kB!<6Wu-dCwz-)KDMEuM^_Hh*UC
|
||||
zC`1)|)T<(U6b`+yOH!6p*Ll}@qastwA*dyjsgOf5C=?LTprfORG6O{5L%@S0wyHpj
|
||||
zu|_A-=NWnYYR5m7kvm6|&T~GzoJ_OKR3sgFUgw?ifho^NQhvK#{6g0=&Fh)%n}#m0
|
||||
zk1sNmwb_AMDq};OOGw5|;OyX#?yQMMH6yAk(x$3tjFjHE?c$E2XC_xXav8tnIeIG?
|
||||
zYMI|~MLEVGkuT*>v&v-X^RA+u>k}E$4d&uD7=g_fA8+pNNV=4s0|iD3p<=DTXClTS
|
||||
zXV23tJ;ECmN@M0j@zUAKEYW@3bv!SeYZ8ZH`YQNTApFVNc;F|9r5p4TqGs=>8E?6y
|
||||
zi|gY{iM#PG1nL?UE9YCnWTk72kgZPG*Usqw!~Qd3c?~@w2?%eg@~)+VlSs6N5Yf2^
|
||||
zz;ow<fjf3n`imj7u5lnzt||q9QFM(H@<3EJCK|l5P!$yDfn~U-(5Vu7s+GqxNKyeI
|
||||
zP=-Oa(KH&gK`NhUa`cLj3B8%qL};DR7dk!`A^h&3-hFB6p($5Ufy^tGir)3et}qK4
|
||||
zpkPKYWzC+?=&gw-L6S)S=<lfq)%uLUAa%~81Jf9hk)w~U!DItnoSy`m^}#38M}K-o
|
||||
z!PqisQkV!&z4E*V01ro~qlVK^0BI`pLk6D+)f~*y!hCvwHq8zY9BGh<2s$@b^A<8G
|
||||
zRaqk}&qZyyv&|0QDFPA%h4TgF&vdlc|JUq*=>F#K#r^&KMq1A`oqVGFpD&-!Pv|Rc
|
||||
zO3KSqA@h9nSc%bm`0)Amk6*J}@14J*1-219l%%7D!Pl}UK>|lVi0Dfgu2jN3WC!uL
|
||||
z0ej??b2iSehVgdnWHmZV4kUo*QL#aiIp}U=9x)IXk}JJ7VQ;CI9Rtn5e0VcjbY<bp
|
||||
zv{}WlG6L;H!EzFKdY>cVt+`x5D+svCGD<sXw4|)E|JX43I1_3P(sI4{wj87bPSrNG
|
||||
w!SIr>;Z5hm*<gY+X;)Ryx4=nzaab9m`bwE*^s(%u*E3HbUuOR@+&s_z1=MCi2LJ#7
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/hooks/pre-merge-commit.sample b/tests/resources/small.git/hooks/pre-merge-commit.sample
|
||||
new file mode 100755
|
||||
index 0000000000000000000000000000000000000000..d8e8ad4bcf8337f23c78a98537ecab4831301202
|
||||
GIT binary patch
|
||||
literal 473
|
||||
zcmaJ-O;f@!5WV+TJd4B0whSt$H%Dh2t`2u6q1!glCNbGU;n%yxdNsG~zR#WA6xIwy
|
||||
zWEZHoU#u?nykD=Y<HPgeWDkDm^kTof*l(|%^gh!nHrZpo^vhMDjV;E1GD~K7wV*+D
|
||||
zz9lry9T0cHcm_KhDVXYvQ==FrLTT4u=bEr{U1yl7%thf%wJnv<XQZ`ZbQEezaWdS%
|
||||
zI;c?h9a)Y!ux<WK8rQd_aA^?61hv_Pf<t7*z1USuL40FxYz<|hybsO?qnN}aMpcuf
|
||||
z6phFw1%Xx=wUk(m>E$jSEQZ%SQ(}oLgslTvrKK@9Qf#b!hajVFnp9@oIix;NcI9Wk
|
||||
xjnh0ya!AWet{I7YpD;y6HXyzI*lfSvH=o6*7mJZPkuaYpm>vzZ`wyGEBtOQPo|pgt
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/hooks/pre-push.sample b/tests/resources/small.git/hooks/pre-push.sample
|
||||
new file mode 100755
|
||||
index 0000000000000000000000000000000000000000..02cbd80c287f959fe33975bb66c56293e3f5b396
|
||||
GIT binary patch
|
||||
literal 1431
|
||||
zcmaJ>U60!~5PUX&#a1@z9B{IIZkjLT0t5kq9#8~D(I5{+8&J~9;#ndUk~-ZT`r|uG
|
||||
z$#K$$J{TsK<m~Lsu9iP+t-0TZ=sa(K+C6);54X>s*LP1}9!GoZ@4I4myMMG_di|of
|
||||
z%?llx{O8TS-#^<H#%^V=)RNv>;(OioEmPy%kwWQBA1OMzV{hsQ8XFzS1k!~YQoLa5
|
||||
zhtP1fA$q6VmMbbAC_9)4I628k*O5J$NR19uHe4QYDK<==I~SQk)Nu%xQ~<Hy8U>KH
|
||||
z53w=!ke(FGb_PpnZfd*+hnXDTn;2*`u^~;?+5C~cn?bRka7NR%06%e6O91{MAgN6J
|
||||
zmlO8{Biw4&wr&&(z4p3eln`E}XR9m9bNYZ7Ibrg(4yZIXrfgD7N*AFD7L3YSM#j}%
|
||||
zo__rOS5fr;@8UM<6cl+cv_$YB$PQ&9dv($eM*))g!_cu!QcSh-mqE9i#QDZT)=o#`
|
||||
z?8!RtE?w6p?GkGZ-6yt_p~5~4ecu|Sf^)6096%h*q-eNiEA1;Xwg)p~Q&iGSG7-IQ
|
||||
z9aII&`ps$WOojFA`*bjG<mBv1n0hcYZWN0~(X01-hx(ExqWqaXgSk*@-GMp|K_3!5
|
||||
z9|O21N3%~izh(4fbp9wzd+!b&7cVwSP5H00)m5ej-(s=Pl#(90UOhn@OA9u+D{i@r
|
||||
za4*CP0I#<d-)-#xq5q-iY5nIef2s5OuQjcA>kFk|E@sHHuD}W^d`7YJ3YE^zrQnqR
|
||||
zGoq?;YGKe)93o|_=^f%3U1KYZGPOXRRxK7w`UUbMMa3<86OmVH!EKP$8RCrn9mWX+
|
||||
zC?9yF!fRVLmud3hF<}x;;sR}f(*r}6Gap3fR6zLHR~kbMgD{98N`L+r&?3p~*0+FX
|
||||
zcAL%j=(SO}xTJUTvA`&Lf`2mv4koPG9&|<CA~EHbWHMoFPwT(&U=7t0`RoFZPO9Kq
|
||||
zwwe$i=T|AXY#hD$aZlNMH`wZ%gwilGJ(zeYpOn*F3cy0XKXio^Sj#WXx=PWV`WGaA
|
||||
B&~*R+
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/hooks/pre-rebase.sample b/tests/resources/small.git/hooks/pre-rebase.sample
|
||||
new file mode 100755
|
||||
index 0000000000000000000000000000000000000000..193ee3f127387f469894675f0d621b5bb9c275da
|
||||
GIT binary patch
|
||||
literal 5006
|
||||
zcmb7IX>;2+68$XxiXKL@ma;l5d2^5Ba_rPh_DHI-u1#&_upttZXp;no03$20|NFiM
|
||||
zK#D#xQ>!Z3JkX8T-LDVm!B5j7y_{;JDmmTTef+K1oIiPzeEr+Ai*<2PUgnG4^ZB>p
|
||||
z_fkAvoR1emuf~ri^K$-px=4#D-v<wZ2Xv&$O_eTJh6d4)=DWL(NBs9G{k<+yMMw0T
|
||||
z$VH*-+LM)}u&m^`l8~1nt(3Z;R8v(KbY5#i3z+~8h0D}Xvq&3J8BMWDizPNpaeb~9
|
||||
zBN9bSkthfXzskapf%Zt{*e#}{QaNiaAVZ4{$;;I6<vKNhO@%7P-(;l-x=pPoExHC!
|
||||
zB(hA#cDdD?s4P=!)=-K{<kHAWKetl-8I8wwO<ihJNs-$dEvr;&S_@6E=mNSJ5;mg#
|
||||
zyb)MbqKH<one{qrV;ZQ6WL}yLtyi*ekNLf|uC6M!)Cmq7*l?g0d6`MlE49|}>Y9w&
|
||||
z`bCv#<YfTKtb`!}CyNYd;|(C?vRVQmWOfR9X?FZ#=f$No)^#4>2zVn=YnJyeNey(Y
|
||||
zRh`9vtLw~A+5zsjp|W0Nsa|29Rm!B>OoG5a+vi;ari8O>KkU!KAWg_fa3btK2x*_@
|
||||
z0bEc7J;Ubghm}n9bOi(Sv_B66nQ7U)J7f0fO}<cB8i8vG{r39s_>8Wuf*uorcIgEG
|
||||
zOHc|-V6+HlRhOP}?Cn?@5iwSl43abmBA^2lyL$+cpabCGVES+v^j^FO_}?FIp<qP?
|
||||
z#_?*YMHIkyZxJxS;h@A)WDJ0bN&+F-#_lFjAf^g_Pb#5YXqYe|dZUpa^zI)VOBXQQ
|
||||
zAMhT>%En%Ll?Z*7*}TwrZyg5OSZ9rY-`aU~Mc-jjv{Ll)FLMgtB4ujktfQ`Xhqrka
|
||||
zT=P!A;9w^;Z?PqpLwOLu=cj3L>TdUKw2;DMu)`oVkj}<z_EjO_2uWYuvKG==%Zu?h
|
||||
zJiMVR^c30RbpW}<+z;jjoNC}YI4f6QC7d-08*4mCB1>#bcDx4tYg=j%D`+i{W~fVM
|
||||
zVmZ>W9VMyin9c-0KzI_;iZ-g|OyzuG`Yq%(%dvl;ifnVr0;jWE&S`z|rQu=!yHBBO
|
||||
zx`OJ;oOQ(KKM<$(bC38o>pD0%|HA(E0TRw7qj$fJ_pRN+7Nm>dS<q{AcOz#-;d7_2
|
||||
zL$z(_nhH{vOzT(}>C(gLg{(`t+5Z=?o+}wXU4tHy+&%F&aRhFebeEhR2R5|<c6J);
|
||||
zEY(q5F5<n*XP0|=PtPBn$B)V~d$Os-?&8UlaVe_|jdkzoWNsTP-_uyq4$R6o<h`&@
|
||||
z{loXa{^#TF=NJBYu9qB?hs}x=It_O}ZjX(_wy9@X(lmkRpNi0{8T{O_b_j*JC^_SM
|
||||
zz3G?1$KCNWF-|`Jbx2cQ-y5LW?Z2eikngTZmsx5C(@({8<l)Ue+gIp##Mosfa~iZN
|
||||
zZ|NLN9uE6Xaqpwk+@D+f?$tg2JRCY`pwZwbR9OvEn*zYm`ffKIzl4{r{ZgjfpbuX)
|
||||
z_r0=0y3)T-j$gljPyEJO*6Y<pj7G72aLoqaTpc=#u)*xJcVUm0;k(n;r)^DQNa6Oj
|
||||
zWvlHEGg~lz`Q_8`yQ9<BZ;ylE1Z}bD<8}<uB9Y5lRB=;JUD0h?_)4H&EhJjvwzJx?
|
||||
zr<o_vk&75j_5^d$8Z$_Oc1=R-I_DlNZ2@~81oV*J6%o3RuiCz}^VEW>$#Ycbp^w@t
|
||||
zTl%=f1t=w+WpJzF<|CE@?SCNAz)%9?w33lQ8vrHJqPfH9@}qs*QXOG71W=ylx;wOB
|
||||
zcx!Bj^)Yy6WX$a^vBkBJ5Cob<oubBW+a#9bX{0bkMTX_2sIrs`HMk@$q{dK5h2-g}
|
||||
z({`~#gm#G?c#>qlaDx_B0c<3b+8)f84LCrt;e;qxc+7>VbwVK{skNv!wvBiTa^9Iu
|
||||
zkwP;VK)jH$WJ{`MRwAA9fal!y0dtV;FWg8PTkWU>CwnqD>1ZX2B@;$DlX%C5MI+}{
|
||||
z9xQVnffR*~v2KAUj*hCd<gRCjR7~6Q(vF%VT7j97iv3j4Z0p%mU`7t061~l7b$!#t
|
||||
z3*Pveii}aQ?QD9aiX>gul~`bk#mk`o>zk9)<2Uc8?hUZAEvd!`9em)~$Z)zev>w^8
|
||||
zyAgCP_$&Y)7HSQ84`xG}OeTavaEswwF|8Xpi5iZzZa@hCiv(J-%bfFC&)HLlO+Rhw
|
||||
zG6g?9eL5&A!SuJnQ6}LxG%tU+@vZ`i+!+Rz6iYvsTdhnPo7lW{m-}{hya@viX4)XZ
|
||||
zngaw+j;gloB#|UwI@8sOmQpc`h+bicQJnQIB5eifIMQNgD2+oai33m!34~xU|0Azj
|
||||
zhu$8z+T5^;Pxx@d{N)pzOJLSa^e;aDf$W%N5XcOf!mGC9l9j$Ev2h6N+6ZQC+CJzl
|
||||
zaM7?S!SrFLS2DASjj(h6y1WN3N?|bmqmyzm!&nLoE|`rKBOc_yDF$a#FsUn!IQf(t
|
||||
zdC&Us(kQz*7mv<H12p@UD8XaLXdK{>H^j*^MC@>wTDb}g%~sx*ng#>{@lR=XG-Z5_
|
||||
z#<9*Oh0joMzt;nS)ObAp)347`D=}r-;nV!TbIq&xrGRGsF6fZg+!VkfUei@_&l-M&
|
||||
zPqQ+Dw)RV}+)I8RuqAxa`Pv8e&!_gXS=e2-un>=Ktn}-;%lLZxaVn?Q>yZCb2R3Wk
|
||||
z77zr%;Rq&h|2ncqyKYmFI0148JVY7Q$V5p=dWj<MQ<8r+@QLq+UROk&%quICq^O2C
|
||||
zp(17882jXI)_GaqzAY4AjoB_nh8k*r1mJ>+Qqpu%i|xp2<qF`Tw6&3`h654ceBjZ7
|
||||
z4>C=WaOb2Wudn^h0EcD%$p9YVU1fnoRV9`(cy(vv6K>FXS!2jY>1GnU--7)4usH&K
|
||||
zao*&P^@9~YmUe|ZdLW@C>H;!*<TIU}-!Tw#3oqZA*99}b3&uHiGO<{I6!pMnAiQdS
|
||||
P!fA@Yk4s_DjDP<F98V`a
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/hooks/pre-receive.sample b/tests/resources/small.git/hooks/pre-receive.sample
|
||||
new file mode 100755
|
||||
index 0000000000000000000000000000000000000000..32b597f8c32399017c3d7d51134d1f6490ad9575
|
||||
GIT binary patch
|
||||
literal 601
|
||||
zcmZ`#+iu%141Kn~f>Vt3>Nw4M*;=?j(TBD#O@XCv0|MEhA;z}kTFRv@`tPHhp=&Yh
|
||||
zg%Zhg4i7o_k{a5i&f5;tZ==%}^Sn4aD_6%qs<o-wO_Prn;}`SPs_*$C$(7T|$#C3`
|
||||
zPt%-C8gelZ1GqAP8`ZQmg0{8-S9H{R@D>_XAuJt&EumdH4Yu`UjT<s+s_~uXh}qA8
|
||||
zg|_HG)%7Pdc&$7*uR0HF@)~vmFjqyD?XZwCbLen^h5t)sm9<90Oa!@Y%8!~rF8G?W
|
||||
zkzmCF8kMtuuelMHIAlqqnm?72LeGM1J4`w(kX9&%LQn}ForlDLjBoCyvxmo@x3kH^
|
||||
z^loxLyPiDWPo-cBMnsg2M6}kuPGHEGBqVkC{D&9Kt%xFAsTw4QC1$_=fwBl=3dI+e
|
||||
zaSxI}JS}=Z(Ec80eF`!Zq3mqapXI|UN!a)t;@4hcu%Eq2xcoW}%!><-+XHTuHss+b
|
||||
YOmM2;hq8Egm*4=7_P9T{21QBYH*F=mfB*mh
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/hooks/prepare-commit-msg.sample b/tests/resources/small.git/hooks/prepare-commit-msg.sample
|
||||
new file mode 100755
|
||||
index 0000000000000000000000000000000000000000..b1970da1b6d3f42f00069fd17c325de72cda812e
|
||||
GIT binary patch
|
||||
literal 1702
|
||||
zcmb_cTW{Mo6n>t6#i?x6xmZ$SFLf{QfG*3r0L?Pg?px55l8$UTGO3bO;spKi{V3XX
|
||||
z))weX0X>M9bNMcZ-6yG%>(n}JI2|25dr<ew@wmMG{l(3lx~bQz>}WZBP@ih?JX^+@
|
||||
zu#5O48P>yRX(m<b*PU*sORp92TCD1dX`%HE+1$w5k<(Ngu7zQ83#MGJR?<<W=d@yL
|
||||
z#heqwo{FmCg0g#x<~R+PBD#}q(MBn;V$x;%UrJPP3*l%XtlvTWChI2SfJ$9e`YvSj
|
||||
zRSOQ?NUgSMLI`3vL48YBHzwzVXoe7v0ef|0YHgV$N@?N(-R)rPqRDrK$n(%+OF$`P
|
||||
zWdjC5N~`#RjV9}aYwQ4_yF5O-$h2`>fDIhYP)doc1&TADZa@ZGpusJ$6G+e$ZMcmC
|
||||
zoOosDQPS}l{H?YPsq(4;0SGkATa9eeqAaDcj<jNAU+LTSmM1jo(ti~T0B7acJnnTX
|
||||
zTarYy;Husdxal0!S<ba8=uu&a*SNYtr7|d7$g-q3_JHER2*oBsVW~i~XXdMx%LW~0
|
||||
zT*960LF<qZ6K&FZ;vGmtyywBU3^Q>q8n2wALbFwU@2i@FAaRV!=uw-nwx1gKn2SvY
|
||||
z>Ff>;2sg!+Hxfkwv1lsiii=p6WenF=5)6LZc<a$zDCD$GRuwvG4Fr+B#h?#9gTbio
|
||||
zk#MdxC@WY%zSGN#i}Ts_#q`bf-{)`7CcWeB*7WlIyHjioJJWw&A5VItPUq3^9!r}S
|
||||
zbykelFV-VFvcr>QaZ=aS_}+-4Y&?!@HWh|<^gJ21!|T@+%On#w6azxPHV}XsRbe*w
|
||||
zR_TZ2XEsQa1lPK~biYqg@0-RW@5J1@=<87cFzEUABdCoFH2CZo?}l(Z*!OFqUxo>K
|
||||
z_d`l#4d9|H6;VPT{X?^{VJ>oL|D7K{BJwwqB>`YcPoGk+9hbvHnoQ{EM|kPgD_`wk
|
||||
zKm4#2xu;-y`RAm!=L_BnLvJ8$AZm8@?)v<%vwvsw8AF2x6!mTT;c72A_~U9nIq0ST
|
||||
zv)N0!I!^1p=g8-RQfx5)E_Mb_4I2vtQpI30XZ&t<!9D6nI|;V7YR3)l6=S~QhuwLQ
|
||||
g$e&^kTY-M99*<-Iw@(78*M5W!4}U}|8YyMx3->-9h5!Hn
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/hooks/push-to-checkout.sample b/tests/resources/small.git/hooks/push-to-checkout.sample
|
||||
new file mode 100755
|
||||
index 0000000000000000000000000000000000000000..a80611e18896f212c390d845e49a3f6d5693b41d
|
||||
GIT binary patch
|
||||
literal 2840
|
||||
zcmai0U31$u5PXh)#YOS7cE^-rw@uolNhe9&aUS|HtvhX>G$45tVUYj>fRdF?|9kfU
|
||||
zNR~aG=E)WbEbeyq7JTw}ZuHIE2kUtL<<n;$&G!2F^Je|kx2ug=4L5!H^!ogx`7o$&
|
||||
z%Il(3zAe6<oe$^F=A|}s`8}CDp*M#3M)gC-)LOeDUpYMl3YNy9R)I-T)pE7sy09aj
|
||||
zJ7%&5PnSB-F#2{jc><WLR{I2izuK%VHc+{hRfXe<^_q)8RjcE(6WX+F2)iAtDtI{x
|
||||
ztAHVBq)eSp_E^xcV^i_5KLIHA$g{zEjh?rsacu+(Eyvve2~Kmw%;n3g(kWB56j~Js
|
||||
z<yE5tYUsAR&PY0wgRvM8x!zgLX8SI!eVY&}YZ|>AoeCNptd-NM1aZLhESzC;I`+Ns
|
||||
zfmNNjdAp^W8#Q*}l>CT7RB9F5(BbI8ly2l~+E};JW|>&d1)=epZ-8vm8ppkbEVn#R
|
||||
zt30a5A-c(YQR8eM5%;|UAnO>rt!&@x@G@yp+92%w-}%(5P_+P&Wf_zb$f-Qrl5(7z
|
||||
z2ah(bkE;!DK(&aAMuQ%1TS>ai?wSXCOCSj=_}8x4IbCx^$}9q)<W{Y<9o<WqZo^oV
|
||||
z3bR;Qa%VT-M~kU@2n{=+=)C!MD`12;@<F*EtPfV3K#g^1%&ggH?4{m<R$WEKcUI4%
|
||||
zl6{ik6Bo46pmNjdXg5@?hn;SjG$}rr3Gy#-BGgVD$8oD)OcK(oqca)L_kk)UBZ_&6
|
||||
z*ourb#Yc8l3J+uSda_Y$GY--5Zp3QK9w^?P%E0xb57?fY+Q#+KU4)+R>whwv)SBt|
|
||||
zg#MX4;;Oau`m=MI9(^&zPbueY@~>3*ixX%mvR5m_1&nAg@ZKvY1E$O}&EtLiG;mhV
|
||||
z1xhMIm~fGjmf_#{62f`y;09?I7M1W2tWQvz<}i9lR>OpQyUJi45_&*pQus&EkwY<>
|
||||
zI|ZAx=*3i9a-)g)hXkvO7>UJ5MNgL(Z+-wpXVcgbSgpmFmbf1~DPA(OVGI&FNLeIE
|
||||
zNH!_aiH$vsif$_j7=T2{cS(!DOI`~bn@)vSd-0d7xL=DF;UNP|tW}4i<qWTSNCe|y
|
||||
zg9kV&L9g;FhC@tvsVu#V-brqShHy2qZpA!gg{ZTY>h>DvHtu9tY_pbJ6x(6E*hxgC
|
||||
zzNDao%qlr-IE%YGbS4hF!n!on7#W3$bX-_hbZAaws^nHu#)Dx=WzdbJ>AKzAy@T$x
|
||||
zSWE^x9+|TEHVEPyaPYa0DOChp?AeHSBBDbZNokQpAY{lE!7geZI=jV)G^2@<iI(N4
|
||||
zJLJjy@Y<VI$yq;3bVpJICW3D?YDMDl4Oe5pDf{d`i1_3Qx%4uX`$dOz<9e}jm2B-u
|
||||
z8-?%!7XuiNF2O&MAdl-iw{drG+$F^zT2Z7WRV#c6;IRZEf>l)&91Zb1+`T+oq9wWF
|
||||
zRV~kGTGce0O~p^6mj{kT5kL(pv>r;Lvd7VDX*P>A^Th`$3cWO<svk?_?FeP@458*2
|
||||
zA1PqUOdd%VP5&4~ocLK16{1GLll64c=mU81RMFstpnMoLuI7hNIE4N)U%h*#<Fz{C
|
||||
z9#>0<l7lRrls|W(8=W1O80P~6+U7<4BqC}N4-4GR3w#{O7YmH?JxwJfru2d?e){$5
|
||||
z@5R+`7Z;1)FW;OkE-(HPisCS;5F4O^Q>L81p4Ysdo3ZP1(SrR-peEdTo;-@bkB((G
|
||||
zPHYQXUL!@Q$e(OQ;R9r%@Afz+50I7>*^^c&&|E*r-jN)LH=pM4AqMwWxSv|nqjddE
|
||||
Z4{_hwv8!W(<d3>T<BC%y-6O5i(|^OS%`gA}
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/hooks/sendemail-validate.sample b/tests/resources/small.git/hooks/sendemail-validate.sample
|
||||
new file mode 100755
|
||||
index 0000000000000000000000000000000000000000..effa6f54f2a5c1f0cebe258bf4805904be4b4852
|
||||
GIT binary patch
|
||||
literal 2365
|
||||
zcmb_dU2oe)5PUX&#g-Nv2{5JDX_MA~3%Iq?8g*kpRgpYZIFU=~BI=I5J6e{T{`bz^
|
||||
zk+$48h#x9Ika!=nv$M0y{clD}-j1x(hDWbnzP?l2k8j?TH{brS+Nf21yPm)Nczma>
|
||||
zYw`X3V>TCdnSD1ru8&`j=2DIPbCT@SnIgUw>$+lEYP}+x8(BMYnr=iT3*ndq)xzaV
|
||||
z>I+qjv}vC#8_9M+b1p#uNS0M0)q<p>8!3p_LRQ0MA3M`!2foxzRUjbFY@}O~(ki=S
|
||||
zqscnq8cU*dY)D$$cqE}n)V0yIk>CNKHCrndOtSP*HbOb;nbwAHSb;R+gs^?^Dve%)
|
||||
zoW}t(*D}$>O3ab0TS^-;J|u&sb-PkZzo#kn*#xYt(;<xzKW(YtQZ$u23?yV#kyh1~
|
||||
z@+Idh;EG5jXx8@%<;Y_W8SA=|T;MPQ)TB!!<QcbU)YR4)79eeeg4|vp-8jm%Dl3^I
|
||||
zRS7+i3>FGuwzSb^g&RDiGcOz9TB;Hu`nJh)$W=C=XCSm2AY=$w3G3P-V#Oo+N*;#2
|
||||
z4ijJ-pBZ=;T(RTgp_HYrD!uW-dTMfkuqY5jwOy)~gM;#=P^i{!l7`pXTS^s(&^{RU
|
||||
zydaw}OpS#^D1cXM8?FW+fh`t7D(g;yr6|}fdaNtZBx3hlK~IpkTu3!Qq%R+zAo#<L
|
||||
zU&m+XIFB0>t}Bs8^3$vHD+-TGT@`F>H1Cc#WAVW;&$S6%fE2d6@kLS0g&ihIM{}0z
|
||||
z8#XhD>b>3{(BH|Px7}&lJ4%y1v<t$W+!F|W@<gaU4;MqSHKR(wdML<XnCv;zaPrSi
|
||||
zxVCvek26-bf#Q!H+uAgy_{e_1UZCq>(CihZJx@8MPoGdl*BJGD;usf*iS7%;{Joe;
|
||||
zNFuBa>*~o&qETDPo~u&~$FxE1xb^x&(CbE`Y3GfsibL2rl+L;>P6j&Y3U>K$mkp*6
|
||||
zd`Q{<^+^&;GskGjwD-%!boR&i-TC<Uvy02w+l$Nb?B}aL-%ZDpluqd=K_@}z*fyuV
|
||||
zzAs1Hf?3v$k!X7GTc8Q=r`WJ_Uu=>A9UOR|@=GYb5x#<f&WSMH%mCJU<#=7=qFdL6
|
||||
zG?Wz&6z&J<@I(B>+dhd7fkaVIR^pol`Mv+rUbmZ43dVL6^S7g3{NsPiG$iy$5EDB%
|
||||
z6KIgnb$H(n&t3e4E6d4V7w^B?JS}JkG)PM6+X3Co`SQs($O*AA+MG~{S7RJ=cy-l&
|
||||
z>~%3y`tjfx2>uOu<lDGWewcb=oL@}B@B6FCZ?oxSJWldrm%UfOduahk%C0H>tB_^s
|
||||
ziwG=e=ch|FQ0IkN91US7rhdQkXhwwt$gU0WEVDjo=IPb+?6PC=s8}J*ua(Ms))`UL
|
||||
fi$|vMHn?H<rred|1&u#kOoJ`%vx)-*g-ZSfgGvdP
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/hooks/update.sample b/tests/resources/small.git/hooks/update.sample
|
||||
new file mode 100755
|
||||
index 0000000000000000000000000000000000000000..c3430f6fe27e572a1cebdb2f2bec70add0642a60
|
||||
GIT binary patch
|
||||
literal 3707
|
||||
zcmb_fU2oeq6n(aS#jRq*br9K3y0z;^QgkUc^kKlTrB6js&@yE)mPu8l<0$L?`wmIT
|
||||
zrsXba))@gJi@e|G+<SfSXe`CeSQ}OG@sr8ZTUlQ{dzM}Q@O-hBi}GeUom`#X%FiYH
|
||||
zX?m4Rna-0RN2lfK)A3ZuvHcz$L<jUn62D=~vfz{}wIH2VqBLX_O$(JSXeF7H$}q!c
|
||||
zWY}C&R;eX%X?P{%d;|>_tSE3ettp-hLlsZCxaLX8(nU;bVRB;Ce6@s#eu2|WvLz>-
|
||||
zvy(&>Gyfp@+BtKnpqWkKi^+v{4jn_pNw_zeuxE<mRXKx8G3;9pl+45&4~hHW!G@wo
|
||||
za7?X(0B}HbX*ExkDmas*xzV)FxygC8AL?2Z1x-0QJqS@qn8sD7r{bm30@<%eL_gOw
|
||||
z;~85O$Xw2AS}Qp)5ViRUe3|ir8;&&I<B7Y6^!kkNyYXF4EY(b8_5DsTYn_&?wkdEz
|
||||
z0y$tADo<&}nGs5kg2-J=0KlEGPb(%<An(pXY{K`qIZCuwiT|2{8JD&5o_~`o6<;dD
|
||||
zi@J#zCE4={8j%<uy|iutvHu1QKo5Tno-BAF2FwD%%O#UDDum=w!;!PNe-cOFNX4)5
|
||||
zd>TifiGO|)w}OANj2n2D^K=o3j6P6uOL70#cbA{uzWXDlk1wr9GV1X(2W{RuTvjXV
|
||||
zCmd<W?kH^?PXjkbF`XZtwu1B+%4@ZvHIwGpJ*8@8`MWAhq^B|Hj1lzj3R8bVuMpNb
|
||||
zz4Gzk!3T3bY;WEGIww&kq9BYW6EP*q$K|EB-@TH(Fjtz*`HMTO?i+3E;5tdSah&xZ
|
||||
z+t!x4K7)dpy5wiJhlJz~8qF|r8a&-SV7^I3C@_q=P`yt@_x_F-;PQR)fzP<zNN>8u
|
||||
zH%V`94=q3)Dk)PHNrnFC(T1)Om6f{Usj;u1R->&XoCYVK2V3ZlgZuF?N}1+33<P7e
|
||||
z<0yVF?Qoa{l#7q(3&jv=Ab)gpM8A7`p%3InNzSxy)ZER2U0C#9zKpnLY0I?>OER*x
|
||||
z*9Z=L=zI8CN>A_^jYjt0F$psO$sL=38q5q|SG)qCN6{^>RFh5E&l5GZ$pEahnF&d+
|
||||
z5c>64t}uJPkf~_!VUj#&N%nC-gUMj%=@B=!V>&}xtj2%@-mOm#rQUSJ3(ccmc+fza
|
||||
znZ#uxF>N?QN5UrIEd!5RgHEf<eGg}P45aAs(Xs6u!XW9r1I*E6XJ^1movX@xYLuPz
|
||||
z|7xBN4z@b}#x>W#;(nKYF+D<*rdshJ$X-z2OZ2X;)nn@KSVdVhaA?}@3;6gZxb4<W
|
||||
z`9sa`0lR_azMX|=t_(FvG@%w2);9P}SG0u&K1(*oX3};~=<<!N*F$UTSxD{V&6mgL
|
||||
ztw9Ntc2eOF@c!OJytNC4T^#)Ien7-`dI{6s#co~0f^E3J<0Ty)l3xq2u@Y8DXTK>v
|
||||
zozoWSr{{+!h}zGpumG3H`=AvWpm^9kW;J$Jp^Xl*?8ckr`fqN%c|Z;VC0|cM4vSrk
|
||||
zH_O8Yvh85nvJp^;``wo8=z0f`FWg?`>gO#y1hjX1{}rTlg9rwIKia8eyGexA3GnuR
|
||||
z`Rg~XZoW;0pA)vI8=p5!+6sIn#C^FCvR>ffv39h6SCNi9v);%WD;WZ`of_MgwyRWy
|
||||
z-yY%n*Y>X8<Sf+RyB|Sr2YG@1w~%U$o`(5EDkJ|3$u=eMZA&_wxEsy<Xxh2k^tP?4
|
||||
RGx&ZHQs^8zuIpu!=pQhfp8Eg*
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/info/exclude b/tests/resources/small.git/info/exclude
|
||||
new file mode 100644
|
||||
index 0000000000000000000000000000000000000000..a5196d1be8fb59edf8062bef36d3a602e0812139
|
||||
GIT binary patch
|
||||
literal 240
|
||||
zcmXYqK?=e!6h!x)VxWtv*mf_t5?px$aS_{}Hj?C*<cHXeXE&AZhT*-L3ZoI&*l1%Z
|
||||
zqG?zr3TvQGZ__}H4(u*%p*rI=cU!%ya5ugfGATh66$IJHgu1Gs0-<N;$V+SsdE)?u
|
||||
zIq;i$f#WE4f$_MWicZjMEob9LWKMR#iwZq54~QgST^6=i%u0lUkJu-_J**QBMq}ZG
|
||||
Wth_)NDbl|`oQr&HAFQ5h`0jqAIZ*BZ
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/objects/88/f9a6eaa2cf008b9bc92847178621f21fa99f3e b/tests/resources/small.git/objects/88/f9a6eaa2cf008b9bc92847178621f21fa99f3e
|
||||
new file mode 100644
|
||||
index 0000000000000000000000000000000000000000..7d80b8d78e0dc55669d831a6638f48ec9fed0982
|
||||
GIT binary patch
|
||||
literal 50
|
||||
zcmV-20L}k+0V^p=O;s>9W-v4`Ff%bx$Vkn}$!Ay}rnY6F$m-Kg*KD_+;Lx#g4|^&N
|
||||
I02NaX#p`nv=Kufz
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/objects/af/5626b4a114abcb82d63db7c8082c3c4756e51b b/tests/resources/small.git/objects/af/5626b4a114abcb82d63db7c8082c3c4756e51b
|
||||
new file mode 100644
|
||||
index 0000000000000000000000000000000000000000..822bc151862ec3763cf2d3fa2372b93bbd3a4b65
|
||||
GIT binary patch
|
||||
literal 30
|
||||
mcmb<m^geacKghr&@q@?NlP9kSYMj?U<r(;diNWtH+YSKNt_|)0
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/objects/c6/97d4f7a6eac8d7b131673c340bd3cc5bac14d4 b/tests/resources/small.git/objects/c6/97d4f7a6eac8d7b131673c340bd3cc5bac14d4
|
||||
new file mode 100644
|
||||
index 0000000000000000000000000000000000000000..5adfa88cc6b00bb19f2031b8ab61f6d07f9ccdb8
|
||||
GIT binary patch
|
||||
literal 130
|
||||
zcmV-|0Db>>0i}&W3IZ_@1U=^!a~EV1casc=c+{&un1qQN*i9hD|0|m(2n|iwp*q%W
|
||||
z%N;b$hu%cM`$TMo*~EnC1BFP&Pfj~;jZVKXQ96s_PhV<-XAROi+@-v8dBLUa`!;GB
|
||||
k^i<X>XlEv8$>R)1G>9th&t3j;s7J{?^9n<zzF|~BaA?ar-~a#s
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/tests/resources/small.git/refs/heads/master b/tests/resources/small.git/refs/heads/master
|
||||
new file mode 100644
|
||||
index 0000000000000000000000000000000000000000..4eb36d22298f060fd324155ab854d9d6486fc498
|
||||
GIT binary patch
|
||||
literal 41
|
||||
ucmV~$!4Uu;2m`Rc)5uXl$AMP&AHjriQg~T$i(A>|7U^`%mXoWC24Q^m!3%@{
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
|
|
@ -15,7 +15,7 @@ programmatically:
|
|||
1. Embedding the evaluator
|
||||
2. Writing language plug-ins
|
||||
|
||||
Embedding means you link the Nix C libraries in your program and use them from
|
||||
Embedding means you link the Nix C API libraries in your program and use them from
|
||||
there. Adding a plug-in means you make a library that gets loaded by the Nix
|
||||
language evaluator, specified through a configuration option.
|
||||
|
||||
|
|
|
|||
1
src/json-schema-checks/.version
Symbolic link
1
src/json-schema-checks/.version
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../.version
|
||||
1
src/json-schema-checks/content-address
Symbolic link
1
src/json-schema-checks/content-address
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../src/libstore-tests/data/content-address
|
||||
1
src/json-schema-checks/derivation
Symbolic link
1
src/json-schema-checks/derivation
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../src/libstore-tests/data/derivation
|
||||
1
src/json-schema-checks/deriving-path
Symbolic link
1
src/json-schema-checks/deriving-path
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../src/libstore-tests/data/derived-path
|
||||
1
src/json-schema-checks/hash
Symbolic link
1
src/json-schema-checks/hash
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../src/libutil-tests/data/hash
|
||||
157
src/json-schema-checks/meson.build
Normal file
157
src/json-schema-checks/meson.build
Normal file
|
|
@ -0,0 +1,157 @@
|
|||
# Run with:
|
||||
# meson test --suite json-schema
|
||||
# Run with: (without shell / configure)
|
||||
# nix build .#nix-json-schema-checks
|
||||
|
||||
project(
|
||||
'nix-json-schema-checks',
|
||||
version : files('.version'),
|
||||
meson_version : '>= 1.1',
|
||||
license : 'LGPL-2.1-or-later',
|
||||
)
|
||||
|
||||
fs = import('fs')
|
||||
|
||||
# Note: The 'jsonschema' package provides the 'jv' command
|
||||
jv = find_program('jv', required : true)
|
||||
|
||||
# The schema directory is a committed symlink to the actual schema location
|
||||
schema_dir = meson.current_source_dir() / 'schema'
|
||||
|
||||
# Get all example files
|
||||
schemas = [
|
||||
{
|
||||
'stem' : 'hash',
|
||||
'schema' : schema_dir / 'hash-v1.yaml',
|
||||
'files' : [
|
||||
'sha256-base64.json',
|
||||
'sha256-base16.json',
|
||||
'sha256-nix32.json',
|
||||
'blake3-base64.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'content-address',
|
||||
'schema' : schema_dir / 'content-address-v1.yaml',
|
||||
'files' : [
|
||||
'text.json',
|
||||
'nar.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'store-path',
|
||||
'schema' : schema_dir / 'store-path-v1.yaml',
|
||||
'files' : [
|
||||
'simple.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'derivation',
|
||||
'schema' : schema_dir / 'derivation-v3.yaml',
|
||||
'files' : [
|
||||
'dyn-dep-derivation.json',
|
||||
'simple-derivation.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'derivation',
|
||||
'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output',
|
||||
'files' : [
|
||||
'output-caFixedFlat.json',
|
||||
'output-caFixedNAR.json',
|
||||
'output-caFixedText.json',
|
||||
'output-caFloating.json',
|
||||
'output-deferred.json',
|
||||
'output-impure.json',
|
||||
'output-inputAddressed.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'deriving-path',
|
||||
'schema' : schema_dir / 'deriving-path-v1.yaml',
|
||||
'files' : [
|
||||
'single_opaque.json',
|
||||
'single_built.json',
|
||||
'single_built_built.json',
|
||||
],
|
||||
},
|
||||
# Match overall
|
||||
{
|
||||
'stem' : 'store-object-info',
|
||||
'schema' : schema_dir / 'store-object-info-v1.yaml',
|
||||
'files' : [
|
||||
'pure.json',
|
||||
'impure.json',
|
||||
'empty_pure.json',
|
||||
'empty_impure.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'nar-info',
|
||||
'schema' : schema_dir / 'store-object-info-v1.yaml',
|
||||
'files' : [
|
||||
'pure.json',
|
||||
'impure.json',
|
||||
],
|
||||
},
|
||||
# Match exact variant
|
||||
{
|
||||
'stem' : 'store-object-info',
|
||||
'schema' : schema_dir / 'store-object-info-v1.yaml#/$defs/base',
|
||||
'files' : [
|
||||
'pure.json',
|
||||
'empty_pure.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'store-object-info',
|
||||
'schema' : schema_dir / 'store-object-info-v1.yaml#/$defs/impure',
|
||||
'files' : [
|
||||
'impure.json',
|
||||
'empty_impure.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'nar-info',
|
||||
'schema' : schema_dir / 'store-object-info-v1.yaml#/$defs/base',
|
||||
'files' : [
|
||||
'pure.json',
|
||||
],
|
||||
},
|
||||
{
|
||||
'stem' : 'nar-info',
|
||||
'schema' : schema_dir / 'store-object-info-v1.yaml#/$defs/narInfo',
|
||||
'files' : [
|
||||
'impure.json',
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
# Validate each example against the schema
|
||||
foreach schema : schemas
|
||||
stem = schema['stem']
|
||||
schema_file = schema['schema']
|
||||
if '#' not in schema_file
|
||||
# Validate the schema itself against JSON Schema Draft 04
|
||||
test(
|
||||
stem + '-schema-valid',
|
||||
jv,
|
||||
args : [
|
||||
'http://json-schema.org/draft-04/schema',
|
||||
schema_file,
|
||||
],
|
||||
suite : 'json-schema',
|
||||
)
|
||||
endif
|
||||
foreach example : schema['files']
|
||||
test(
|
||||
stem + '-example-' + fs.stem(example),
|
||||
jv,
|
||||
args : [
|
||||
schema_file,
|
||||
files(stem / example),
|
||||
],
|
||||
suite : 'json-schema',
|
||||
)
|
||||
endforeach
|
||||
endforeach
|
||||
1
src/json-schema-checks/nar-info
Symbolic link
1
src/json-schema-checks/nar-info
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../src/libstore-tests/data/nar-info
|
||||
56
src/json-schema-checks/package.nix
Normal file
56
src/json-schema-checks/package.nix
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
# Run with: nix build .#nix-json-schema-checks
|
||||
{
|
||||
lib,
|
||||
mkMesonDerivation,
|
||||
|
||||
meson,
|
||||
ninja,
|
||||
jsonschema,
|
||||
|
||||
# Configuration Options
|
||||
|
||||
version,
|
||||
}:
|
||||
|
||||
mkMesonDerivation (finalAttrs: {
|
||||
pname = "nix-json-schema-checks";
|
||||
inherit version;
|
||||
|
||||
workDir = ./.;
|
||||
fileset = lib.fileset.unions [
|
||||
../../.version
|
||||
../../doc/manual/source/protocols/json/schema
|
||||
../../src/libutil-tests/data/hash
|
||||
../../src/libstore-tests/data/content-address
|
||||
../../src/libstore-tests/data/store-path
|
||||
../../src/libstore-tests/data/derivation
|
||||
../../src/libstore-tests/data/derived-path
|
||||
../../src/libstore-tests/data/path-info
|
||||
../../src/libstore-tests/data/nar-info
|
||||
./.
|
||||
];
|
||||
|
||||
outputs = [ "out" ];
|
||||
|
||||
passthru.externalNativeBuildInputs = [
|
||||
jsonschema
|
||||
];
|
||||
|
||||
nativeBuildInputs = [
|
||||
meson
|
||||
ninja
|
||||
]
|
||||
++ finalAttrs.passthru.externalNativeBuildInputs;
|
||||
|
||||
doCheck = true;
|
||||
|
||||
mesonCheckFlags = [ "--print-errorlogs" ];
|
||||
|
||||
postInstall = ''
|
||||
touch $out
|
||||
'';
|
||||
|
||||
meta = {
|
||||
platforms = lib.platforms.all;
|
||||
};
|
||||
})
|
||||
1
src/json-schema-checks/schema
Symbolic link
1
src/json-schema-checks/schema
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../doc/manual/source/protocols/json/schema
|
||||
1
src/json-schema-checks/store-object-info
Symbolic link
1
src/json-schema-checks/store-object-info
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../src/libstore-tests/data/path-info
|
||||
1
src/json-schema-checks/store-path
Symbolic link
1
src/json-schema-checks/store-path
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../src/libstore-tests/data/store-path
|
||||
|
|
@ -117,10 +117,11 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
|
|||
"the derivation '%s' has unrealised output '%s' (derived-path.cc/toRealisedPaths)",
|
||||
store.printStorePath(p.drvPath->outPath()),
|
||||
outputName);
|
||||
auto thisRealisation = store.queryRealisation(DrvOutput{*drvOutput, outputName});
|
||||
DrvOutput key{*drvOutput, outputName};
|
||||
auto thisRealisation = store.queryRealisation(key);
|
||||
assert(thisRealisation); // We’ve built it, so we must
|
||||
// have the realisation
|
||||
res.insert(*thisRealisation);
|
||||
res.insert(Realisation{*thisRealisation, std::move(key)});
|
||||
} else {
|
||||
res.insert(outputPath);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -350,6 +350,20 @@ struct MixEnvironment : virtual Args
|
|||
void setEnviron();
|
||||
};
|
||||
|
||||
struct MixNoCheckSigs : virtual Args
|
||||
{
|
||||
CheckSigsFlag checkSigs = CheckSigs;
|
||||
|
||||
MixNoCheckSigs()
|
||||
{
|
||||
addFlag({
|
||||
.longName = "no-check-sigs",
|
||||
.description = "Do not require that paths are signed by trusted keys.",
|
||||
.handler = {&checkSigs, NoCheckSigs},
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
void completeFlakeInputAttrPath(
|
||||
AddCompletions & completions,
|
||||
ref<EvalState> evalState,
|
||||
|
|
|
|||
|
|
@ -69,7 +69,7 @@ struct InstallableFlake : InstallableValue
|
|||
*/
|
||||
std::vector<ref<eval_cache::AttrCursor>> getCursors(EvalState & state) override;
|
||||
|
||||
std::shared_ptr<flake::LockedFlake> getLockedFlake() const;
|
||||
ref<flake::LockedFlake> getLockedFlake() const;
|
||||
|
||||
FlakeRef nixpkgsFlakeRef() const;
|
||||
};
|
||||
|
|
@ -87,6 +87,4 @@ static inline FlakeRef defaultNixpkgsFlakeRef()
|
|||
return FlakeRef::fromAttrs(fetchSettings, {{"type", "indirect"}, {"id", "nixpkgs"}});
|
||||
}
|
||||
|
||||
ref<eval_cache::EvalCache> openEvalCache(EvalState & state, std::shared_ptr<flake::LockedFlake> lockedFlake);
|
||||
|
||||
} // namespace nix
|
||||
|
|
|
|||
|
|
@ -185,16 +185,16 @@ std::vector<ref<eval_cache::AttrCursor>> InstallableFlake::getCursors(EvalState
|
|||
return res;
|
||||
}
|
||||
|
||||
std::shared_ptr<flake::LockedFlake> InstallableFlake::getLockedFlake() const
|
||||
ref<flake::LockedFlake> InstallableFlake::getLockedFlake() const
|
||||
{
|
||||
if (!_lockedFlake) {
|
||||
flake::LockFlags lockFlagsApplyConfig = lockFlags;
|
||||
// FIXME why this side effect?
|
||||
lockFlagsApplyConfig.applyNixConfig = true;
|
||||
_lockedFlake =
|
||||
std::make_shared<flake::LockedFlake>(lockFlake(flakeSettings, *state, flakeRef, lockFlagsApplyConfig));
|
||||
_lockedFlake = make_ref<flake::LockedFlake>(lockFlake(flakeSettings, *state, flakeRef, lockFlagsApplyConfig));
|
||||
}
|
||||
return _lockedFlake;
|
||||
// _lockedFlake is now non-null but still just a shared_ptr
|
||||
return ref<flake::LockedFlake>(_lockedFlake);
|
||||
}
|
||||
|
||||
FlakeRef InstallableFlake::nixpkgsFlakeRef() const
|
||||
|
|
|
|||
|
|
@ -342,8 +342,7 @@ void completeFlakeRefWithFragment(
|
|||
parseFlakeRef(fetchSettings, expandTilde(flakeRefS), std::filesystem::current_path().string());
|
||||
|
||||
auto evalCache = openEvalCache(
|
||||
*evalState,
|
||||
std::make_shared<flake::LockedFlake>(lockFlake(flakeSettings, *evalState, flakeRef, lockFlags)));
|
||||
*evalState, make_ref<flake::LockedFlake>(lockFlake(flakeSettings, *evalState, flakeRef, lockFlags)));
|
||||
|
||||
auto root = evalCache->getRoot();
|
||||
|
||||
|
|
@ -443,42 +442,6 @@ static StorePath getDeriver(ref<Store> store, const Installable & i, const Store
|
|||
return *derivers.begin();
|
||||
}
|
||||
|
||||
ref<eval_cache::EvalCache> openEvalCache(EvalState & state, std::shared_ptr<flake::LockedFlake> lockedFlake)
|
||||
{
|
||||
auto fingerprint = evalSettings.useEvalCache && evalSettings.pureEval
|
||||
? lockedFlake->getFingerprint(state.store, state.fetchSettings)
|
||||
: std::nullopt;
|
||||
auto rootLoader = [&state, lockedFlake]() {
|
||||
/* For testing whether the evaluation cache is
|
||||
complete. */
|
||||
if (getEnv("NIX_ALLOW_EVAL").value_or("1") == "0")
|
||||
throw Error("not everything is cached, but evaluation is not allowed");
|
||||
|
||||
auto vFlake = state.allocValue();
|
||||
flake::callFlake(state, *lockedFlake, *vFlake);
|
||||
|
||||
state.forceAttrs(*vFlake, noPos, "while parsing cached flake data");
|
||||
|
||||
auto aOutputs = vFlake->attrs()->get(state.symbols.create("outputs"));
|
||||
assert(aOutputs);
|
||||
|
||||
return aOutputs->value;
|
||||
};
|
||||
|
||||
if (fingerprint) {
|
||||
auto search = state.evalCaches.find(fingerprint.value());
|
||||
if (search == state.evalCaches.end()) {
|
||||
search =
|
||||
state.evalCaches
|
||||
.emplace(fingerprint.value(), make_ref<nix::eval_cache::EvalCache>(fingerprint, state, rootLoader))
|
||||
.first;
|
||||
}
|
||||
return search->second;
|
||||
} else {
|
||||
return make_ref<nix::eval_cache::EvalCache>(std::nullopt, state, rootLoader);
|
||||
}
|
||||
}
|
||||
|
||||
Installables SourceExprCommand::parseInstallables(ref<Store> store, std::vector<std::string> ss)
|
||||
{
|
||||
Installables result;
|
||||
|
|
@ -604,28 +567,28 @@ std::vector<BuiltPathWithResult> Installable::build(
|
|||
|
||||
static void throwBuildErrors(std::vector<KeyedBuildResult> & buildResults, const Store & store)
|
||||
{
|
||||
std::vector<KeyedBuildResult> failed;
|
||||
std::vector<std::pair<const KeyedBuildResult *, const KeyedBuildResult::Failure *>> failed;
|
||||
for (auto & buildResult : buildResults) {
|
||||
if (!buildResult.success()) {
|
||||
failed.push_back(buildResult);
|
||||
if (auto * failure = buildResult.tryGetFailure()) {
|
||||
failed.push_back({&buildResult, failure});
|
||||
}
|
||||
}
|
||||
|
||||
auto failedResult = failed.begin();
|
||||
if (failedResult != failed.end()) {
|
||||
if (failed.size() == 1) {
|
||||
failedResult->rethrow();
|
||||
failedResult->second->rethrow();
|
||||
} else {
|
||||
StringSet failedPaths;
|
||||
for (; failedResult != failed.end(); failedResult++) {
|
||||
if (!failedResult->errorMsg.empty()) {
|
||||
if (!failedResult->second->errorMsg.empty()) {
|
||||
logError(
|
||||
ErrorInfo{
|
||||
.level = lvlError,
|
||||
.msg = failedResult->errorMsg,
|
||||
.msg = failedResult->second->errorMsg,
|
||||
});
|
||||
}
|
||||
failedPaths.insert(failedResult->path.to_string(store));
|
||||
failedPaths.insert(failedResult->first->path.to_string(store));
|
||||
}
|
||||
throw Error("build of %s failed", concatStringsSep(", ", quoteStrings(failedPaths)));
|
||||
}
|
||||
|
|
@ -695,12 +658,14 @@ std::vector<std::pair<ref<Installable>, BuiltPathWithResult>> Installable::build
|
|||
auto buildResults = store->buildPathsWithResults(pathsToBuild, bMode, evalStore);
|
||||
throwBuildErrors(buildResults, *store);
|
||||
for (auto & buildResult : buildResults) {
|
||||
// If we didn't throw, they must all be sucesses
|
||||
auto & success = std::get<nix::BuildResult::Success>(buildResult.inner);
|
||||
for (auto & aux : backmap[buildResult.path]) {
|
||||
std::visit(
|
||||
overloaded{
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
std::map<std::string, StorePath> outputs;
|
||||
for (auto & [outputName, realisation] : buildResult.builtOutputs)
|
||||
for (auto & [outputName, realisation] : success.builtOutputs)
|
||||
outputs.emplace(outputName, realisation.outPath);
|
||||
res.push_back(
|
||||
{aux.installable,
|
||||
|
|
|
|||
|
|
@ -67,7 +67,6 @@ config_priv_h = configure_file(
|
|||
)
|
||||
|
||||
subdir('nix-meson-build-support/common')
|
||||
subdir('nix-meson-build-support/asan-options')
|
||||
|
||||
sources = files(
|
||||
'built-path.cc',
|
||||
|
|
|
|||
|
|
@ -669,7 +669,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
|
|||
ss << "No documentation found.\n\n";
|
||||
}
|
||||
|
||||
auto markdown = toView(ss);
|
||||
auto markdown = ss.view();
|
||||
logger->cout(trim(renderMarkdownToTerminal(markdown)));
|
||||
|
||||
} else
|
||||
|
|
|
|||
|
|
@ -28,7 +28,6 @@ deps_public_maybe_subproject = [
|
|||
subdir('nix-meson-build-support/subprojects')
|
||||
|
||||
subdir('nix-meson-build-support/common')
|
||||
subdir('nix-meson-build-support/asan-options')
|
||||
|
||||
sources = files(
|
||||
'nix_api_expr.cc',
|
||||
|
|
|
|||
|
|
@ -4,11 +4,14 @@
|
|||
* @brief Bindings to the Nix language evaluator
|
||||
*
|
||||
* See *[Embedding the Nix Evaluator](@ref nix_evaluator_example)* for an example.
|
||||
* @{
|
||||
*/
|
||||
/** @file
|
||||
* @brief Main entry for the libexpr C bindings
|
||||
*/
|
||||
/** @defgroup libexpr_init Initialization
|
||||
* @ingroup libexpr
|
||||
* @{
|
||||
*/
|
||||
|
||||
#include "nix_api_store.h"
|
||||
#include "nix_api_util.h"
|
||||
|
|
@ -45,7 +48,10 @@ typedef struct nix_eval_state_builder nix_eval_state_builder;
|
|||
*/
|
||||
typedef struct EvalState EvalState; // nix::EvalState
|
||||
|
||||
/** @} */
|
||||
|
||||
/** @brief A Nix language value, or thunk that may evaluate to a value.
|
||||
* @ingroup value
|
||||
*
|
||||
* Values are the primary objects manipulated in the Nix language.
|
||||
* They are considered to be immutable from a user's perspective, but the process of evaluating a value changes its
|
||||
|
|
@ -56,7 +62,8 @@ typedef struct EvalState EvalState; // nix::EvalState
|
|||
*
|
||||
* The evaluator manages its own memory, but your use of the C API must follow the reference counting rules.
|
||||
*
|
||||
* @see value_manip
|
||||
* @struct nix_value
|
||||
* @see value_create, value_extract
|
||||
* @see nix_value_incref, nix_value_decref
|
||||
*/
|
||||
typedef struct nix_value nix_value;
|
||||
|
|
@ -65,6 +72,7 @@ NIX_DEPRECATED("use nix_value instead") typedef nix_value Value;
|
|||
// Function prototypes
|
||||
/**
|
||||
* @brief Initialize the Nix language evaluator.
|
||||
* @ingroup libexpr_init
|
||||
*
|
||||
* This function must be called at least once,
|
||||
* at some point before constructing a EvalState for the first time.
|
||||
|
|
@ -77,6 +85,7 @@ nix_err nix_libexpr_init(nix_c_context * context);
|
|||
|
||||
/**
|
||||
* @brief Parses and evaluates a Nix expression from a string.
|
||||
* @ingroup value_create
|
||||
*
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] state The state of the evaluation.
|
||||
|
|
@ -93,6 +102,7 @@ nix_err nix_expr_eval_from_string(
|
|||
|
||||
/**
|
||||
* @brief Calls a Nix function with an argument.
|
||||
* @ingroup value_create
|
||||
*
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] state The state of the evaluation.
|
||||
|
|
@ -107,6 +117,7 @@ nix_err nix_value_call(nix_c_context * context, EvalState * state, nix_value * f
|
|||
|
||||
/**
|
||||
* @brief Calls a Nix function with multiple arguments.
|
||||
* @ingroup value_create
|
||||
*
|
||||
* Technically these are functions that return functions. It is common for Nix
|
||||
* functions to be curried, so this function is useful for calling them.
|
||||
|
|
@ -126,10 +137,12 @@ nix_err nix_value_call_multi(
|
|||
|
||||
/**
|
||||
* @brief Calls a Nix function with multiple arguments.
|
||||
* @ingroup value_create
|
||||
*
|
||||
* Technically these are functions that return functions. It is common for Nix
|
||||
* functions to be curried, so this function is useful for calling them.
|
||||
*
|
||||
* @def NIX_VALUE_CALL
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] state The state of the evaluation.
|
||||
* @param[out] value The result of the function call.
|
||||
|
|
@ -147,6 +160,7 @@ nix_err nix_value_call_multi(
|
|||
|
||||
/**
|
||||
* @brief Forces the evaluation of a Nix value.
|
||||
* @ingroup value_create
|
||||
*
|
||||
* The Nix interpreter is lazy, and not-yet-evaluated values can be
|
||||
* of type NIX_TYPE_THUNK instead of their actual value.
|
||||
|
|
@ -180,18 +194,20 @@ nix_err nix_value_force_deep(nix_c_context * context, EvalState * state, nix_val
|
|||
|
||||
/**
|
||||
* @brief Create a new nix_eval_state_builder
|
||||
* @ingroup libexpr_init
|
||||
*
|
||||
* The settings are initialized to their default value.
|
||||
* Values can be sourced elsewhere with nix_eval_state_builder_load.
|
||||
*
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] store The Nix store to use.
|
||||
* @return A new nix_eval_state_builder or NULL on failure.
|
||||
* @return A new nix_eval_state_builder or NULL on failure. Call nix_eval_state_builder_free() when you're done.
|
||||
*/
|
||||
nix_eval_state_builder * nix_eval_state_builder_new(nix_c_context * context, Store * store);
|
||||
|
||||
/**
|
||||
* @brief Read settings from the ambient environment
|
||||
* @ingroup libexpr_init
|
||||
*
|
||||
* Settings are sourced from environment variables and configuration files,
|
||||
* as documented in the Nix manual.
|
||||
|
|
@ -204,6 +220,7 @@ nix_err nix_eval_state_builder_load(nix_c_context * context, nix_eval_state_buil
|
|||
|
||||
/**
|
||||
* @brief Set the lookup path for `<...>` expressions
|
||||
* @ingroup libexpr_init
|
||||
*
|
||||
* @param[in] context Optional, stores error information
|
||||
* @param[in] builder The builder to modify.
|
||||
|
|
@ -214,18 +231,21 @@ nix_err nix_eval_state_builder_set_lookup_path(
|
|||
|
||||
/**
|
||||
* @brief Create a new Nix language evaluator state
|
||||
* @ingroup libexpr_init
|
||||
*
|
||||
* Remember to nix_eval_state_builder_free after building the state.
|
||||
* The builder becomes unusable after this call. Remember to call nix_eval_state_builder_free()
|
||||
* after building the state.
|
||||
*
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] builder The builder to use and free
|
||||
* @return A new Nix state or NULL on failure.
|
||||
* @return A new Nix state or NULL on failure. Call nix_state_free() when you're done.
|
||||
* @see nix_eval_state_builder_new, nix_eval_state_builder_free
|
||||
*/
|
||||
EvalState * nix_eval_state_build(nix_c_context * context, nix_eval_state_builder * builder);
|
||||
|
||||
/**
|
||||
* @brief Free a nix_eval_state_builder
|
||||
* @ingroup libexpr_init
|
||||
*
|
||||
* Does not fail.
|
||||
*
|
||||
|
|
@ -235,19 +255,21 @@ void nix_eval_state_builder_free(nix_eval_state_builder * builder);
|
|||
|
||||
/**
|
||||
* @brief Create a new Nix language evaluator state
|
||||
* @ingroup libexpr_init
|
||||
*
|
||||
* For more control, use nix_eval_state_builder
|
||||
*
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] lookupPath Null-terminated array of strings corresponding to entries in NIX_PATH.
|
||||
* @param[in] store The Nix store to use.
|
||||
* @return A new Nix state or NULL on failure.
|
||||
* @return A new Nix state or NULL on failure. Call nix_state_free() when you're done.
|
||||
* @see nix_state_builder_new
|
||||
*/
|
||||
EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath, Store * store);
|
||||
|
||||
/**
|
||||
* @brief Frees a Nix state.
|
||||
* @ingroup libexpr_init
|
||||
*
|
||||
* Does not fail.
|
||||
*
|
||||
|
|
@ -256,6 +278,7 @@ EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath,
|
|||
void nix_state_free(EvalState * state);
|
||||
|
||||
/** @addtogroup GC
|
||||
* @ingroup libexpr
|
||||
* @brief Reference counting and garbage collector operations
|
||||
*
|
||||
* The Nix language evaluator uses a garbage collector. To ease C interop, we implement
|
||||
|
|
@ -286,6 +309,9 @@ nix_err nix_gc_incref(nix_c_context * context, const void * object);
|
|||
/**
|
||||
* @brief Decrement the garbage collector reference counter for the given object
|
||||
*
|
||||
* @deprecated We are phasing out the general nix_gc_decref() in favor of type-specified free functions, such as
|
||||
* nix_value_decref().
|
||||
*
|
||||
* We also provide typed `nix_*_decref` functions, which are
|
||||
* - safer to use
|
||||
* - easier to integrate when deriving bindings
|
||||
|
|
@ -314,12 +340,11 @@ void nix_gc_now();
|
|||
*/
|
||||
void nix_gc_register_finalizer(void * obj, void * cd, void (*finalizer)(void * obj, void * cd));
|
||||
|
||||
/** @} */
|
||||
/** @} */ // doxygen group GC
|
||||
|
||||
// cffi end
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
/** @} */
|
||||
|
||||
#endif // NIX_API_EXPR_H
|
||||
|
|
|
|||
|
|
@ -2,11 +2,12 @@
|
|||
#define NIX_API_EXTERNAL_H
|
||||
/** @ingroup libexpr
|
||||
* @addtogroup Externals
|
||||
* @brief Deal with external values
|
||||
* @brief Externals let Nix expressions work with foreign values that aren't part of the normal Nix value data model
|
||||
* @{
|
||||
*/
|
||||
/** @file
|
||||
* @brief libexpr C bindings dealing with external values
|
||||
* @see Externals
|
||||
*/
|
||||
|
||||
#include "nix_api_expr.h"
|
||||
|
|
@ -115,7 +116,7 @@ typedef struct NixCExternalValueDesc
|
|||
* @brief Try to compare two external values
|
||||
*
|
||||
* Optional, the default is always false.
|
||||
* If the other object was not a Nix C external value, this comparison will
|
||||
* If the other object was not a Nix C API external value, this comparison will
|
||||
* also return false
|
||||
* @param[in] self the void* passed to nix_create_external_value
|
||||
* @param[in] other the void* passed to the other object's
|
||||
|
|
@ -168,7 +169,7 @@ typedef struct NixCExternalValueDesc
|
|||
/**
|
||||
* @brief Create an external value, that can be given to nix_init_external
|
||||
*
|
||||
* Owned by the GC. Use nix_gc_decref when you're done with the pointer.
|
||||
* Call nix_gc_decref() when you're done with the pointer.
|
||||
*
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] desc a NixCExternalValueDesc, you should keep this alive as long
|
||||
|
|
@ -180,10 +181,11 @@ typedef struct NixCExternalValueDesc
|
|||
ExternalValue * nix_create_external_value(nix_c_context * context, NixCExternalValueDesc * desc, void * v);
|
||||
|
||||
/**
|
||||
* @brief Extract the pointer from a nix c external value.
|
||||
* @brief Extract the pointer from a Nix C API external value.
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] b The external value
|
||||
* @returns The pointer, or null if the external value was not from nix c.
|
||||
* @returns The pointer, valid while the external value is valid, or null if the external value was not from the Nix C
|
||||
* API.
|
||||
* @see nix_get_external
|
||||
*/
|
||||
void * nix_get_external_value_content(nix_c_context * context, ExternalValue * b);
|
||||
|
|
|
|||
|
|
@ -1,9 +1,6 @@
|
|||
#ifndef NIX_API_VALUE_H
|
||||
#define NIX_API_VALUE_H
|
||||
|
||||
/** @addtogroup libexpr
|
||||
* @{
|
||||
*/
|
||||
/** @file
|
||||
* @brief libexpr C bindings dealing with values
|
||||
*/
|
||||
|
|
@ -20,18 +17,89 @@ extern "C" {
|
|||
#endif
|
||||
// cffi start
|
||||
|
||||
/** @defgroup value Value
|
||||
* @ingroup libexpr
|
||||
* @brief nix_value type and core operations for working with Nix values
|
||||
* @see value_create
|
||||
* @see value_extract
|
||||
*/
|
||||
|
||||
/** @defgroup value_create Value Creation
|
||||
* @ingroup libexpr
|
||||
* @brief Functions for allocating and initializing Nix values
|
||||
*
|
||||
* Values are usually created with `nix_alloc_value` followed by `nix_init_*` functions.
|
||||
* In primop callbacks, allocation is already done and only initialization is needed.
|
||||
*/
|
||||
|
||||
/** @defgroup value_extract Value Extraction
|
||||
* @ingroup libexpr
|
||||
* @brief Functions for extracting data from Nix values
|
||||
*/
|
||||
|
||||
/** @defgroup primops PrimOps and Builtins
|
||||
* @ingroup libexpr
|
||||
*/
|
||||
|
||||
// Type definitions
|
||||
/** @brief Represents the state of a Nix value
|
||||
*
|
||||
* Thunk values (NIX_TYPE_THUNK) change to their final, unchanging type when forced.
|
||||
*
|
||||
* @see https://nix.dev/manual/nix/latest/language/evaluation.html
|
||||
* @enum ValueType
|
||||
* @ingroup value
|
||||
*/
|
||||
typedef enum {
|
||||
/** Unevaluated expression
|
||||
*
|
||||
* Thunks often contain an expression and closure, but may contain other
|
||||
* representations too.
|
||||
*
|
||||
* Their state is mutable, unlike that of the other types.
|
||||
*/
|
||||
NIX_TYPE_THUNK,
|
||||
/**
|
||||
* A 64 bit signed integer.
|
||||
*/
|
||||
NIX_TYPE_INT,
|
||||
/** @brief IEEE 754 double precision floating point number
|
||||
* @see https://nix.dev/manual/nix/latest/language/types.html#type-float
|
||||
*/
|
||||
NIX_TYPE_FLOAT,
|
||||
/** @brief Boolean true or false value
|
||||
* @see https://nix.dev/manual/nix/latest/language/types.html#type-bool
|
||||
*/
|
||||
NIX_TYPE_BOOL,
|
||||
/** @brief String value with context
|
||||
*
|
||||
* String content may contain arbitrary bytes, not necessarily UTF-8.
|
||||
* @see https://nix.dev/manual/nix/latest/language/types.html#type-string
|
||||
*/
|
||||
NIX_TYPE_STRING,
|
||||
/** @brief Filesystem path
|
||||
* @see https://nix.dev/manual/nix/latest/language/types.html#type-path
|
||||
*/
|
||||
NIX_TYPE_PATH,
|
||||
/** @brief Null value
|
||||
* @see https://nix.dev/manual/nix/latest/language/types.html#type-null
|
||||
*/
|
||||
NIX_TYPE_NULL,
|
||||
/** @brief Attribute set (key-value mapping)
|
||||
* @see https://nix.dev/manual/nix/latest/language/types.html#type-attrs
|
||||
*/
|
||||
NIX_TYPE_ATTRS,
|
||||
/** @brief Ordered list of values
|
||||
* @see https://nix.dev/manual/nix/latest/language/types.html#type-list
|
||||
*/
|
||||
NIX_TYPE_LIST,
|
||||
/** @brief Function (lambda or builtin)
|
||||
* @see https://nix.dev/manual/nix/latest/language/types.html#type-function
|
||||
*/
|
||||
NIX_TYPE_FUNCTION,
|
||||
/** @brief External value from C++ plugins or C API
|
||||
* @see Externals
|
||||
*/
|
||||
NIX_TYPE_EXTERNAL
|
||||
} ValueType;
|
||||
|
||||
|
|
@ -39,22 +107,41 @@ typedef enum {
|
|||
typedef struct nix_value nix_value;
|
||||
typedef struct EvalState EvalState;
|
||||
|
||||
/** @deprecated Use nix_value instead */
|
||||
[[deprecated("use nix_value instead")]] typedef nix_value Value;
|
||||
|
||||
// type defs
|
||||
/** @brief Stores an under-construction set of bindings
|
||||
* @ingroup value_manip
|
||||
* @ingroup value_create
|
||||
*
|
||||
* Do not reuse.
|
||||
* Each builder can only be used once. After calling nix_make_attrs(), the builder
|
||||
* becomes invalid and must not be used again. Call nix_bindings_builder_free() to release it.
|
||||
*
|
||||
* Typical usage pattern:
|
||||
* 1. Create with nix_make_bindings_builder()
|
||||
* 2. Insert attributes with nix_bindings_builder_insert()
|
||||
* 3. Create final attribute set with nix_make_attrs()
|
||||
* 4. Free builder with nix_bindings_builder_free()
|
||||
*
|
||||
* @struct BindingsBuilder
|
||||
* @see nix_make_bindings_builder, nix_bindings_builder_free, nix_make_attrs
|
||||
* @see nix_bindings_builder_insert
|
||||
*/
|
||||
typedef struct BindingsBuilder BindingsBuilder;
|
||||
|
||||
/** @brief Stores an under-construction list
|
||||
* @ingroup value_manip
|
||||
* @ingroup value_create
|
||||
*
|
||||
* Do not reuse.
|
||||
* Each builder can only be used once. After calling nix_make_list(), the builder
|
||||
* becomes invalid and must not be used again. Call nix_list_builder_free() to release it.
|
||||
*
|
||||
* Typical usage pattern:
|
||||
* 1. Create with nix_make_list_builder()
|
||||
* 2. Insert elements with nix_list_builder_insert()
|
||||
* 3. Create final list with nix_make_list()
|
||||
* 4. Free builder with nix_list_builder_free()
|
||||
*
|
||||
* @struct ListBuilder
|
||||
* @see nix_make_list_builder, nix_list_builder_free, nix_make_list
|
||||
* @see nix_list_builder_insert
|
||||
*/
|
||||
|
|
@ -63,25 +150,28 @@ typedef struct ListBuilder ListBuilder;
|
|||
/** @brief PrimOp function
|
||||
* @ingroup primops
|
||||
*
|
||||
* Owned by the GC
|
||||
* @see nix_alloc_primop, nix_init_primop
|
||||
* Can be released with nix_gc_decref() when necessary.
|
||||
* @struct PrimOp
|
||||
* @see nix_alloc_primop, nix_init_primop, nix_register_primop
|
||||
*/
|
||||
typedef struct PrimOp PrimOp;
|
||||
/** @brief External Value
|
||||
* @ingroup Externals
|
||||
*
|
||||
* Owned by the GC
|
||||
* Can be released with nix_gc_decref() when necessary.
|
||||
* @struct ExternalValue
|
||||
* @see nix_create_external_value, nix_init_external, nix_get_external
|
||||
*/
|
||||
typedef struct ExternalValue ExternalValue;
|
||||
|
||||
/** @brief String without placeholders, and realised store paths
|
||||
* @struct nix_realised_string
|
||||
* @see nix_string_realise, nix_realised_string_free
|
||||
*/
|
||||
typedef struct nix_realised_string nix_realised_string;
|
||||
|
||||
/** @defgroup primops Adding primops
|
||||
* @{
|
||||
*/
|
||||
/** @brief Function pointer for primops
|
||||
* @ingroup primops
|
||||
*
|
||||
* When you want to return an error, call nix_set_err_msg(context, NIX_ERR_UNKNOWN, "your error message here").
|
||||
*
|
||||
|
|
@ -97,9 +187,9 @@ typedef void (*PrimOpFun)(
|
|||
void * user_data, nix_c_context * context, EvalState * state, nix_value ** args, nix_value * ret);
|
||||
|
||||
/** @brief Allocate a PrimOp
|
||||
* @ingroup primops
|
||||
*
|
||||
* Owned by the garbage collector.
|
||||
* Use nix_gc_decref() when you're done with the returned PrimOp.
|
||||
* Call nix_gc_decref() when you're done with the returned PrimOp.
|
||||
*
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] fun callback
|
||||
|
|
@ -121,35 +211,38 @@ PrimOp * nix_alloc_primop(
|
|||
void * user_data);
|
||||
|
||||
/** @brief add a primop to the `builtins` attribute set
|
||||
* @ingroup primops
|
||||
*
|
||||
* Only applies to States created after this call.
|
||||
*
|
||||
* Moves your PrimOp content into the global evaluator
|
||||
* registry, meaning your input PrimOp pointer is no longer usable.
|
||||
* You are free to remove your references to it,
|
||||
* after which it will be garbage collected.
|
||||
* Moves your PrimOp content into the global evaluator registry, meaning
|
||||
* your input PrimOp pointer becomes invalid. The PrimOp must not be used
|
||||
* with nix_init_primop() before or after this call, as this would cause
|
||||
* undefined behavior.
|
||||
* You must call nix_gc_decref() on the original PrimOp pointer
|
||||
* after this call to release your reference.
|
||||
*
|
||||
* @param[out] context Optional, stores error information
|
||||
* @return primop, or null in case of errors
|
||||
*
|
||||
* @param[in] primOp PrimOp to register
|
||||
* @return error code, NIX_OK on success
|
||||
*/
|
||||
nix_err nix_register_primop(nix_c_context * context, PrimOp * primOp);
|
||||
/** @} */
|
||||
|
||||
// Function prototypes
|
||||
|
||||
/** @brief Allocate a Nix value
|
||||
* @ingroup value_create
|
||||
*
|
||||
* Owned by the GC. Use nix_gc_decref() when you're done with the pointer
|
||||
* Call nix_value_decref() when you're done with the pointer
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] state nix evaluator state
|
||||
* @return value, or null in case of errors
|
||||
*
|
||||
*/
|
||||
nix_value * nix_alloc_value(nix_c_context * context, EvalState * state);
|
||||
|
||||
/**
|
||||
* @brief Increment the garbage collector reference counter for the given `nix_value`.
|
||||
* @ingroup value
|
||||
*
|
||||
* The Nix language evaluator C API keeps track of alive objects by reference counting.
|
||||
* When you're done with a refcounted pointer, call nix_value_decref().
|
||||
|
|
@ -161,21 +254,19 @@ nix_err nix_value_incref(nix_c_context * context, nix_value * value);
|
|||
|
||||
/**
|
||||
* @brief Decrement the garbage collector reference counter for the given object
|
||||
* @ingroup value
|
||||
*
|
||||
* When the counter reaches zero, the `nix_value` object becomes invalid.
|
||||
* The data referenced by `nix_value` may not be deallocated until the memory
|
||||
* garbage collector has run, but deallocation is not guaranteed.
|
||||
*
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value The object to stop referencing
|
||||
*/
|
||||
nix_err nix_value_decref(nix_c_context * context, nix_value * value);
|
||||
|
||||
/** @addtogroup value_manip Manipulating values
|
||||
* @brief Functions to inspect and change Nix language values, represented by nix_value.
|
||||
* @{
|
||||
*/
|
||||
/** @anchor getters
|
||||
* @name Getters
|
||||
*/
|
||||
/**@{*/
|
||||
/** @brief Get value type
|
||||
* @ingroup value_extract
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value Nix value to inspect
|
||||
* @return type of nix value
|
||||
|
|
@ -183,14 +274,15 @@ nix_err nix_value_decref(nix_c_context * context, nix_value * value);
|
|||
ValueType nix_get_type(nix_c_context * context, const nix_value * value);
|
||||
|
||||
/** @brief Get type name of value as defined in the evaluator
|
||||
* @ingroup value_extract
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value Nix value to inspect
|
||||
* @return type name, owned string
|
||||
* @todo way to free the result
|
||||
* @return type name string, free with free()
|
||||
*/
|
||||
const char * nix_get_typename(nix_c_context * context, const nix_value * value);
|
||||
|
||||
/** @brief Get boolean value
|
||||
* @ingroup value_extract
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value Nix value to inspect
|
||||
* @return true or false, error info via context
|
||||
|
|
@ -198,6 +290,7 @@ const char * nix_get_typename(nix_c_context * context, const nix_value * value);
|
|||
bool nix_get_bool(nix_c_context * context, const nix_value * value);
|
||||
|
||||
/** @brief Get the raw string
|
||||
* @ingroup value_extract
|
||||
*
|
||||
* This may contain placeholders.
|
||||
*
|
||||
|
|
@ -205,21 +298,21 @@ bool nix_get_bool(nix_c_context * context, const nix_value * value);
|
|||
* @param[in] value Nix value to inspect
|
||||
* @param[in] callback Called with the string value.
|
||||
* @param[in] user_data optional, arbitrary data, passed to the callback when it's called.
|
||||
* @return string
|
||||
* @return error code, NIX_OK on success.
|
||||
*/
|
||||
nix_err
|
||||
nix_get_string(nix_c_context * context, const nix_value * value, nix_get_string_callback callback, void * user_data);
|
||||
|
||||
/** @brief Get path as string
|
||||
* @ingroup value_extract
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value Nix value to inspect
|
||||
* @return string, if the type is NIX_TYPE_PATH
|
||||
* @return NULL in case of error.
|
||||
* @return string valid while value is valid, NULL in case of error
|
||||
*/
|
||||
const char * nix_get_path_string(nix_c_context * context, const nix_value * value);
|
||||
|
||||
/** @brief Get the length of a list
|
||||
* @ingroup value_extract
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value Nix value to inspect
|
||||
* @return length of list, error info via context
|
||||
|
|
@ -227,6 +320,7 @@ const char * nix_get_path_string(nix_c_context * context, const nix_value * valu
|
|||
unsigned int nix_get_list_size(nix_c_context * context, const nix_value * value);
|
||||
|
||||
/** @brief Get the element count of an attrset
|
||||
* @ingroup value_extract
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value Nix value to inspect
|
||||
* @return attrset element count, error info via context
|
||||
|
|
@ -234,6 +328,7 @@ unsigned int nix_get_list_size(nix_c_context * context, const nix_value * value)
|
|||
unsigned int nix_get_attrs_size(nix_c_context * context, const nix_value * value);
|
||||
|
||||
/** @brief Get float value in 64 bits
|
||||
* @ingroup value_extract
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value Nix value to inspect
|
||||
* @return float contents, error info via context
|
||||
|
|
@ -241,6 +336,7 @@ unsigned int nix_get_attrs_size(nix_c_context * context, const nix_value * value
|
|||
double nix_get_float(nix_c_context * context, const nix_value * value);
|
||||
|
||||
/** @brief Get int value
|
||||
* @ingroup value_extract
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value Nix value to inspect
|
||||
* @return int contents, error info via context
|
||||
|
|
@ -248,15 +344,18 @@ double nix_get_float(nix_c_context * context, const nix_value * value);
|
|||
int64_t nix_get_int(nix_c_context * context, const nix_value * value);
|
||||
|
||||
/** @brief Get external reference
|
||||
* @ingroup value_extract
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value Nix value to inspect
|
||||
* @return reference to external, NULL in case of error
|
||||
* @return reference valid while value is valid. Call nix_gc_incref() if you need it to live longer, then only in that
|
||||
* case call nix_gc_decref() when done. NULL in case of error
|
||||
*/
|
||||
ExternalValue * nix_get_external(nix_c_context * context, nix_value * value);
|
||||
|
||||
/** @brief Get the ix'th element of a list
|
||||
* @ingroup value_extract
|
||||
*
|
||||
* Owned by the GC. Use nix_gc_decref when you're done with the pointer
|
||||
* Call nix_value_decref() when you're done with the pointer
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value Nix value to inspect
|
||||
* @param[in] state nix evaluator state
|
||||
|
|
@ -266,11 +365,12 @@ ExternalValue * nix_get_external(nix_c_context * context, nix_value * value);
|
|||
nix_value * nix_get_list_byidx(nix_c_context * context, const nix_value * value, EvalState * state, unsigned int ix);
|
||||
|
||||
/** @brief Get the ix'th element of a list without forcing evaluation of the element
|
||||
* @ingroup value_extract
|
||||
*
|
||||
* Returns the list element without forcing its evaluation, allowing access to lazy values.
|
||||
* The list value itself must already be evaluated.
|
||||
*
|
||||
* Owned by the GC. Use nix_gc_decref when you're done with the pointer
|
||||
* Call nix_value_decref() when you're done with the pointer
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value Nix value to inspect (must be an evaluated list)
|
||||
* @param[in] state nix evaluator state
|
||||
|
|
@ -281,8 +381,9 @@ nix_value *
|
|||
nix_get_list_byidx_lazy(nix_c_context * context, const nix_value * value, EvalState * state, unsigned int ix);
|
||||
|
||||
/** @brief Get an attr by name
|
||||
* @ingroup value_extract
|
||||
*
|
||||
* Use nix_gc_decref when you're done with the pointer
|
||||
* Call nix_value_decref() when you're done with the pointer
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value Nix value to inspect
|
||||
* @param[in] state nix evaluator state
|
||||
|
|
@ -292,11 +393,12 @@ nix_get_list_byidx_lazy(nix_c_context * context, const nix_value * value, EvalSt
|
|||
nix_value * nix_get_attr_byname(nix_c_context * context, const nix_value * value, EvalState * state, const char * name);
|
||||
|
||||
/** @brief Get an attribute value by attribute name, without forcing evaluation of the attribute's value
|
||||
* @ingroup value_extract
|
||||
*
|
||||
* Returns the attribute value without forcing its evaluation, allowing access to lazy values.
|
||||
* The attribute set value itself must already be evaluated.
|
||||
*
|
||||
* Use nix_gc_decref when you're done with the pointer
|
||||
* Call nix_value_decref() when you're done with the pointer
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value Nix value to inspect (must be an evaluated attribute set)
|
||||
* @param[in] state nix evaluator state
|
||||
|
|
@ -307,6 +409,7 @@ nix_value *
|
|||
nix_get_attr_byname_lazy(nix_c_context * context, const nix_value * value, EvalState * state, const char * name);
|
||||
|
||||
/** @brief Check if an attribute name exists on a value
|
||||
* @ingroup value_extract
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value Nix value to inspect
|
||||
* @param[in] state nix evaluator state
|
||||
|
|
@ -316,6 +419,7 @@ nix_get_attr_byname_lazy(nix_c_context * context, const nix_value * value, EvalS
|
|||
bool nix_has_attr_byname(nix_c_context * context, const nix_value * value, EvalState * state, const char * name);
|
||||
|
||||
/** @brief Get an attribute by index
|
||||
* @ingroup value_extract
|
||||
*
|
||||
* Also gives you the name.
|
||||
*
|
||||
|
|
@ -329,18 +433,19 @@ bool nix_has_attr_byname(nix_c_context * context, const nix_value * value, EvalS
|
|||
* lexicographic order by Unicode scalar value for valid UTF-8). We recommend
|
||||
* applying this same ordering for consistency.
|
||||
*
|
||||
* Use nix_gc_decref when you're done with the pointer
|
||||
* Call nix_value_decref() when you're done with the pointer
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value Nix value to inspect
|
||||
* @param[in] state nix evaluator state
|
||||
* @param[in] i attribute index
|
||||
* @param[out] name will store a pointer to the attribute name
|
||||
* @param[out] name will store a pointer to the attribute name, valid until state is freed
|
||||
* @return value, NULL in case of errors
|
||||
*/
|
||||
nix_value *
|
||||
nix_get_attr_byidx(nix_c_context * context, nix_value * value, EvalState * state, unsigned int i, const char ** name);
|
||||
|
||||
/** @brief Get an attribute by index, without forcing evaluation of the attribute's value
|
||||
* @ingroup value_extract
|
||||
*
|
||||
* Also gives you the name.
|
||||
*
|
||||
|
|
@ -357,18 +462,19 @@ nix_get_attr_byidx(nix_c_context * context, nix_value * value, EvalState * state
|
|||
* lexicographic order by Unicode scalar value for valid UTF-8). We recommend
|
||||
* applying this same ordering for consistency.
|
||||
*
|
||||
* Use nix_gc_decref when you're done with the pointer
|
||||
* Call nix_value_decref() when you're done with the pointer
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value Nix value to inspect (must be an evaluated attribute set)
|
||||
* @param[in] state nix evaluator state
|
||||
* @param[in] i attribute index
|
||||
* @param[out] name will store a pointer to the attribute name
|
||||
* @param[out] name will store a pointer to the attribute name, valid until state is freed
|
||||
* @return value, NULL in case of errors
|
||||
*/
|
||||
nix_value * nix_get_attr_byidx_lazy(
|
||||
nix_c_context * context, nix_value * value, EvalState * state, unsigned int i, const char ** name);
|
||||
|
||||
/** @brief Get an attribute name by index
|
||||
* @ingroup value_extract
|
||||
*
|
||||
* Returns the attribute name without forcing evaluation of the attribute's value.
|
||||
*
|
||||
|
|
@ -382,16 +488,14 @@ nix_value * nix_get_attr_byidx_lazy(
|
|||
* lexicographic order by Unicode scalar value for valid UTF-8). We recommend
|
||||
* applying this same ordering for consistency.
|
||||
*
|
||||
* Owned by the nix EvalState
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] value Nix value to inspect
|
||||
* @param[in] state nix evaluator state
|
||||
* @param[in] i attribute index
|
||||
* @return name, NULL in case of errors
|
||||
* @return name string valid until state is freed, NULL in case of errors
|
||||
*/
|
||||
const char * nix_get_attr_name_byidx(nix_c_context * context, nix_value * value, EvalState * state, unsigned int i);
|
||||
|
||||
/**@}*/
|
||||
/** @name Initializers
|
||||
*
|
||||
* Values are typically "returned" by initializing already allocated memory that serves as the return value.
|
||||
|
|
@ -401,6 +505,7 @@ const char * nix_get_attr_name_byidx(nix_c_context * context, nix_value * value,
|
|||
*/
|
||||
/**@{*/
|
||||
/** @brief Set boolean value
|
||||
* @ingroup value_create
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[out] value Nix value to modify
|
||||
* @param[in] b the boolean value
|
||||
|
|
@ -409,6 +514,7 @@ const char * nix_get_attr_name_byidx(nix_c_context * context, nix_value * value,
|
|||
nix_err nix_init_bool(nix_c_context * context, nix_value * value, bool b);
|
||||
|
||||
/** @brief Set a string
|
||||
* @ingroup value_create
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[out] value Nix value to modify
|
||||
* @param[in] str the string, copied
|
||||
|
|
@ -417,6 +523,7 @@ nix_err nix_init_bool(nix_c_context * context, nix_value * value, bool b);
|
|||
nix_err nix_init_string(nix_c_context * context, nix_value * value, const char * str);
|
||||
|
||||
/** @brief Set a path
|
||||
* @ingroup value_create
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[out] value Nix value to modify
|
||||
* @param[in] str the path string, copied
|
||||
|
|
@ -425,6 +532,7 @@ nix_err nix_init_string(nix_c_context * context, nix_value * value, const char *
|
|||
nix_err nix_init_path_string(nix_c_context * context, EvalState * s, nix_value * value, const char * str);
|
||||
|
||||
/** @brief Set a float
|
||||
* @ingroup value_create
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[out] value Nix value to modify
|
||||
* @param[in] d the float, 64-bits
|
||||
|
|
@ -433,6 +541,7 @@ nix_err nix_init_path_string(nix_c_context * context, EvalState * s, nix_value *
|
|||
nix_err nix_init_float(nix_c_context * context, nix_value * value, double d);
|
||||
|
||||
/** @brief Set an int
|
||||
* @ingroup value_create
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[out] value Nix value to modify
|
||||
* @param[in] i the int
|
||||
|
|
@ -441,6 +550,7 @@ nix_err nix_init_float(nix_c_context * context, nix_value * value, double d);
|
|||
|
||||
nix_err nix_init_int(nix_c_context * context, nix_value * value, int64_t i);
|
||||
/** @brief Set null
|
||||
* @ingroup value_create
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[out] value Nix value to modify
|
||||
* @return error code, NIX_OK on success.
|
||||
|
|
@ -448,6 +558,7 @@ nix_err nix_init_int(nix_c_context * context, nix_value * value, int64_t i);
|
|||
nix_err nix_init_null(nix_c_context * context, nix_value * value);
|
||||
|
||||
/** @brief Set the value to a thunk that will perform a function application when needed.
|
||||
* @ingroup value_create
|
||||
*
|
||||
* Thunks may be put into attribute sets and lists to perform some computation lazily; on demand.
|
||||
* However, note that in some places, a thunk must not be returned, such as in the return value of a PrimOp.
|
||||
|
|
@ -464,6 +575,7 @@ nix_err nix_init_null(nix_c_context * context, nix_value * value);
|
|||
nix_err nix_init_apply(nix_c_context * context, nix_value * value, nix_value * fn, nix_value * arg);
|
||||
|
||||
/** @brief Set an external value
|
||||
* @ingroup value_create
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[out] value Nix value to modify
|
||||
* @param[in] val the external value to set. Will be GC-referenced by the value.
|
||||
|
|
@ -472,18 +584,25 @@ nix_err nix_init_apply(nix_c_context * context, nix_value * value, nix_value * f
|
|||
nix_err nix_init_external(nix_c_context * context, nix_value * value, ExternalValue * val);
|
||||
|
||||
/** @brief Create a list from a list builder
|
||||
* @ingroup value_create
|
||||
*
|
||||
* After this call, the list builder becomes invalid and cannot be used again.
|
||||
* The only necessary next step is to free it with nix_list_builder_free().
|
||||
*
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] list_builder list builder to use. Make sure to unref this afterwards.
|
||||
* @param[in] list_builder list builder to use
|
||||
* @param[out] value Nix value to modify
|
||||
* @return error code, NIX_OK on success.
|
||||
* @see nix_list_builder_free
|
||||
*/
|
||||
nix_err nix_make_list(nix_c_context * context, ListBuilder * list_builder, nix_value * value);
|
||||
|
||||
/** @brief Create a list builder
|
||||
* @ingroup value_create
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] state nix evaluator state
|
||||
* @param[in] capacity how many bindings you'll add. Don't exceed.
|
||||
* @return owned reference to a list builder. Make sure to unref when you're done.
|
||||
* @return list builder. Call nix_list_builder_free() when you're done.
|
||||
*/
|
||||
ListBuilder * nix_make_list_builder(nix_c_context * context, EvalState * state, size_t capacity);
|
||||
|
||||
|
|
@ -505,14 +624,21 @@ nix_list_builder_insert(nix_c_context * context, ListBuilder * list_builder, uns
|
|||
void nix_list_builder_free(ListBuilder * list_builder);
|
||||
|
||||
/** @brief Create an attribute set from a bindings builder
|
||||
* @ingroup value_create
|
||||
*
|
||||
* After this call, the bindings builder becomes invalid and cannot be used again.
|
||||
* The only necessary next step is to free it with nix_bindings_builder_free().
|
||||
*
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[out] value Nix value to modify
|
||||
* @param[in] b bindings builder to use. Make sure to unref this afterwards.
|
||||
* @param[in] b bindings builder to use
|
||||
* @return error code, NIX_OK on success.
|
||||
* @see nix_bindings_builder_free
|
||||
*/
|
||||
nix_err nix_make_attrs(nix_c_context * context, nix_value * value, BindingsBuilder * b);
|
||||
|
||||
/** @brief Set primop
|
||||
* @ingroup value_create
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[out] value Nix value to modify
|
||||
* @param[in] op primop, will be gc-referenced by the value
|
||||
|
|
@ -521,6 +647,7 @@ nix_err nix_make_attrs(nix_c_context * context, nix_value * value, BindingsBuild
|
|||
*/
|
||||
nix_err nix_init_primop(nix_c_context * context, nix_value * value, PrimOp * op);
|
||||
/** @brief Copy from another value
|
||||
* @ingroup value_create
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[out] value Nix value to modify
|
||||
* @param[in] source value to copy from
|
||||
|
|
@ -530,12 +657,11 @@ nix_err nix_copy_value(nix_c_context * context, nix_value * value, const nix_val
|
|||
/**@}*/
|
||||
|
||||
/** @brief Create a bindings builder
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] state nix evaluator state
|
||||
* @param[in] capacity how many bindings you'll add. Don't exceed.
|
||||
* @return owned reference to a bindings builder. Make sure to unref when you're
|
||||
done.
|
||||
*/
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] state nix evaluator state
|
||||
* @param[in] capacity how many bindings you'll add. Don't exceed.
|
||||
* @return bindings builder. Call nix_bindings_builder_free() when you're done.
|
||||
*/
|
||||
BindingsBuilder * nix_make_bindings_builder(nix_c_context * context, EvalState * state, size_t capacity);
|
||||
|
||||
/** @brief Insert bindings into a builder
|
||||
|
|
@ -554,7 +680,6 @@ nix_bindings_builder_insert(nix_c_context * context, BindingsBuilder * builder,
|
|||
* @param[in] builder the builder to free
|
||||
*/
|
||||
void nix_bindings_builder_free(BindingsBuilder * builder);
|
||||
/**@}*/
|
||||
|
||||
/** @brief Realise a string context.
|
||||
*
|
||||
|
|
@ -571,13 +696,13 @@ void nix_bindings_builder_free(BindingsBuilder * builder);
|
|||
* @param[in] isIFD If true, disallow derivation outputs if setting `allow-import-from-derivation` is false.
|
||||
You should set this to true when this call is part of a primop.
|
||||
You should set this to false when building for your application's purpose.
|
||||
* @return NULL if failed, are a new nix_realised_string, which must be freed with nix_realised_string_free
|
||||
* @return NULL if failed, or a new nix_realised_string, which must be freed with nix_realised_string_free
|
||||
*/
|
||||
nix_realised_string * nix_string_realise(nix_c_context * context, EvalState * state, nix_value * value, bool isIFD);
|
||||
|
||||
/** @brief Start of the string
|
||||
* @param[in] realised_string
|
||||
* @return pointer to the start of the string. It may not be null-terminated.
|
||||
* @return pointer to the start of the string, valid until realised_string is freed. It may not be null-terminated.
|
||||
*/
|
||||
const char * nix_realised_string_get_buffer_start(nix_realised_string * realised_string);
|
||||
|
||||
|
|
@ -596,7 +721,7 @@ size_t nix_realised_string_get_store_path_count(nix_realised_string * realised_s
|
|||
/** @brief Get a store path. The store paths are stored in an arbitrary order.
|
||||
* @param[in] realised_string
|
||||
* @param[in] index index of the store path, must be less than the count
|
||||
* @return store path
|
||||
* @return store path valid until realised_string is freed
|
||||
*/
|
||||
const StorePath * nix_realised_string_get_store_path(nix_realised_string * realised_string, size_t index);
|
||||
|
||||
|
|
@ -610,5 +735,4 @@ void nix_realised_string_free(nix_realised_string * realised_string);
|
|||
}
|
||||
#endif
|
||||
|
||||
/** @} */
|
||||
#endif // NIX_API_VALUE_H
|
||||
|
|
|
|||
|
|
@ -26,11 +26,20 @@ public:
|
|||
}
|
||||
|
||||
protected:
|
||||
LibExprTest()
|
||||
LibExprTest(ref<Store> store, auto && makeEvalSettings)
|
||||
: LibStoreTest()
|
||||
, evalSettings(makeEvalSettings(readOnlyMode))
|
||||
, state({}, store, fetchSettings, evalSettings, nullptr)
|
||||
{
|
||||
evalSettings.nixPath = {};
|
||||
}
|
||||
|
||||
LibExprTest()
|
||||
: LibExprTest(openStore("dummy://"), [](bool & readOnlyMode) {
|
||||
EvalSettings settings{readOnlyMode};
|
||||
settings.nixPath = {};
|
||||
return settings;
|
||||
})
|
||||
{
|
||||
}
|
||||
|
||||
Value eval(std::string input, bool forceValue = true)
|
||||
|
|
|
|||
|
|
@ -31,7 +31,6 @@ rapidcheck = dependency('rapidcheck')
|
|||
deps_public += rapidcheck
|
||||
|
||||
subdir('nix-meson-build-support/common')
|
||||
subdir('nix-meson-build-support/asan-options')
|
||||
|
||||
sources = files(
|
||||
'tests/value/context.cc',
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@
|
|||
|
||||
#include "nix/expr/eval.hh"
|
||||
#include "nix/expr/tests/libexpr.hh"
|
||||
#include "nix/util/memory-source-accessor.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
|
@ -174,4 +175,41 @@ TEST_F(EvalStateTest, getBuiltin_fail)
|
|||
ASSERT_THROW(state.getBuiltin("nonexistent"), EvalError);
|
||||
}
|
||||
|
||||
class PureEvalTest : public LibExprTest
|
||||
{
|
||||
public:
|
||||
PureEvalTest()
|
||||
: LibExprTest(openStore("dummy://", {{"read-only", "false"}}), [](bool & readOnlyMode) {
|
||||
EvalSettings settings{readOnlyMode};
|
||||
settings.pureEval = true;
|
||||
settings.restrictEval = true;
|
||||
return settings;
|
||||
})
|
||||
{
|
||||
}
|
||||
};
|
||||
|
||||
TEST_F(PureEvalTest, pathExists)
|
||||
{
|
||||
ASSERT_THAT(eval("builtins.pathExists /."), IsFalse());
|
||||
ASSERT_THAT(eval("builtins.pathExists /nix"), IsFalse());
|
||||
ASSERT_THAT(eval("builtins.pathExists /nix/store"), IsFalse());
|
||||
|
||||
{
|
||||
std::string contents = "Lorem ipsum";
|
||||
|
||||
StringSource s{contents};
|
||||
auto path = state.store->addToStoreFromDump(
|
||||
s, "source", FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256);
|
||||
auto printed = store->printStorePath(path);
|
||||
|
||||
ASSERT_THROW(eval(fmt("builtins.readFile %s", printed)), RestrictedPathError);
|
||||
ASSERT_THAT(eval(fmt("builtins.pathExists %s", printed)), IsFalse());
|
||||
|
||||
ASSERT_THROW(eval("builtins.readDir /."), RestrictedPathError);
|
||||
state.allowPath(path); // FIXME: This shouldn't behave this way.
|
||||
ASSERT_THAT(eval("builtins.readDir /."), IsAttrsOfSize(0));
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace nix
|
||||
|
|
|
|||
|
|
@ -45,7 +45,6 @@ config_priv_h = configure_file(
|
|||
)
|
||||
|
||||
subdir('nix-meson-build-support/common')
|
||||
subdir('nix-meson-build-support/asan-options')
|
||||
|
||||
sources = files(
|
||||
'derived-path.cc',
|
||||
|
|
@ -83,7 +82,7 @@ this_exe = executable(
|
|||
test(
|
||||
meson.project_name(),
|
||||
this_exe,
|
||||
env : asan_test_options_env + {
|
||||
env : {
|
||||
'_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data',
|
||||
},
|
||||
protocol : 'gtest',
|
||||
|
|
|
|||
|
|
@ -62,7 +62,6 @@ mkMesonExecutable (finalAttrs: {
|
|||
mkdir -p "$HOME"
|
||||
''
|
||||
+ ''
|
||||
export ASAN_OPTIONS=abort_on_error=1:print_summary=1:detect_leaks=0
|
||||
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
|
|
|
|||
|
|
@ -771,7 +771,7 @@ TEST_F(PrimOpTest, derivation)
|
|||
ASSERT_EQ(v.type(), nFunction);
|
||||
ASSERT_TRUE(v.isLambda());
|
||||
ASSERT_NE(v.lambda().fun, nullptr);
|
||||
ASSERT_TRUE(v.lambda().fun->hasFormals());
|
||||
ASSERT_TRUE(v.lambda().fun->getFormals());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, currentTime)
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
#include "nix/expr/tests/libexpr.hh"
|
||||
#include "nix/util/tests/gmock-matchers.hh"
|
||||
|
||||
namespace nix {
|
||||
// Testing of trivial expressions
|
||||
|
|
@ -160,7 +161,8 @@ TEST_F(TrivialExpressionTest, assertPassed)
|
|||
ASSERT_THAT(v, IsIntEq(123));
|
||||
}
|
||||
|
||||
class AttrSetMergeTrvialExpressionTest : public TrivialExpressionTest, public testing::WithParamInterface<const char *>
|
||||
class AttrSetMergeTrvialExpressionTest : public TrivialExpressionTest,
|
||||
public ::testing::WithParamInterface<const char *>
|
||||
{};
|
||||
|
||||
TEST_P(AttrSetMergeTrvialExpressionTest, attrsetMergeLazy)
|
||||
|
|
@ -196,7 +198,7 @@ TEST_P(AttrSetMergeTrvialExpressionTest, attrsetMergeLazy)
|
|||
INSTANTIATE_TEST_SUITE_P(
|
||||
attrsetMergeLazy,
|
||||
AttrSetMergeTrvialExpressionTest,
|
||||
testing::Values("{ a.b = 1; a.c = 2; }", "{ a = { b = 1; }; a = { c = 2; }; }"));
|
||||
::testing::Values("{ a.b = 1; a.c = 2; }", "{ a = { b = 1; }; a = { c = 2; }; }"));
|
||||
|
||||
// The following macros ultimately define 48 tests (16 variations on three
|
||||
// templates). Each template tests an expression that can be written in 2^4
|
||||
|
|
@ -339,4 +341,18 @@ TEST_F(TrivialExpressionTest, orCantBeUsed)
|
|||
{
|
||||
ASSERT_THROW(eval("let or = 1; in or"), Error);
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, tooManyFormals)
|
||||
{
|
||||
std::string expr = "let f = { ";
|
||||
for (uint32_t i = 0; i <= std::numeric_limits<uint16_t>::max(); ++i) {
|
||||
expr += fmt("arg%d, ", i);
|
||||
}
|
||||
expr += " }: 0 in; f {}";
|
||||
ASSERT_THAT(
|
||||
[&]() { eval(expr); },
|
||||
::testing::ThrowsMessage<Error>(::nix::testing::HasSubstrIgnoreANSIMatcher(
|
||||
"too many formal arguments, implementation supports at most 65535")));
|
||||
}
|
||||
|
||||
} /* namespace nix */
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ using namespace testing;
|
|||
struct ValuePrintingTests : LibExprTest
|
||||
{
|
||||
template<class... A>
|
||||
void test(Value v, std::string_view expected, A... args)
|
||||
void test(Value & v, std::string_view expected, A... args)
|
||||
{
|
||||
std::stringstream out;
|
||||
v.print(state, out, args...);
|
||||
|
|
@ -110,9 +110,8 @@ TEST_F(ValuePrintingTests, vLambda)
|
|||
PosTable::Origin origin = state.positions.addOrigin(std::monostate(), 1);
|
||||
auto posIdx = state.positions.add(origin, 0);
|
||||
auto body = ExprInt(0);
|
||||
auto formals = Formals{};
|
||||
|
||||
ExprLambda eLambda(posIdx, createSymbol("a"), &formals, &body);
|
||||
ExprLambda eLambda(posIdx, createSymbol("a"), &body);
|
||||
|
||||
Value vLambda;
|
||||
vLambda.mkLambda(&env, &eLambda);
|
||||
|
|
@ -500,9 +499,8 @@ TEST_F(ValuePrintingTests, ansiColorsLambda)
|
|||
PosTable::Origin origin = state.positions.addOrigin(std::monostate(), 1);
|
||||
auto posIdx = state.positions.add(origin, 0);
|
||||
auto body = ExprInt(0);
|
||||
auto formals = Formals{};
|
||||
|
||||
ExprLambda eLambda(posIdx, createSymbol("a"), &formals, &body);
|
||||
ExprLambda eLambda(posIdx, createSymbol("a"), &body);
|
||||
|
||||
Value vLambda;
|
||||
vLambda.mkLambda(&env, &eLambda);
|
||||
|
|
@ -625,10 +623,11 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided)
|
|||
vThree.mkInt(3);
|
||||
|
||||
builder.insert(state.symbols.create("three"), &vThree);
|
||||
vAttrs.mkAttrs(builder.finish());
|
||||
Value vAttrs2;
|
||||
vAttrs2.mkAttrs(builder.finish());
|
||||
|
||||
test(
|
||||
vAttrs,
|
||||
vAttrs2,
|
||||
"{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«2 attributes elided»" ANSI_NORMAL " }",
|
||||
PrintOptions{.ansiColors = true, .maxAttrs = 1});
|
||||
}
|
||||
|
|
|
|||
|
|
@ -324,7 +324,7 @@ void SampleStack::saveProfile()
|
|||
std::visit([&](auto && info) { info.symbolize(state, os, posCache); }, pos);
|
||||
}
|
||||
os << " " << count;
|
||||
writeLine(profileFd.get(), std::move(os).str());
|
||||
writeLine(profileFd.get(), os.str());
|
||||
/* Clear ostringstream. */
|
||||
os.str("");
|
||||
os.clear();
|
||||
|
|
|
|||
|
|
@ -236,24 +236,17 @@ EvalState::EvalState(
|
|||
{CanonPath(store->storeDir), store->getFSAccessor(settings.pureEval)},
|
||||
}))
|
||||
, rootFS([&] {
|
||||
auto accessor = [&]() -> decltype(rootFS) {
|
||||
/* In pure eval mode, we provide a filesystem that only
|
||||
contains the Nix store. */
|
||||
if (settings.pureEval)
|
||||
return storeFS;
|
||||
/* In pure eval mode, we provide a filesystem that only
|
||||
contains the Nix store.
|
||||
|
||||
/* If we have a chroot store and pure eval is not enabled,
|
||||
use a union accessor to make the chroot store available
|
||||
at its logical location while still having the underlying
|
||||
directory available. This is necessary for instance if
|
||||
we're evaluating a file from the physical /nix/store
|
||||
while using a chroot store. */
|
||||
auto realStoreDir = dirOf(store->toRealPath(StorePath::dummy));
|
||||
if (store->storeDir != realStoreDir)
|
||||
return makeUnionSourceAccessor({getFSSourceAccessor(), storeFS});
|
||||
|
||||
return getFSSourceAccessor();
|
||||
}();
|
||||
Otherwise, use a union accessor to make the augmented store
|
||||
available at its logical location while still having the
|
||||
underlying directory available. This is necessary for
|
||||
instance if we're evaluating a file from the physical
|
||||
/nix/store while using a chroot store, and also for lazy
|
||||
mounted fetchTree. */
|
||||
auto accessor = settings.pureEval ? storeFS.cast<SourceAccessor>()
|
||||
: makeUnionSourceAccessor({getFSSourceAccessor(), storeFS});
|
||||
|
||||
/* Apply access control if needed. */
|
||||
if (settings.restrictEval || settings.pureEval)
|
||||
|
|
@ -268,7 +261,7 @@ EvalState::EvalState(
|
|||
}())
|
||||
, corepkgsFS(make_ref<MemorySourceAccessor>())
|
||||
, internalFS(make_ref<MemorySourceAccessor>())
|
||||
, derivationInternal{corepkgsFS->addFile(
|
||||
, derivationInternal{internalFS->addFile(
|
||||
CanonPath("derivation-internal.nix"),
|
||||
#include "primops/derivation.nix.gen.hh"
|
||||
)}
|
||||
|
|
@ -591,7 +584,7 @@ std::optional<EvalState::Doc> EvalState::getDoc(Value & v)
|
|||
.name = name,
|
||||
.arity = 0, // FIXME: figure out how deep by syntax only? It's not semantically useful though...
|
||||
.args = {},
|
||||
.doc = makeImmutableString(toView(s)), // NOTE: memory leak when compiled without GC
|
||||
.doc = makeImmutableString(s.view()), // NOTE: memory leak when compiled without GC
|
||||
};
|
||||
}
|
||||
if (isFunctor(v)) {
|
||||
|
|
@ -1341,7 +1334,7 @@ void ExprVar::eval(EvalState & state, Env & env, Value & v)
|
|||
v = *v2;
|
||||
}
|
||||
|
||||
static std::string showAttrPath(EvalState & state, Env & env, const AttrPath & attrPath)
|
||||
static std::string showAttrPath(EvalState & state, Env & env, std::span<const AttrName> attrPath)
|
||||
{
|
||||
std::ostringstream out;
|
||||
bool first = true;
|
||||
|
|
@ -1377,10 +1370,10 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
|
|||
env,
|
||||
getPos(),
|
||||
"while evaluating the attribute '%1%'",
|
||||
showAttrPath(state, env, attrPath))
|
||||
showAttrPath(state, env, getAttrPath()))
|
||||
: nullptr;
|
||||
|
||||
for (auto & i : attrPath) {
|
||||
for (auto & i : getAttrPath()) {
|
||||
state.nrLookups++;
|
||||
const Attr * j;
|
||||
auto name = getName(i, state, env);
|
||||
|
|
@ -1418,7 +1411,7 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
|
|||
auto origin = std::get_if<SourcePath>(&pos2r.origin);
|
||||
if (!(origin && *origin == state.derivationInternal))
|
||||
state.addErrorTrace(
|
||||
e, pos2, "while evaluating the attribute '%1%'", showAttrPath(state, env, attrPath));
|
||||
e, pos2, "while evaluating the attribute '%1%'", showAttrPath(state, env, getAttrPath()));
|
||||
}
|
||||
throw;
|
||||
}
|
||||
|
|
@ -1429,13 +1422,13 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
|
|||
Symbol ExprSelect::evalExceptFinalSelect(EvalState & state, Env & env, Value & attrs)
|
||||
{
|
||||
Value vTmp;
|
||||
Symbol name = getName(attrPath[attrPath.size() - 1], state, env);
|
||||
Symbol name = getName(attrPathStart[nAttrPath - 1], state, env);
|
||||
|
||||
if (attrPath.size() == 1) {
|
||||
if (nAttrPath == 1) {
|
||||
e->eval(state, env, vTmp);
|
||||
} else {
|
||||
ExprSelect init(*this);
|
||||
init.attrPath.pop_back();
|
||||
init.nAttrPath--;
|
||||
init.eval(state, env, vTmp);
|
||||
}
|
||||
attrs = vTmp;
|
||||
|
|
@ -1503,15 +1496,13 @@ void EvalState::callFunction(Value & fun, std::span<Value *> args, Value & vRes,
|
|||
|
||||
ExprLambda & lambda(*vCur.lambda().fun);
|
||||
|
||||
auto size = (!lambda.arg ? 0 : 1) + (lambda.hasFormals() ? lambda.formals->formals.size() : 0);
|
||||
auto size = (!lambda.arg ? 0 : 1) + (lambda.getFormals() ? lambda.getFormals()->formals.size() : 0);
|
||||
Env & env2(mem.allocEnv(size));
|
||||
env2.up = vCur.lambda().env;
|
||||
|
||||
Displacement displ = 0;
|
||||
|
||||
if (!lambda.hasFormals())
|
||||
env2.values[displ++] = args[0];
|
||||
else {
|
||||
if (auto formals = lambda.getFormals()) {
|
||||
try {
|
||||
forceAttrs(*args[0], lambda.pos, "while evaluating the value passed for the lambda argument");
|
||||
} catch (Error & e) {
|
||||
|
|
@ -1527,7 +1518,7 @@ void EvalState::callFunction(Value & fun, std::span<Value *> args, Value & vRes,
|
|||
there is no matching actual argument but the formal
|
||||
argument has a default, use the default. */
|
||||
size_t attrsUsed = 0;
|
||||
for (auto & i : lambda.formals->formals) {
|
||||
for (auto & i : formals->formals) {
|
||||
auto j = args[0]->attrs()->get(i.name);
|
||||
if (!j) {
|
||||
if (!i.def) {
|
||||
|
|
@ -1549,13 +1540,13 @@ void EvalState::callFunction(Value & fun, std::span<Value *> args, Value & vRes,
|
|||
|
||||
/* Check that each actual argument is listed as a formal
|
||||
argument (unless the attribute match specifies a `...'). */
|
||||
if (!lambda.formals->ellipsis && attrsUsed != args[0]->attrs()->size()) {
|
||||
if (!formals->ellipsis && attrsUsed != args[0]->attrs()->size()) {
|
||||
/* Nope, so show the first unexpected argument to the
|
||||
user. */
|
||||
for (auto & i : *args[0]->attrs())
|
||||
if (!lambda.formals->has(i.name)) {
|
||||
if (!formals->has(i.name)) {
|
||||
StringSet formalNames;
|
||||
for (auto & formal : lambda.formals->formals)
|
||||
for (auto & formal : formals->formals)
|
||||
formalNames.insert(std::string(symbols[formal.name]));
|
||||
auto suggestions = Suggestions::bestMatches(formalNames, symbols[i.name]);
|
||||
error<TypeError>(
|
||||
|
|
@ -1570,6 +1561,8 @@ void EvalState::callFunction(Value & fun, std::span<Value *> args, Value & vRes,
|
|||
}
|
||||
unreachable();
|
||||
}
|
||||
} else {
|
||||
env2.values[displ++] = args[0];
|
||||
}
|
||||
|
||||
nrFunctionCalls++;
|
||||
|
|
@ -1754,14 +1747,15 @@ void EvalState::autoCallFunction(const Bindings & args, Value & fun, Value & res
|
|||
}
|
||||
}
|
||||
|
||||
if (!fun.isLambda() || !fun.lambda().fun->hasFormals()) {
|
||||
if (!fun.isLambda() || !fun.lambda().fun->getFormals()) {
|
||||
res = fun;
|
||||
return;
|
||||
}
|
||||
auto formals = fun.lambda().fun->getFormals();
|
||||
|
||||
auto attrs = buildBindings(std::max(static_cast<uint32_t>(fun.lambda().fun->formals->formals.size()), args.size()));
|
||||
auto attrs = buildBindings(std::max(static_cast<uint32_t>(formals->formals.size()), args.size()));
|
||||
|
||||
if (fun.lambda().fun->formals->ellipsis) {
|
||||
if (formals->ellipsis) {
|
||||
// If the formals have an ellipsis (eg the function accepts extra args) pass
|
||||
// all available automatic arguments (which includes arguments specified on
|
||||
// the command line via --arg/--argstr)
|
||||
|
|
@ -1769,7 +1763,7 @@ void EvalState::autoCallFunction(const Bindings & args, Value & fun, Value & res
|
|||
attrs.insert(v);
|
||||
} else {
|
||||
// Otherwise, only pass the arguments that the function accepts
|
||||
for (auto & i : fun.lambda().fun->formals->formals) {
|
||||
for (auto & i : formals->formals) {
|
||||
auto j = args.get(i.name);
|
||||
if (j) {
|
||||
attrs.insert(*j);
|
||||
|
|
@ -1811,7 +1805,7 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v)
|
|||
if (!state.evalBool(env, cond, pos, "in the condition of the assert statement")) {
|
||||
std::ostringstream out;
|
||||
cond->show(state.symbols, out);
|
||||
auto exprStr = toView(out);
|
||||
auto exprStr = out.view();
|
||||
|
||||
if (auto eq = dynamic_cast<ExprOpEq *>(cond)) {
|
||||
try {
|
||||
|
|
@ -2027,7 +2021,7 @@ void EvalState::concatLists(
|
|||
void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
|
||||
{
|
||||
NixStringContext context;
|
||||
std::vector<BackedStringView> s;
|
||||
std::vector<BackedStringView> strings;
|
||||
size_t sSize = 0;
|
||||
NixInt n{0};
|
||||
NixFloat nf = 0;
|
||||
|
|
@ -2035,32 +2029,11 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
|
|||
bool first = !forceString;
|
||||
ValueType firstType = nString;
|
||||
|
||||
const auto str = [&] {
|
||||
std::string result;
|
||||
result.reserve(sSize);
|
||||
for (const auto & part : s)
|
||||
result += *part;
|
||||
return result;
|
||||
};
|
||||
/* c_str() is not str().c_str() because we want to create a string
|
||||
Value. allocating a GC'd string directly and moving it into a
|
||||
Value lets us avoid an allocation and copy. */
|
||||
const auto c_str = [&] {
|
||||
char * result = allocString(sSize + 1);
|
||||
char * tmp = result;
|
||||
for (const auto & part : s) {
|
||||
memcpy(tmp, part->data(), part->size());
|
||||
tmp += part->size();
|
||||
}
|
||||
*tmp = 0;
|
||||
return result;
|
||||
};
|
||||
|
||||
// List of returned strings. References to these Values must NOT be persisted.
|
||||
SmallTemporaryValueVector<conservativeStackReservation> values(es->size());
|
||||
SmallTemporaryValueVector<conservativeStackReservation> values(es.size());
|
||||
Value * vTmpP = values.data();
|
||||
|
||||
for (auto & [i_pos, i] : *es) {
|
||||
for (auto & [i_pos, i] : es) {
|
||||
Value & vTmp = *vTmpP++;
|
||||
i->eval(state, env, vTmp);
|
||||
|
||||
|
|
@ -2103,33 +2076,46 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
|
|||
.withFrame(env, *this)
|
||||
.debugThrow();
|
||||
} else {
|
||||
if (s.empty())
|
||||
s.reserve(es->size());
|
||||
if (strings.empty())
|
||||
strings.reserve(es.size());
|
||||
/* skip canonization of first path, which would only be not
|
||||
canonized in the first place if it's coming from a ./${foo} type
|
||||
path */
|
||||
auto part = state.coerceToString(
|
||||
i_pos, vTmp, context, "while evaluating a path segment", false, firstType == nString, !first);
|
||||
sSize += part->size();
|
||||
s.emplace_back(std::move(part));
|
||||
strings.emplace_back(std::move(part));
|
||||
}
|
||||
|
||||
first = false;
|
||||
}
|
||||
|
||||
if (firstType == nInt)
|
||||
if (firstType == nInt) {
|
||||
v.mkInt(n);
|
||||
else if (firstType == nFloat)
|
||||
} else if (firstType == nFloat) {
|
||||
v.mkFloat(nf);
|
||||
else if (firstType == nPath) {
|
||||
} else if (firstType == nPath) {
|
||||
if (!context.empty())
|
||||
state.error<EvalError>("a string that refers to a store path cannot be appended to a path")
|
||||
.atPos(pos)
|
||||
.withFrame(env, *this)
|
||||
.debugThrow();
|
||||
v.mkPath(state.rootPath(CanonPath(str())));
|
||||
} else
|
||||
v.mkStringMove(c_str(), context);
|
||||
std::string result_str;
|
||||
result_str.reserve(sSize);
|
||||
for (const auto & part : strings) {
|
||||
result_str += *part;
|
||||
}
|
||||
v.mkPath(state.rootPath(CanonPath(result_str)));
|
||||
} else {
|
||||
char * result_str = allocString(sSize + 1);
|
||||
char * tmp = result_str;
|
||||
for (const auto & part : strings) {
|
||||
memcpy(tmp, part->data(), part->size());
|
||||
tmp += part->size();
|
||||
}
|
||||
*tmp = 0;
|
||||
v.mkStringMove(result_str, context);
|
||||
}
|
||||
}
|
||||
|
||||
void ExprPos::eval(EvalState & state, Env & env, Value & v)
|
||||
|
|
@ -2167,30 +2153,28 @@ void EvalState::forceValueDeep(Value & v)
|
|||
{
|
||||
std::set<const Value *> seen;
|
||||
|
||||
std::function<void(Value & v)> recurse;
|
||||
|
||||
recurse = [&](Value & v) {
|
||||
[&, &state(*this)](this const auto & recurse, Value & v) {
|
||||
if (!seen.insert(&v).second)
|
||||
return;
|
||||
|
||||
forceValue(v, v.determinePos(noPos));
|
||||
state.forceValue(v, v.determinePos(noPos));
|
||||
|
||||
if (v.type() == nAttrs) {
|
||||
for (auto & i : *v.attrs())
|
||||
try {
|
||||
// If the value is a thunk, we're evaling. Otherwise no trace necessary.
|
||||
auto dts = debugRepl && i.value->isThunk() ? makeDebugTraceStacker(
|
||||
*this,
|
||||
*i.value->thunk().expr,
|
||||
*i.value->thunk().env,
|
||||
i.pos,
|
||||
"while evaluating the attribute '%1%'",
|
||||
symbols[i.name])
|
||||
: nullptr;
|
||||
auto dts = state.debugRepl && i.value->isThunk() ? makeDebugTraceStacker(
|
||||
state,
|
||||
*i.value->thunk().expr,
|
||||
*i.value->thunk().env,
|
||||
i.pos,
|
||||
"while evaluating the attribute '%1%'",
|
||||
state.symbols[i.name])
|
||||
: nullptr;
|
||||
|
||||
recurse(*i.value);
|
||||
} catch (Error & e) {
|
||||
addErrorTrace(e, i.pos, "while evaluating the attribute '%1%'", symbols[i.name]);
|
||||
state.addErrorTrace(e, i.pos, "while evaluating the attribute '%1%'", state.symbols[i.name]);
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
|
@ -2199,9 +2183,7 @@ void EvalState::forceValueDeep(Value & v)
|
|||
for (auto v2 : v.listView())
|
||||
recurse(*v2);
|
||||
}
|
||||
};
|
||||
|
||||
recurse(v);
|
||||
}(v);
|
||||
}
|
||||
|
||||
NixInt EvalState::forceInt(Value & v, const PosIdx pos, std::string_view errorCtx)
|
||||
|
|
@ -3074,7 +3056,7 @@ Expr * EvalState::parseExprFromFile(const SourcePath & path)
|
|||
return parseExprFromFile(path, staticBaseEnv);
|
||||
}
|
||||
|
||||
Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
Expr * EvalState::parseExprFromFile(const SourcePath & path, const std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
auto buffer = path.resolveSymlinks().readFile();
|
||||
// readFile hopefully have left some extra space for terminators
|
||||
|
|
@ -3082,8 +3064,8 @@ Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<Sta
|
|||
return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv);
|
||||
}
|
||||
|
||||
Expr *
|
||||
EvalState::parseExprFromString(std::string s_, const SourcePath & basePath, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
Expr * EvalState::parseExprFromString(
|
||||
std::string s_, const SourcePath & basePath, const std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
// NOTE this method (and parseStdin) must take care to *fully copy* their input
|
||||
// into their respective Pos::Origin until the parser stops overwriting its input
|
||||
|
|
@ -3133,6 +3115,11 @@ SourcePath EvalState::findFile(const LookupPath & lookupPath, const std::string_
|
|||
auto res = (r / CanonPath(suffix)).resolveSymlinks();
|
||||
if (res.pathExists())
|
||||
return res;
|
||||
|
||||
// Backward compatibility hack: throw an exception if access
|
||||
// to this path is not allowed.
|
||||
if (auto accessor = res.accessor.dynamic_pointer_cast<FilteringSourceAccessor>())
|
||||
accessor->checkAccess(res.path);
|
||||
}
|
||||
|
||||
if (hasPrefix(path, "nix/"))
|
||||
|
|
@ -3199,6 +3186,11 @@ std::optional<SourcePath> EvalState::resolveLookupPathPath(const LookupPath::Pat
|
|||
if (path.resolveSymlinks().pathExists())
|
||||
return finish(std::move(path));
|
||||
else {
|
||||
// Backward compatibility hack: throw an exception if access
|
||||
// to this path is not allowed.
|
||||
if (auto accessor = path.accessor.dynamic_pointer_cast<FilteringSourceAccessor>())
|
||||
accessor->checkAccess(path.path);
|
||||
|
||||
logWarning({.msg = HintFmt("Nix search path entry '%1%' does not exist, ignoring", value)});
|
||||
}
|
||||
}
|
||||
|
|
@ -3207,7 +3199,11 @@ std::optional<SourcePath> EvalState::resolveLookupPathPath(const LookupPath::Pat
|
|||
}
|
||||
|
||||
Expr * EvalState::parse(
|
||||
char * text, size_t length, Pos::Origin origin, const SourcePath & basePath, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
char * text,
|
||||
size_t length,
|
||||
Pos::Origin origin,
|
||||
const SourcePath & basePath,
|
||||
const std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
DocCommentMap tmpDocComments; // Only used when not origin is not a SourcePath
|
||||
DocCommentMap * docComments = &tmpDocComments;
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue