From d60a8ee8b0d66ff75a32dedec13ef4c81c13e3f1 Mon Sep 17 00:00:00 2001 From: Matthew Kenigsberg Date: Mon, 9 Jun 2025 16:46:20 -0600 Subject: [PATCH 001/382] Improve database lock permission error context Add helpful context when opening the Nix database lock fails due to permission errors. Instead of just showing "Permission denied", now provides guidance about possible causes: - Running as non-root in a single-user Nix installation - Nix daemon may have crashed --- src/libstore/local-store.cc | 11 ++++++++++- tests/functional/read-only-store.sh | 3 ++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 76fadba86..da266fa7e 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -221,7 +221,16 @@ LocalStore::LocalStore(ref config) schema upgrade is in progress. */ if (!config->readOnly) { Path globalLockPath = dbDir + "/big-lock"; - globalLock = openLockFile(globalLockPath.c_str(), true); + try { + globalLock = openLockFile(globalLockPath.c_str(), true); + } catch (SysError & e) { + if (e.errNo == EACCES || e.errNo == EPERM) { + e.addTrace({}, + "This command may have been run as non-root in a single-user Nix installation,\n" + "or the Nix daemon may have crashed."); + } + throw; + } } if (!config->readOnly && !lockFile(globalLock.get(), ltRead, false)) { diff --git a/tests/functional/read-only-store.sh b/tests/functional/read-only-store.sh index f6b6eaf32..ea96bba41 100755 --- a/tests/functional/read-only-store.sh +++ b/tests/functional/read-only-store.sh @@ -42,7 +42,8 @@ chmod -R -w $TEST_ROOT/var # Make sure we fail on add operations on the read-only store # This is only for adding files that are not *already* in the store -expectStderr 1 nix-store --add eval.nix | grepQuiet "error: opening lock file '$(readlink -e $TEST_ROOT)/var/nix/db/big-lock'" +# Should show enhanced error message with helpful context +expectStderr 1 nix-store --add eval.nix | grepQuiet "This command may have been run as non-root in a single-user Nix installation" expectStderr 1 nix-store --store local?read-only=true --add eval.nix | grepQuiet "Permission denied" # Test the same operations from before should again succeed From fb5e22e3184ccd18c038eecf67e9fd350a8d9b4f Mon Sep 17 00:00:00 2001 From: DavHau Date: Fri, 27 Jun 2025 15:35:46 +0700 Subject: [PATCH 002/382] build-cores: detect cores automatically if set to 0 This changes makes nix detect a machines available cores automatically whenever build-cores is set to 0. So far, nix simply passed NIX_BUILD_CORES=0 whenever build-cores is set to 0. (only when build-cores is unset it was detecting cores automatically) The behavior of passing NIX_BUILD_CORES=0 leads to a performance penalty when sourcing nixpkgs' generic builder's `setup.sh`, as setup.sh has to execute `nproc`. 
This significantly slows down sourcing of setup.sh --- doc/manual/rl-next/build-cores-auto-detect.md | 6 ++++ src/libstore/globals.cc | 2 +- src/libstore/include/nix/store/globals.hh | 12 +++---- src/libstore/unix/build/derivation-builder.cc | 2 +- src/nix-build/nix-build.cc | 2 +- tests/functional/build-cores.nix | 11 +++++++ tests/functional/build-cores.sh | 32 +++++++++++++++++++ tests/functional/meson.build | 1 + 8 files changed, 58 insertions(+), 10 deletions(-) create mode 100644 doc/manual/rl-next/build-cores-auto-detect.md create mode 100644 tests/functional/build-cores.nix create mode 100755 tests/functional/build-cores.sh diff --git a/doc/manual/rl-next/build-cores-auto-detect.md b/doc/manual/rl-next/build-cores-auto-detect.md new file mode 100644 index 000000000..67ab6995b --- /dev/null +++ b/doc/manual/rl-next/build-cores-auto-detect.md @@ -0,0 +1,6 @@ +--- +synopsis: "`build-cores = 0` now auto-detects CPU cores" +prs: [13402] +--- + +When `build-cores` is set to `0`, nix now automatically detects the number of available CPU cores and passes this value via `NIX_BUILD_CORES`, instead of passing `0` directly. This matches the behavior when `build-cores` is unset. This prevents the builder from having to detect the number of cores. diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index de5128347..1f80cb379 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -140,7 +140,7 @@ std::vector getUserConfigFiles() return files; } -unsigned int Settings::getDefaultCores() +unsigned int Settings::getDefaultCores() const { const unsigned int concurrency = std::max(1U, std::thread::hardware_concurrency()); const unsigned int maxCPU = getMaxCPU(); diff --git a/src/libstore/include/nix/store/globals.hh b/src/libstore/include/nix/store/globals.hh index 0ac689b55..8dfdf11cb 100644 --- a/src/libstore/include/nix/store/globals.hh +++ b/src/libstore/include/nix/store/globals.hh @@ -43,8 +43,6 @@ const uint32_t maxIdsPerBuild = class Settings : public Config { - unsigned int getDefaultCores(); - StringSet getDefaultSystemFeatures(); StringSet getDefaultExtraPlatforms(); @@ -57,6 +55,8 @@ public: Settings(); + unsigned int getDefaultCores() const; + Path nixPrefix; /** @@ -153,7 +153,7 @@ public: Setting buildCores{ this, - getDefaultCores(), + 0, "cores", R"( Sets the value of the `NIX_BUILD_CORES` environment variable in the [invocation of the `builder` executable](@docroot@/language/derivations.md#builder-execution) of a derivation. @@ -166,15 +166,13 @@ public: --> For instance, in Nixpkgs, if the attribute `enableParallelBuilding` for the `mkDerivation` build helper is set to `true`, it passes the `-j${NIX_BUILD_CORES}` flag to GNU Make. - The value `0` means that the `builder` should use all available CPU cores in the system. + If set to `0`, nix will detect the number of CPU cores and pass this number via NIX_BUILD_CORES. > **Note** > > The number of parallel local Nix build jobs is independently controlled with the [`max-jobs`](#conf-max-jobs) setting. )", - {"build-cores"}, - // Don't document the machine-specific default value - false}; + {"build-cores"}}; /** * Read-only mode. 
Don't copy stuff to the store, don't change diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index fd62aa664..cf6c0a5b1 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -1083,7 +1083,7 @@ void DerivationBuilderImpl::initEnv() env["NIX_STORE"] = store.storeDir; /* The maximum number of cores to utilize for parallel building. */ - env["NIX_BUILD_CORES"] = fmt("%d", settings.buildCores); + env["NIX_BUILD_CORES"] = fmt("%d", settings.buildCores ? settings.buildCores : settings.getDefaultCores()); /* In non-structured mode, set all bindings either directory in the environment or via a file, as specified by diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index cde9d6742..04ea44c3d 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -542,7 +542,7 @@ static void main_nix_build(int argc, char * * argv) env["NIX_BUILD_TOP"] = env["TMPDIR"] = env["TEMPDIR"] = env["TMP"] = env["TEMP"] = tmpDir.path().string(); env["NIX_STORE"] = store->storeDir; - env["NIX_BUILD_CORES"] = std::to_string(settings.buildCores); + env["NIX_BUILD_CORES"] = fmt("%d", settings.buildCores ? settings.buildCores : settings.getDefaultCores()); auto parsedDrv = StructuredAttrs::tryParse(drv.env); DerivationOptions drvOptions; diff --git a/tests/functional/build-cores.nix b/tests/functional/build-cores.nix new file mode 100644 index 000000000..9b763f7d8 --- /dev/null +++ b/tests/functional/build-cores.nix @@ -0,0 +1,11 @@ +with import ./config.nix; + +{ + # Test derivation that checks the NIX_BUILD_CORES environment variable + testCores = mkDerivation { + name = "test-build-cores"; + buildCommand = '' + echo "$NIX_BUILD_CORES" > $out + ''; + }; +} diff --git a/tests/functional/build-cores.sh b/tests/functional/build-cores.sh new file mode 100755 index 000000000..a226774c6 --- /dev/null +++ b/tests/functional/build-cores.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash + +source common.sh + +clearStoreIfPossible + +echo "Testing build-cores configuration behavior..." + +# Test 1: When build-cores is set to a non-zero value, NIX_BUILD_CORES should have that value +echo "Testing build-cores=4..." +rm -f "$TEST_ROOT"/build-cores-output +nix-build --cores 4 build-cores.nix -A testCores -o "$TEST_ROOT"/build-cores-output +result=$(cat "$(readlink "$TEST_ROOT"/build-cores-output)") +if [[ "$result" != "4" ]]; then + echo "FAIL: Expected NIX_BUILD_CORES=4, got $result" + exit 1 +fi +echo "PASS: build-cores=4 correctly sets NIX_BUILD_CORES=4" +rm -f "$TEST_ROOT"/build-cores-output + +# Test 2: When build-cores is set to 0, NIX_BUILD_CORES should be resolved to getDefaultCores() +echo "Testing build-cores=0..." +nix-build --cores 0 build-cores.nix -A testCores -o "$TEST_ROOT"/build-cores-output +result=$(cat "$(readlink "$TEST_ROOT"/build-cores-output)") +if [[ "$result" == "0" ]]; then + echo "FAIL: NIX_BUILD_CORES should not be 0 when build-cores=0" + exit 1 +fi +echo "PASS: build-cores=0 resolves to NIX_BUILD_CORES=$result (should be > 0)" +rm -f "$TEST_ROOT"/build-cores-output + +echo "All build-cores tests passed!" 
diff --git a/tests/functional/meson.build b/tests/functional/meson.build index cd1bc6319..501ed45c7 100644 --- a/tests/functional/meson.build +++ b/tests/functional/meson.build @@ -145,6 +145,7 @@ suites = [ 'placeholders.sh', 'ssh-relay.sh', 'build.sh', + 'build-cores.sh', 'build-delete.sh', 'output-normalization.sh', 'selfref-gc.sh', From 01388b3e78582553cc30ee772db5b5aba8d89edd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 16 Jun 2025 12:09:43 +0200 Subject: [PATCH 003/382] Give unit tests access to a $HOME directory Also, don't try to access cache.nixos.org in the libstore unit tests. --- src/libflake-tests/meson.build | 1 + src/libflake-tests/package.nix | 20 ++++++++------------ src/libstore-tests/meson.build | 1 + src/libstore-tests/nix_api_store.cc | 16 +--------------- src/libstore-tests/package.nix | 18 +++++++----------- 5 files changed, 18 insertions(+), 38 deletions(-) diff --git a/src/libflake-tests/meson.build b/src/libflake-tests/meson.build index 593b0e18d..8c082c7e0 100644 --- a/src/libflake-tests/meson.build +++ b/src/libflake-tests/meson.build @@ -60,6 +60,7 @@ test( env : { '_NIX_TEST_UNIT_DATA': meson.current_source_dir() / 'data', 'NIX_CONFIG': 'extra-experimental-features = flakes', + 'HOME': meson.current_build_dir() / 'test-home', }, protocol : 'gtest', ) diff --git a/src/libflake-tests/package.nix b/src/libflake-tests/package.nix index 714f3791a..1a8afd6ea 100644 --- a/src/libflake-tests/package.nix +++ b/src/libflake-tests/package.nix @@ -56,18 +56,14 @@ mkMesonExecutable (finalAttrs: { { meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; } - ( - lib.optionalString stdenv.hostPlatform.isWindows '' - export HOME="$PWD/home-dir" - mkdir -p "$HOME" - '' - + '' - export _NIX_TEST_UNIT_DATA=${resolvePath ./data} - export NIX_CONFIG="extra-experimental-features = flakes" - ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} - touch $out - '' - ); + ('' + export _NIX_TEST_UNIT_DATA=${resolvePath ./data} + export NIX_CONFIG="extra-experimental-features = flakes" + export HOME="$TMPDIR/home" + mkdir -p "$HOME" + ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} + touch $out + ''); }; }; diff --git a/src/libstore-tests/meson.build b/src/libstore-tests/meson.build index 8a1ff40f0..8b9893b23 100644 --- a/src/libstore-tests/meson.build +++ b/src/libstore-tests/meson.build @@ -100,6 +100,7 @@ test( this_exe, env : { '_NIX_TEST_UNIT_DATA': meson.current_source_dir() / 'data', + 'HOME': meson.current_build_dir() / 'test-home', }, protocol : 'gtest', ) diff --git a/src/libstore-tests/nix_api_store.cc b/src/libstore-tests/nix_api_store.cc index 3d9f7908b..05373cb88 100644 --- a/src/libstore-tests/nix_api_store.cc +++ b/src/libstore-tests/nix_api_store.cc @@ -28,10 +28,6 @@ TEST_F(nix_api_store_test, nix_store_get_uri) TEST_F(nix_api_util_context, nix_store_get_storedir_default) { - if (nix::getEnv("HOME").value_or("") == "/homeless-shelter") { - // skipping test in sandbox because nix_store_open tries to create /nix/var/nix/profiles - GTEST_SKIP(); - } nix_libstore_init(ctx); Store * store = nix_store_open(ctx, nullptr, nullptr); assert_ctx_ok(); @@ -141,10 +137,6 @@ TEST_F(nix_api_store_test, nix_store_real_path) TEST_F(nix_api_util_context, nix_store_real_path_relocated) { - if (nix::getEnv("HOME").value_or("") == "/homeless-shelter") { - // Can't open default store from within sandbox - GTEST_SKIP(); - } auto tmp = nix::createTempDir(); std::string storeRoot = tmp + "/store"; 
std::string stateDir = tmp + "/state"; @@ -184,13 +176,7 @@ TEST_F(nix_api_util_context, nix_store_real_path_relocated) TEST_F(nix_api_util_context, nix_store_real_path_binary_cache) { - if (nix::getEnv("HOME").value_or("") == "/homeless-shelter") { - // TODO: override NIX_CACHE_HOME? - // skipping test in sandbox because narinfo cache can't be written - GTEST_SKIP(); - } - - Store * store = nix_store_open(ctx, "https://cache.nixos.org", nullptr); + Store * store = nix_store_open(ctx, nix::fmt("file://%s/binary-cache", nix::createTempDir()).c_str(), nullptr); assert_ctx_ok(); ASSERT_NE(store, nullptr); diff --git a/src/libstore-tests/package.nix b/src/libstore-tests/package.nix index b39ee7fa7..1f3701c7f 100644 --- a/src/libstore-tests/package.nix +++ b/src/libstore-tests/package.nix @@ -73,17 +73,13 @@ mkMesonExecutable (finalAttrs: { { meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; } - ( - lib.optionalString stdenv.hostPlatform.isWindows '' - export HOME="$PWD/home-dir" - mkdir -p "$HOME" - '' - + '' - export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"} - ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} - touch $out - '' - ); + ('' + export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"} + export HOME="$TMPDIR/home" + mkdir -p "$HOME" + ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} + touch $out + ''); }; }; From f29acd5bbc0505ea6a78a51e4828a22630dd0ff1 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 7 Jul 2025 11:12:55 +0200 Subject: [PATCH 004/382] Use writableTmpDirAsHomeHook --- src/libflake-tests/package.nix | 4 ++-- src/libstore-tests/package.nix | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libflake-tests/package.nix b/src/libflake-tests/package.nix index 1a8afd6ea..397ef4192 100644 --- a/src/libflake-tests/package.nix +++ b/src/libflake-tests/package.nix @@ -3,6 +3,7 @@ buildPackages, stdenv, mkMesonExecutable, + writableTmpDirAsHomeHook, nix-flake, nix-flake-c, @@ -55,12 +56,11 @@ mkMesonExecutable (finalAttrs: { runCommand "${finalAttrs.pname}-run" { meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; + buildInputs = [ writableTmpDirAsHomeHook ]; } ('' export _NIX_TEST_UNIT_DATA=${resolvePath ./data} export NIX_CONFIG="extra-experimental-features = flakes" - export HOME="$TMPDIR/home" - mkdir -p "$HOME" ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} touch $out ''); diff --git a/src/libstore-tests/package.nix b/src/libstore-tests/package.nix index 1f3701c7f..62c7e136b 100644 --- a/src/libstore-tests/package.nix +++ b/src/libstore-tests/package.nix @@ -3,6 +3,7 @@ buildPackages, stdenv, mkMesonExecutable, + writableTmpDirAsHomeHook, nix-store, nix-store-c, @@ -72,11 +73,10 @@ mkMesonExecutable (finalAttrs: { runCommand "${finalAttrs.pname}-run" { meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; + buildInputs = [ writableTmpDirAsHomeHook ]; } ('' export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"} - export HOME="$TMPDIR/home" - mkdir -p "$HOME" ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} touch $out ''); From 778156072444fdd06812b89e86169bf45f2425df Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 7 Jul 2025 11:26:06 +0200 Subject: [PATCH 005/382] libstore-tests: Fix impurity trying to access the Nix daemon This failed on macOS: nix-store-tests-run> C++ exception with description "../nix_api_store.cc:33: 
nix_err_code(ctx) != NIX_OK, message: error: getting status of '/nix/var/nix/daemon-socket/socket': Operation not permitted" thrown in the test body. --- src/libstore-tests/meson.build | 1 + src/libstore-tests/package.nix | 1 + 2 files changed, 2 insertions(+) diff --git a/src/libstore-tests/meson.build b/src/libstore-tests/meson.build index 8b9893b23..79f21620e 100644 --- a/src/libstore-tests/meson.build +++ b/src/libstore-tests/meson.build @@ -101,6 +101,7 @@ test( env : { '_NIX_TEST_UNIT_DATA': meson.current_source_dir() / 'data', 'HOME': meson.current_build_dir() / 'test-home', + 'NIX_REMOTE': meson.current_build_dir() / 'test-home' / 'store', }, protocol : 'gtest', ) diff --git a/src/libstore-tests/package.nix b/src/libstore-tests/package.nix index 62c7e136b..f606604ba 100644 --- a/src/libstore-tests/package.nix +++ b/src/libstore-tests/package.nix @@ -77,6 +77,7 @@ mkMesonExecutable (finalAttrs: { } ('' export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"} + export NIX_REMOTE=$HOME/store ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} touch $out ''); From 87299e466daca97fd48d3d446bb587e4f9d46d9a Mon Sep 17 00:00:00 2001 From: John Soo Date: Mon, 7 Jul 2025 11:14:12 -0600 Subject: [PATCH 006/382] installers, tests: remove --preserve=mode from cp invocations -p preserves xattrs and acls which can be incompatible between filesystems Unfortunately keep -p on darwin because the bsd coreutils do not support --preserve. Fixes #13426 --- scripts/install-multi-user.sh | 9 +++++++-- scripts/install-nix-from-tarball.sh | 6 +++++- tests/nixos/github-flakes.nix | 2 +- tests/nixos/sourcehut-flakes.nix | 2 +- tests/nixos/tarball-flakes.nix | 2 +- 5 files changed, 15 insertions(+), 6 deletions(-) diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh index f051ccc46..e9ddfc014 100644 --- a/scripts/install-multi-user.sh +++ b/scripts/install-multi-user.sh @@ -834,8 +834,13 @@ install_from_extracted_nix() { ( cd "$EXTRACTED_NIX_PATH" - _sudo "to copy the basic Nix files to the new store at $NIX_ROOT/store" \ - cp -RPp ./store/* "$NIX_ROOT/store/" + if is_os_darwin; then + _sudo "to copy the basic Nix files to the new store at $NIX_ROOT/store" \ + cp -RPp ./store/* "$NIX_ROOT/store/" + else + _sudo "to copy the basic Nix files to the new store at $NIX_ROOT/store" \ + cp -RP --preserve=ownership,timestamps ./store/* "$NIX_ROOT/store/" + fi _sudo "to make the new store non-writable at $NIX_ROOT/store" \ chmod -R ugo-w "$NIX_ROOT/store/" diff --git a/scripts/install-nix-from-tarball.sh b/scripts/install-nix-from-tarball.sh index 8d127a9c5..ec3264793 100644 --- a/scripts/install-nix-from-tarball.sh +++ b/scripts/install-nix-from-tarball.sh @@ -167,7 +167,11 @@ for i in $(cd "$self/store" >/dev/null && echo ./*); do rm -rf "$i_tmp" fi if ! [ -e "$dest/store/$i" ]; then - cp -RPp "$self/store/$i" "$i_tmp" + if [ "$(uname -s)" = "Darwin" ]; then + cp -RPp "$self/store/$i" "$i_tmp" + else + cp -RP --preserve=ownership,timestamps "$self/store/$i" "$i_tmp" + fi chmod -R a-w "$i_tmp" chmod +w "$i_tmp" mv "$i_tmp" "$dest/store/$i" diff --git a/tests/nixos/github-flakes.nix b/tests/nixos/github-flakes.nix index 06142c2ef..91fd6b062 100644 --- a/tests/nixos/github-flakes.nix +++ b/tests/nixos/github-flakes.nix @@ -81,7 +81,7 @@ let mkdir -p $out/archive dir=NixOS-nixpkgs-${nixpkgs.shortRev} - cp -prd ${nixpkgs} $dir + cp -rd --preserve=ownership,timestamps ${nixpkgs} $dir # Set the correct timestamp in the tarball. 
find $dir -print0 | xargs -0 touch -h -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${ builtins.substring 12 2 nixpkgs.lastModifiedDate diff --git a/tests/nixos/sourcehut-flakes.nix b/tests/nixos/sourcehut-flakes.nix index 61670ccf3..3f05130d6 100644 --- a/tests/nixos/sourcehut-flakes.nix +++ b/tests/nixos/sourcehut-flakes.nix @@ -48,7 +48,7 @@ let nixpkgs-repo = pkgs.runCommand "nixpkgs-flake" { } '' dir=NixOS-nixpkgs-${nixpkgs.shortRev} - cp -prd ${nixpkgs} $dir + cp -rd --preserve=ownership,timestamps ${nixpkgs} $dir # Set the correct timestamp in the tarball. find $dir -print0 | xargs -0 touch -h -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${ diff --git a/tests/nixos/tarball-flakes.nix b/tests/nixos/tarball-flakes.nix index 7b3638b64..26c20cb1a 100644 --- a/tests/nixos/tarball-flakes.nix +++ b/tests/nixos/tarball-flakes.nix @@ -13,7 +13,7 @@ let set -x dir=nixpkgs-${nixpkgs.shortRev} - cp -prd ${nixpkgs} $dir + cp -rd --preserve=ownership,timestamps ${nixpkgs} $dir # Set the correct timestamp in the tarball. find $dir -print0 | xargs -0 touch -h -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${ builtins.substring 12 2 nixpkgs.lastModifiedDate From 723903da3cb4cac844dbfe1d7865080c868a4ad7 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 8 Jul 2025 16:14:50 +0200 Subject: [PATCH 007/382] Bump version --- .version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version b/.version index 6a6900382..bafceb320 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.30.0 +2.31.0 From 4fa99d743ea365681011c56f1901f6d0c2ce83ab Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 8 Jul 2025 16:15:45 +0200 Subject: [PATCH 008/382] release-process.md: Remove unnecessary step --- maintainers/release-process.md | 4 ---- 1 file changed, 4 deletions(-) diff --git a/maintainers/release-process.md b/maintainers/release-process.md index f2c61302b..37b38fb9f 100644 --- a/maintainers/release-process.md +++ b/maintainers/release-process.md @@ -39,10 +39,6 @@ release: * Proof-read / edit / rearrange the release notes if needed. Breaking changes and highlights should go to the top. -* Run `maintainers/release-credits` to make sure the credits script works - and produces a sensible output. Some emails might not automatically map to - a GitHub handle. - * Push. ```console From 06665e27f48ca04ac877b2b09749fbdc3de0a7eb Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 8 Jul 2025 16:18:53 +0200 Subject: [PATCH 009/382] Update .mergify.yml --- .mergify.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/.mergify.yml b/.mergify.yml index 8941711a9..f49144113 100644 --- a/.mergify.yml +++ b/.mergify.yml @@ -150,3 +150,14 @@ pull_request_rules: labels: - automatic backport - merge-queue + + - name: backport patches to 2.30 + conditions: + - label=backport 2.30-maintenance + actions: + backport: + branches: + - "2.30-maintenance" + labels: + - automatic backport + - merge-queue From a16491375a55ee257ecce84c434750744247abd0 Mon Sep 17 00:00:00 2001 From: Connor Baker Date: Tue, 8 Jul 2025 12:59:16 -0700 Subject: [PATCH 010/382] globals.hh: fix broken link to nspawn example The substitution included the `.` at the end of the URL, breaking it. 
--- src/libstore/include/nix/store/globals.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/include/nix/store/globals.hh b/src/libstore/include/nix/store/globals.hh index 0ac689b55..2e67c7bbf 100644 --- a/src/libstore/include/nix/store/globals.hh +++ b/src/libstore/include/nix/store/globals.hh @@ -873,7 +873,7 @@ public: On Linux, Nix can run builds in a user namespace where they run as root (UID 0) and have 65,536 UIDs available. This is primarily useful for running containers such as `systemd-nspawn` inside a Nix build. For an example, see [`tests/systemd-nspawn/nix`][nspawn]. - [nspawn]: https://github.com/NixOS/nix/blob/67bcb99700a0da1395fa063d7c6586740b304598/tests/systemd-nspawn.nix. + [nspawn]: https://github.com/NixOS/nix/blob/67bcb99700a0da1395fa063d7c6586740b304598/tests/systemd-nspawn.nix Included by default on Linux if the [`auto-allocate-uids`](#conf-auto-allocate-uids) setting is enabled. )", From 8a1f471b6607e4626e2cd8ca1e02401578e0044d Mon Sep 17 00:00:00 2001 From: h0nIg Date: Wed, 9 Jul 2025 09:30:11 +0200 Subject: [PATCH 011/382] docker: fix nixConf --- docker.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker.nix b/docker.nix index c6e8e478e..2addd0458 100644 --- a/docker.nix +++ b/docker.nix @@ -184,11 +184,11 @@ let } " = "; }; - nixConfContents = toConf { + nixConfContents = toConf ({ sandbox = false; build-users-group = "nixbld"; trusted-public-keys = [ "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=" ]; - }; + } // nixConf); userHome = if uid == 0 then "/root" else "/home/${uname}"; From 9857c0bb52cfb62f324ce598214f20cc3521e3a8 Mon Sep 17 00:00:00 2001 From: h0nIg Date: Wed, 9 Jul 2025 09:34:50 +0200 Subject: [PATCH 012/382] docker: fix nixConf - fmt --- docker.nix | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/docker.nix b/docker.nix index 2addd0458..f59492025 100644 --- a/docker.nix +++ b/docker.nix @@ -184,11 +184,14 @@ let } " = "; }; - nixConfContents = toConf ({ - sandbox = false; - build-users-group = "nixbld"; - trusted-public-keys = [ "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=" ]; - } // nixConf); + nixConfContents = toConf ( + { + sandbox = false; + build-users-group = "nixbld"; + trusted-public-keys = [ "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=" ]; + } + // nixConf + ); userHome = if uid == 0 then "/root" else "/home/${uname}"; From 95437b90fc68bd3fff5a47bd4ac6e5186eb51a00 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 9 Jul 2025 17:00:49 +0200 Subject: [PATCH 013/382] lockFlake(): When updating a lock, respect the input's lock file --- src/libflake/flake.cc | 10 +++----- tests/functional/flakes/flakes.sh | 38 +++++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+), 7 deletions(-) diff --git a/src/libflake/flake.cc b/src/libflake/flake.cc index 322abaa4a..7a11e6047 100644 --- a/src/libflake/flake.cc +++ b/src/libflake/flake.cc @@ -715,16 +715,12 @@ LockedFlake lockFlake( Finally cleanup([&]() { parents.pop_back(); }); /* Recursively process the inputs of this - flake. Also, unless we already have this flake - in the top-level lock file, use this flake's - own lock file. */ + flake, using its own lock file. */ nodePaths.emplace(childNode, inputFlake.path.parent()); computeLocks( inputFlake.inputs, childNode, inputAttrPath, - oldLock - ? std::dynamic_pointer_cast(oldLock) - : readLockFile(state.fetchSettings, inputFlake.lockFilePath()).root.get_ptr(), - oldLock ? 
followsPrefix : inputAttrPath, + readLockFile(state.fetchSettings, inputFlake.lockFilePath()).root.get_ptr(), + inputAttrPath, inputFlake.path, false); } diff --git a/tests/functional/flakes/flakes.sh b/tests/functional/flakes/flakes.sh index ce695a6cb..7fd9dc9b5 100755 --- a/tests/functional/flakes/flakes.sh +++ b/tests/functional/flakes/flakes.sh @@ -432,3 +432,41 @@ nix flake metadata "$flake2Dir" --reference-lock-file $TEST_ROOT/flake2-overridd # reference-lock-file can only be used if allow-dirty is set. expectStderr 1 nix flake metadata "$flake2Dir" --no-allow-dirty --reference-lock-file $TEST_ROOT/flake2-overridden.lock + +# After changing an input (flake2 from newFlake2Rev to prevFlake2Rev), we should have the transitive inputs locked by revision $prevFlake2Rev of flake2. +prevFlake1Rev=$(nix flake metadata --json "$flake1Dir" | jq -r .revision) +prevFlake2Rev=$(nix flake metadata --json "$flake2Dir" | jq -r .revision) + +echo "# bla" >> "$flake1Dir/flake.nix" +git -C "$flake1Dir" commit flake.nix -m 'bla' + +nix flake update --flake "$flake2Dir" +git -C "$flake2Dir" commit flake.lock -m 'bla' + +newFlake1Rev=$(nix flake metadata --json "$flake1Dir" | jq -r .revision) +newFlake2Rev=$(nix flake metadata --json "$flake2Dir" | jq -r .revision) + +cat > "$flake3Dir/flake.nix" < "$flake3Dir/flake.nix" < Date: Thu, 10 Jul 2025 11:41:32 +0200 Subject: [PATCH 014/382] fetchClosure: Fix gcc warning MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes: [261/394] Linking target src/libexpr/libnixexpr.so In function ‘copy’, inlined from ‘__ct ’ at /nix/store/24sdvjs6rfqs69d21gdn437mb3vc0svh-gcc-14.2.1.20250322/include/c++/14.2.1.20250322/bits/basic_string.h:688:23, inlined from ‘operator+’ at /nix/store/24sdvjs6rfqs69d21gdn437mb3vc0svh-gcc-14.2.1.20250322/include/c++/14.2.1.20250322/bits/basic_string.h:3735:43, inlined from ‘operator()’ at ../src/libexpr/primops/fetchClosure.cc:127:58, inlined from ‘prim_fetchClosure’ at ../src/libexpr/primops/fetchClosure.cc:132:88: /nix/store/24sdvjs6rfqs69d21gdn437mb3vc0svh-gcc-14.2.1.20250322/include/c++/14.2.1.20250322/bits/char_traits.h:427:56: warning: ‘__builtin_memcpy’ writing 74 bytes into a region of size 16 overflows the destination [-Wstringop-overflow=] 427 | return static_cast(__builtin_memcpy(__s1, __s2, __n)); | ^ ../src/libexpr/primops/fetchClosure.cc: In function ‘prim_fetchClosure’: ../src/libexpr/primops/fetchClosure.cc:132:88: note: at offset 16 into destination object ‘’ of size 32 132 | fromPath = state.coerceToStorePath(attr.pos, *attr.value, context, attrHint()); | ^ --- src/libexpr/primops/fetchClosure.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/primops/fetchClosure.cc b/src/libexpr/primops/fetchClosure.cc index ea6145f6f..4be4dac8f 100644 --- a/src/libexpr/primops/fetchClosure.cc +++ b/src/libexpr/primops/fetchClosure.cc @@ -124,7 +124,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg for (auto & attr : *args[0]->attrs()) { const auto & attrName = state.symbols[attr.name]; auto attrHint = [&]() -> std::string { - return "while evaluating the '" + attrName + "' attribute passed to builtins.fetchClosure"; + return fmt("while evaluating the attribute '%s' passed to builtins.fetchClosure", attrName); }; if (attrName == "fromPath") { From 74a144ce9831c65371f7482fc6ae748872df679d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 10 Jul 2025 11:53:36 +0200 Subject: [PATCH 015/382] Require Boost 1.81.0 or higher Note: 
this version of Boost was released in December 2022. --- src/libexpr/include/nix/expr/symbol-table.hh | 14 +------------- src/libexpr/meson.build | 1 + src/libutil/serialise.cc | 4 ---- 3 files changed, 2 insertions(+), 17 deletions(-) diff --git a/src/libexpr/include/nix/expr/symbol-table.hh b/src/libexpr/include/nix/expr/symbol-table.hh index 20a05a09d..1249bdb88 100644 --- a/src/libexpr/include/nix/expr/symbol-table.hh +++ b/src/libexpr/include/nix/expr/symbol-table.hh @@ -7,12 +7,7 @@ #include "nix/util/error.hh" #include -#define USE_FLAT_SYMBOL_SET (BOOST_VERSION >= 108100) -#if USE_FLAT_SYMBOL_SET -# include -#else -# include -#endif +#include namespace nix { @@ -214,12 +209,7 @@ private: * Transparent lookup of string view for a pointer to a ChunkedVector entry -> return offset into the store. * ChunkedVector references are never invalidated. */ -#if USE_FLAT_SYMBOL_SET boost::unordered_flat_set symbols{SymbolStr::chunkSize}; -#else - using SymbolValueAlloc = std::pmr::polymorphic_allocator; - boost::unordered_set symbols{SymbolStr::chunkSize, {&buffer}}; -#endif public: @@ -287,5 +277,3 @@ struct std::hash return std::hash{}(s.id); } }; - -#undef USE_FLAT_SYMBOL_SET diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index f5adafae0..533030359 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -41,6 +41,7 @@ boost = dependency( 'boost', modules : ['container', 'context'], include_type: 'system', + version: '>=1.81.0' ) # boost is a public dependency, but not a pkg-config dependency unfortunately, so we # put in `deps_other`. diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 55397c6d4..a74531582 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -194,10 +194,6 @@ size_t StringSource::read(char * data, size_t len) } -#if BOOST_VERSION >= 106300 && BOOST_VERSION < 106600 -#error Coroutines are broken in this version of Boost! -#endif - std::unique_ptr sourceToSink(std::function fun) { struct SourceToSink : FinishSink From ca9f2028b020447559d509252c31a145f92dfaff Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 10 Jul 2025 12:27:17 +0200 Subject: [PATCH 016/382] Simplify SymbolTable::create() --- src/libexpr/include/nix/expr/symbol-table.hh | 14 +------------- src/libexpr/meson.build | 2 +- 2 files changed, 2 insertions(+), 14 deletions(-) diff --git a/src/libexpr/include/nix/expr/symbol-table.hh b/src/libexpr/include/nix/expr/symbol-table.hh index 1249bdb88..4dedf3d91 100644 --- a/src/libexpr/include/nix/expr/symbol-table.hh +++ b/src/libexpr/include/nix/expr/symbol-table.hh @@ -220,19 +220,7 @@ public: // Most symbols are looked up more than once, so we trade off insertion performance // for lookup performance. // FIXME: make this thread-safe. 
- return [&](T && key) -> Symbol { - if constexpr (requires { symbols.insert(key); }) { - auto [it, _] = symbols.insert(key); - return Symbol(*it); - } else { - auto it = symbols.find(key); - if (it != symbols.end()) - return Symbol(*it); - - it = symbols.emplace(key).first; - return Symbol(*it); - } - }(SymbolStr::Key{store, s, stringAlloc}); + return Symbol(*symbols.insert(SymbolStr::Key{store, s, stringAlloc}).first); } std::vector resolve(const std::vector & symbols) const diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index 533030359..43e4b9c98 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -41,7 +41,7 @@ boost = dependency( 'boost', modules : ['container', 'context'], include_type: 'system', - version: '>=1.81.0' + version: '>=1.82.0' ) # boost is a public dependency, but not a pkg-config dependency unfortunately, so we # put in `deps_other`. From fc03b89ff4d6bdd47eb128385fa997a660c03c32 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 10 Jul 2025 17:50:43 +0200 Subject: [PATCH 017/382] Fix lessThan doc --- src/libexpr/primops.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index f9f834a62..99ca19d7e 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -4122,9 +4122,9 @@ static RegisterPrimOp primop_lessThan({ .name = "__lessThan", .args = {"e1", "e2"}, .doc = R"( - Return `true` if the number *e1* is less than the number *e2*, and - `false` otherwise. Evaluation aborts if either *e1* or *e2* does not - evaluate to a number. + Return `true` if the value *e1* is less than the value *e2*, and `false` otherwise. + Evaluation aborts if either *e1* or *e2* does not evaluate to a number, string or path. + Furthermore, it aborts if *e2* does not match *e1*'s type according to the aforementioned classification of number, string or path. )", .fun = prim_lessThan, }); From a17f377f69c33e5aa553a9098974af7972f2662f Mon Sep 17 00:00:00 2001 From: Elliot Cameron <130508846+de11n@users.noreply.github.com> Date: Thu, 10 Jul 2025 16:19:43 -0400 Subject: [PATCH 018/382] Fix documentation for GC w.r.t. symlinks --- .../source/package-management/garbage-collector-roots.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/manual/source/package-management/garbage-collector-roots.md b/doc/manual/source/package-management/garbage-collector-roots.md index 30c5b7f8d..925a33162 100644 --- a/doc/manual/source/package-management/garbage-collector-roots.md +++ b/doc/manual/source/package-management/garbage-collector-roots.md @@ -12,7 +12,7 @@ $ ln -s /nix/store/d718ef...-foo /nix/var/nix/gcroots/bar That is, after this command, the garbage collector will not remove `/nix/store/d718ef...-foo` or any of its dependencies. -Subdirectories of `prefix/nix/var/nix/gcroots` are also searched for -symlinks. Symlinks to non-store paths are followed and searched for -roots, but symlinks to non-store paths *inside* the paths reached in -that way are not followed to prevent infinite recursion. +Subdirectories of `prefix/nix/var/nix/gcroots` are searched +recursively. Symlinks to store paths count as roots. Symlinks to +non-store paths are ignored, unless the non-store path is itself a +symlink to a store path. 
\ No newline at end of file From 6e78cc90d3415694ec15bd273b47d21bb1be96ad Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 11 Jul 2025 20:20:48 +0300 Subject: [PATCH 019/382] libexpr: Fix invalid handling of errors for imported functions c39cc004043b95d55a0c2c2bdba58d6d3e0db846 has added assertions for all Value accesses and the following case has started failing with an `unreachable`: (/tmp/fun.nix): ```nix {a}: a ``` ``` $ nix eval --impure --expr 'import /tmp/fun.nix {a="a";b="b";}' ``` This would crash: ``` terminating due to unexpected unrecoverable internal error: Unexpected condition in getStorage at ../include/nix/expr/value.hh:844 ``` This is not a regression, but rather surfaces an existing problem, which previously was left undiagnosed. In the case of an import `fun` is the `import` primOp, so that read is invalid and previously this resulted in an access into an inactive union member, which is UB. The correct thing to use is `vCur`. Identical problem also affected the case of a missing argument. Add previously failing test cases to the functional/lang test suite. Fixes #13448. --- src/libexpr/eval.cc | 4 ++-- .../lang/eval-fail-missing-arg-import.err.exp | 12 ++++++++++++ .../lang/eval-fail-missing-arg-import.nix | 1 + .../lang/eval-fail-undeclared-arg-import.err.exp | 13 +++++++++++++ .../lang/eval-fail-undeclared-arg-import.nix | 4 ++++ .../lang/non-eval-trivial-lambda-formals.nix | 1 + 6 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 tests/functional/lang/eval-fail-missing-arg-import.err.exp create mode 100644 tests/functional/lang/eval-fail-missing-arg-import.nix create mode 100644 tests/functional/lang/eval-fail-undeclared-arg-import.err.exp create mode 100644 tests/functional/lang/eval-fail-undeclared-arg-import.nix create mode 100644 tests/functional/lang/non-eval-trivial-lambda-formals.nix diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 1321e00a5..47cc35daa 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1602,7 +1602,7 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, symbols[i.name]) .atPos(lambda.pos) .withTrace(pos, "from call site") - .withFrame(*fun.lambda().env, lambda) + .withFrame(*vCur.lambda().env, lambda) .debugThrow(); } env2.values[displ++] = i.def->maybeThunk(*this, env2); @@ -1629,7 +1629,7 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, .atPos(lambda.pos) .withTrace(pos, "from call site") .withSuggestions(suggestions) - .withFrame(*fun.lambda().env, lambda) + .withFrame(*vCur.lambda().env, lambda) .debugThrow(); } unreachable(); diff --git a/tests/functional/lang/eval-fail-missing-arg-import.err.exp b/tests/functional/lang/eval-fail-missing-arg-import.err.exp new file mode 100644 index 000000000..45774f003 --- /dev/null +++ b/tests/functional/lang/eval-fail-missing-arg-import.err.exp @@ -0,0 +1,12 @@ +error: + … from call site + at /pwd/lang/eval-fail-missing-arg-import.nix:1:1: + 1| import ./non-eval-trivial-lambda-formals.nix { } + | ^ + 2| + + error: function 'anonymous lambda' called without required argument 'a' + at /pwd/lang/non-eval-trivial-lambda-formals.nix:1:1: + 1| { a }: a + | ^ + 2| diff --git a/tests/functional/lang/eval-fail-missing-arg-import.nix b/tests/functional/lang/eval-fail-missing-arg-import.nix new file mode 100644 index 000000000..7cb33f2b5 --- /dev/null +++ b/tests/functional/lang/eval-fail-missing-arg-import.nix @@ -0,0 +1 @@ +import ./non-eval-trivial-lambda-formals.nix { } diff --git 
a/tests/functional/lang/eval-fail-undeclared-arg-import.err.exp b/tests/functional/lang/eval-fail-undeclared-arg-import.err.exp new file mode 100644 index 000000000..ca797d3ec --- /dev/null +++ b/tests/functional/lang/eval-fail-undeclared-arg-import.err.exp @@ -0,0 +1,13 @@ +error: + … from call site + at /pwd/lang/eval-fail-undeclared-arg-import.nix:1:1: + 1| import ./non-eval-trivial-lambda-formals.nix { + | ^ + 2| a = "a"; + + error: function 'anonymous lambda' called with unexpected argument 'b' + at /pwd/lang/non-eval-trivial-lambda-formals.nix:1:1: + 1| { a }: a + | ^ + 2| + Did you mean a? diff --git a/tests/functional/lang/eval-fail-undeclared-arg-import.nix b/tests/functional/lang/eval-fail-undeclared-arg-import.nix new file mode 100644 index 000000000..e8454c725 --- /dev/null +++ b/tests/functional/lang/eval-fail-undeclared-arg-import.nix @@ -0,0 +1,4 @@ +import ./non-eval-trivial-lambda-formals.nix { + a = "a"; + b = "b"; +} diff --git a/tests/functional/lang/non-eval-trivial-lambda-formals.nix b/tests/functional/lang/non-eval-trivial-lambda-formals.nix new file mode 100644 index 000000000..46a7ea4f4 --- /dev/null +++ b/tests/functional/lang/non-eval-trivial-lambda-formals.nix @@ -0,0 +1 @@ +{ a }: a From e2ef2cfcbc83ea01308ee64c38a58707ab23dec3 Mon Sep 17 00:00:00 2001 From: gustavderdrache Date: Fri, 11 Jul 2025 18:00:26 -0400 Subject: [PATCH 020/382] Address ifdef problem with macOS/BSD sandboxing --- src/libstore/unix/user-lock.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/unix/user-lock.cc b/src/libstore/unix/user-lock.cc index 6a07cb7cc..f5d164e5b 100644 --- a/src/libstore/unix/user-lock.cc +++ b/src/libstore/unix/user-lock.cc @@ -197,7 +197,7 @@ bool useBuildUsers() #ifdef __linux__ static bool b = (settings.buildUsersGroup != "" || settings.autoAllocateUids) && isRootUser(); return b; - #elif defined(__APPLE__) && defined(__FreeBSD__) + #elif defined(__APPLE__) || defined(__FreeBSD__) static bool b = settings.buildUsersGroup != "" && isRootUser(); return b; #else From 8e5814d972642def9842fba3f8a6116f6b9e5c96 Mon Sep 17 00:00:00 2001 From: gustavderdrache Date: Fri, 11 Jul 2025 18:38:51 -0400 Subject: [PATCH 021/382] CI: Roll nix version to 2.29.1 This works around the macOS issue that the prior commit addresses. --- .github/workflows/ci.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 29cb33f56..ac749bc3f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,6 +14,8 @@ jobs: with: fetch-depth: 0 - uses: cachix/install-nix-action@v31 + with: + install_url: "https://releases.nixos.org/nix/nix-2.29.1/install" - run: nix --experimental-features 'nix-command flakes' flake show --all-systems --json tests: @@ -36,6 +38,7 @@ jobs: fetch-depth: 0 - uses: cachix/install-nix-action@v31 with: + install_url: "https://releases.nixos.org/nix/nix-2.29.1/install" # The sandbox would otherwise be disabled by default on Darwin extra_nix_config: | sandbox = true From a48632f2e00ccab40c94d498b10591beb76ec6bf Mon Sep 17 00:00:00 2001 From: Justin Bailey Date: Fri, 11 Jul 2025 17:30:03 -0700 Subject: [PATCH 022/382] Better Handling for Expired Credentials When AWS credentials expired, in some scenarios they led to the nix process aborting with an error similar to ' Unable to parse ExceptionName: ExpiredToken'. This change updates the S3 handling code such that those errors are treated like 403s or 404s. 
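The AWS SDK does not expose a dedicated error code for this situation, so the detection has to match on the exception name reported by the SDK. A minimal sketch of the check being applied (a hypothetical standalone helper, shown only for illustration; the patch below inlines the same condition at the call sites):

```cpp
#include <aws/core/client/AWSError.h>
#include <aws/s3/S3Errors.h>

// Treat an expired session token like a 403/404: it is a credentials/caching
// problem rather than a fatal store error, so lookups should fall through
// instead of aborting the whole nix process.
static bool isExpiredToken(const Aws::Client::AWSError<Aws::S3::S3Errors> & error)
{
    // AWS is unwilling to provide a specific error type for this case
    // (see aws/aws-sdk-cpp#1843), so match on the exception name instead.
    return error.GetErrorType() == Aws::S3::S3Errors::UNKNOWN
        && error.GetExceptionName() == "ExpiredToken";
}
```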
Closes #13459 --- src/libstore/s3-binary-cache-store.cc | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 618112d1c..b1cba3358 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -37,10 +37,11 @@ namespace nix { struct S3Error : public Error { Aws::S3::S3Errors err; + Aws::String exceptionName; template - S3Error(Aws::S3::S3Errors err, const Args & ... args) - : Error(args...), err(err) { }; + S3Error(Aws::S3::S3Errors err, Aws::String exceptionName, const Args & ... args) + : Error(args...), err(err), exceptionName(exceptionName) { }; }; /* Helper: given an Outcome, return R in case of success, or @@ -51,6 +52,7 @@ R && checkAws(std::string_view s, Aws::Utils::Outcome && outcome) if (!outcome.IsSuccess()) throw S3Error( outcome.GetError().GetErrorType(), + outcome.GetError().GetExceptionName(), fmt( "%s: %s (request id: %s)", s, @@ -226,7 +228,13 @@ S3Helper::FileTransferResult S3Helper::getObject( } catch (S3Error & e) { if ((e.err != Aws::S3::S3Errors::NO_SUCH_KEY) && - (e.err != Aws::S3::S3Errors::ACCESS_DENIED)) throw; + (e.err != Aws::S3::S3Errors::ACCESS_DENIED) && + // Expired tokens are not really an error, more of a caching problem. Should be treated same as 403. + // + // AWS unwilling to provide a specific error type for the situation (https://github.com/aws/aws-sdk-cpp/issues/1843) + // so use this hack + (e.exceptionName != "ExpiredToken") + ) throw; } auto now2 = std::chrono::steady_clock::now(); @@ -325,15 +333,22 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore { stats.head++; + // error: AWS error fetching 'vjgpmfn7s6vkynymnk8jfx2fcxnsbd6b.narinfo': Unable to parse ExceptionName: ExpiredToken Message: The provided token has expired. auto res = s3Helper.client->HeadObject( Aws::S3::Model::HeadObjectRequest() .WithBucket(config->bucketName) .WithKey(path)); + printError("Checking for file"); + if (!res.IsSuccess()) { auto & error = res.GetError(); if (error.GetErrorType() == Aws::S3::S3Errors::RESOURCE_NOT_FOUND || error.GetErrorType() == Aws::S3::S3Errors::NO_SUCH_KEY + // Expired tokens are not really an error, more of a caching problem. Should be treated same as 403. + // AWS unwilling to provide a specific error type for the situation (https://github.com/aws/aws-sdk-cpp/issues/1843) + // so use this hack + || (error.GetErrorType() == Aws::S3::S3Errors::UNKNOWN && error.GetExceptionName() == "ExpiredToken") // If bucket listing is disabled, 404s turn into 403s || error.GetErrorType() == Aws::S3::S3Errors::ACCESS_DENIED) return false; From 22d6969d669d492d4948967f76432c1c6a74efd5 Mon Sep 17 00:00:00 2001 From: m4dc4p Date: Sat, 12 Jul 2025 08:05:52 -0700 Subject: [PATCH 023/382] Update src/libstore/s3-binary-cache-store.cc Co-authored-by: Eelco Dolstra --- src/libstore/s3-binary-cache-store.cc | 1 - 1 file changed, 1 deletion(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index b1cba3358..f3d029c5e 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -333,7 +333,6 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore { stats.head++; - // error: AWS error fetching 'vjgpmfn7s6vkynymnk8jfx2fcxnsbd6b.narinfo': Unable to parse ExceptionName: ExpiredToken Message: The provided token has expired. 
auto res = s3Helper.client->HeadObject( Aws::S3::Model::HeadObjectRequest() .WithBucket(config->bucketName) From f786c0b8d1ae75a4ecdd83332ec1f8d6da45b0f3 Mon Sep 17 00:00:00 2001 From: m4dc4p Date: Sat, 12 Jul 2025 08:06:09 -0700 Subject: [PATCH 024/382] Update src/libstore/s3-binary-cache-store.cc Co-authored-by: Eelco Dolstra --- src/libstore/s3-binary-cache-store.cc | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index f3d029c5e..cbb47c063 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -338,8 +338,6 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore .WithBucket(config->bucketName) .WithKey(path)); - printError("Checking for file"); - if (!res.IsSuccess()) { auto & error = res.GetError(); if (error.GetErrorType() == Aws::S3::S3Errors::RESOURCE_NOT_FOUND From 5cd94436f526976950fef72c4d856347107162dc Mon Sep 17 00:00:00 2001 From: Emily Date: Fri, 27 Jun 2025 14:42:07 +0100 Subject: [PATCH 025/382] libstore: fix Unix sockets in the build directory on sandboxed macOS MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit We’re already allowing `/tmp` anyway, so this should be harmless, and it fixes a regression in the default configuration caused by moving the build directories out of `temp-dir`. (For instance, that broke the Lix `guessOrInventPath.sockets` test.) Note that removing `/tmp` breaks quite a few builds, so although it may be a good idea in general it would require work on the Nixpkgs side. Fixes: 749afbbe99fd7b45f828b72628252feba9241362 Change-Id: I6a6a69645f429bc50d4cb24283feda3d3091f534 (This is a cherry-pick of commit d1db3e5fa3faa43b3d2f2e2e843e9cfc1e6e1b71) Lix patch: https://gerrit.lix.systems/c/lix/+/3500 --- src/libstore/unix/build/darwin-derivation-builder.cc | 2 ++ src/libstore/unix/build/sandbox-defaults.sb | 6 ++++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/libstore/unix/build/darwin-derivation-builder.cc b/src/libstore/unix/build/darwin-derivation-builder.cc index 5e06dbe55..3985498c1 100644 --- a/src/libstore/unix/build/darwin-derivation-builder.cc +++ b/src/libstore/unix/build/darwin-derivation-builder.cc @@ -160,6 +160,8 @@ struct DarwinDerivationBuilder : DerivationBuilderImpl if (getEnv("_NIX_TEST_NO_SANDBOX") != "1") { Strings sandboxArgs; + sandboxArgs.push_back("_NIX_BUILD_TOP"); + sandboxArgs.push_back(tmpDir); sandboxArgs.push_back("_GLOBAL_TMP_DIR"); sandboxArgs.push_back(globalTmpDir); if (drvOptions.allowLocalNetworking) { diff --git a/src/libstore/unix/build/sandbox-defaults.sb b/src/libstore/unix/build/sandbox-defaults.sb index 15cd6daf5..dd6a064c1 100644 --- a/src/libstore/unix/build/sandbox-defaults.sb +++ b/src/libstore/unix/build/sandbox-defaults.sb @@ -29,12 +29,14 @@ R""( ; Allow getpwuid. (allow mach-lookup (global-name "com.apple.system.opendirectoryd.libinfo")) -; Access to /tmp. +; Access to /tmp and the build directory. ; The network-outbound/network-inbound ones are for unix domain sockets, which ; we allow access to in TMPDIR (but if we allow them more broadly, you could in ; theory escape the sandbox) (allow file* process-exec network-outbound network-inbound - (literal "/tmp") (subpath TMPDIR)) + (literal "/tmp") + (subpath TMPDIR) + (subpath (param "_NIX_BUILD_TOP"))) ; Some packages like to read the system version. 
(allow file-read* From 3e9a100bdf64664bea84aa9eb8ae620945d2651b Mon Sep 17 00:00:00 2001 From: Pol Dellaiera Date: Sun, 13 Jul 2025 22:49:06 +0200 Subject: [PATCH 026/382] docker: set default parameters values --- docker.nix | 44 ++++++++++++++++++++++---------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/docker.nix b/docker.nix index f59492025..410e4a178 100644 --- a/docker.nix +++ b/docker.nix @@ -1,10 +1,10 @@ { # Core dependencies - pkgs, - lib, - dockerTools, - runCommand, - buildPackages, + pkgs ? import { }, + lib ? pkgs.lib, + dockerTools ? pkgs.dockerTools, + runCommand ? pkgs.runCommand, + buildPackages ? pkgs.buildPackages, # Image configuration name ? "nix", tag ? "latest", @@ -28,24 +28,24 @@ }, Cmd ? [ (lib.getExe bashInteractive) ], # Default Packages - nix, - bashInteractive, - coreutils-full, - gnutar, - gzip, - gnugrep, - which, - curl, - less, - wget, - man, - cacert, - findutils, - iana-etc, - gitMinimal, - openssh, + nix ? pkgs.nix, + bashInteractive ? pkgs.bashInteractive, + coreutils-full ? pkgs.coreutils-full, + gnutar ? pkgs.gnutar, + gzip ? pkgs.gzip, + gnugrep ? pkgs.gnugrep, + which ? pkgs.which, + curl ? pkgs.curl, + less ? pkgs.less, + wget ? pkgs.wget, + man ? pkgs.man, + cacert ? pkgs.cacert, + findutils ? pkgs.findutils, + iana-etc ? pkgs.iana-etc, + gitMinimal ? pkgs.gitMinimal, + openssh ? pkgs.openssh, # Other dependencies - shadow, + shadow ? pkgs.shadow, }: let defaultPkgs = [ From 04f6974d2c47ae3cc44733adb707107a675e2c92 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 13 Jul 2025 15:21:01 +0300 Subject: [PATCH 027/382] ci: Dogfood Nix from master --- .../actions/install-nix-action/action.yaml | 50 +++++++++++++++++++ .github/workflows/ci.yml | 14 ++++-- 2 files changed, 59 insertions(+), 5 deletions(-) create mode 100644 .github/actions/install-nix-action/action.yaml diff --git a/.github/actions/install-nix-action/action.yaml b/.github/actions/install-nix-action/action.yaml new file mode 100644 index 000000000..28103f589 --- /dev/null +++ b/.github/actions/install-nix-action/action.yaml @@ -0,0 +1,50 @@ +name: "Install Nix" +description: "Helper action for installing Nix with support for dogfooding from master" +inputs: + dogfood: + description: "Whether to use Nix installed from the latest artifact from master branch" + required: true # Be explicit about the fact that we are using unreleased artifacts + extra_nix_config: + description: "Gets appended to `/etc/nix/nix.conf` if passed." 
+ install_url: + description: "URL of the Nix installer" + required: false + default: "https://releases.nixos.org/nix/nix-2.30.1/install" + github_token: + description: "Github token" + required: true +runs: + using: "composite" + steps: + - name: "Download nix install artifact from master" + shell: bash + id: download-nix-installer + if: ${{ inputs.dogfood }} + run: | + RUN_ID=$(gh run list --repo "$DOGFOOD_REPO" --workflow ci.yml --branch master --status success --json databaseId --jq ".[0].databaseId") + + if [ "$RUNNER_OS" == "Linux" ]; then + INSTALLER_ARTIFACT="installer-linux" + elif [ "$RUNNER_OS" == "macOS" ]; then + INSTALLER_ARTIFACT="installer-darwin" + else + echo "::error ::Unsupported RUNNER_OS: $RUNNER_OS" + exit 1 + fi + + INSTALLER_DOWNLOAD_DIR="$GITHUB_WORKSPACE/$INSTALLER_ARTIFACT" + mkdir -p "$INSTALLER_DOWNLOAD_DIR" + + gh run download "$RUN_ID" --repo "$DOGFOOD_REPO" -n "$INSTALLER_ARTIFACT" -D "$INSTALLER_DOWNLOAD_DIR" + echo "installer-path=file://$INSTALLER_DOWNLOAD_DIR" >> "$GITHUB_OUTPUT" + + echo "::notice ::Dogfooding Nix installer from master (https://github.com/$DOGFOOD_REPO/actions/runs/$RUN_ID)" + env: + GH_TOKEN: ${{ inputs.github_token }} + DOGFOOD_REPO: "NixOS/nix" + - uses: cachix/install-nix-action@c134e4c9e34bac6cab09cf239815f9339aaaf84e # v31.5.1 + with: + # Ternary operator in GHA: https://www.github.com/actions/runner/issues/409#issuecomment-752775072 + install_url: ${{ inputs.dogfood && format('{0}/install', steps.download-nix-installer.outputs.installer-path) || inputs.install_url }} + install_options: ${{ inputs.dogfood && format('--tarball-url-prefix {0}', steps.download-nix-installer.outputs.installer-path) || '' }} + extra_nix_config: ${{ inputs.extra_nix_config }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ac749bc3f..2531ee020 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -13,10 +13,13 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: cachix/install-nix-action@v31 + - uses: ./.github/actions/install-nix-action with: - install_url: "https://releases.nixos.org/nix/nix-2.29.1/install" - - run: nix --experimental-features 'nix-command flakes' flake show --all-systems --json + dogfood: true + extra_nix_config: + experimental-features = nix-command flakes + github_token: ${{ secrets.GITHUB_TOKEN }} + - run: nix flake show --all-systems --json tests: strategy: @@ -36,9 +39,10 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: cachix/install-nix-action@v31 + - uses: ./.github/actions/install-nix-action with: - install_url: "https://releases.nixos.org/nix/nix-2.29.1/install" + github_token: ${{ secrets.GITHUB_TOKEN }} + dogfood: true # The sandbox would otherwise be disabled by default on Darwin extra_nix_config: | sandbox = true From 3b3c02160dce1110ed9856aa6234fd37fa5c9347 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 13 Jul 2025 16:05:05 +0300 Subject: [PATCH 028/382] ci: Dogfood nix from master for `vm_tests` and `flake_regressions` This should provide more coverage for the build from master that is being dogfooded. 
--- .github/workflows/ci.yml | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2531ee020..da6f35907 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -182,7 +182,12 @@ jobs: runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 - - uses: DeterminateSystems/nix-installer-action@main + - uses: ./.github/actions/install-nix-action + with: + dogfood: true + extra_nix_config: + experimental-features = nix-command flakes + github_token: ${{ secrets.GITHUB_TOKEN }} - uses: DeterminateSystems/magic-nix-cache-action@main - run: | nix build -L \ @@ -208,6 +213,11 @@ jobs: with: repository: NixOS/flake-regressions-data path: flake-regressions/tests - - uses: DeterminateSystems/nix-installer-action@main + - uses: ./.github/actions/install-nix-action + with: + dogfood: true + extra_nix_config: + experimental-features = nix-command flakes + github_token: ${{ secrets.GITHUB_TOKEN }} - uses: DeterminateSystems/magic-nix-cache-action@main - run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=25 flake-regressions/eval-all.sh From 6abc29bba526c426bdaf4477d0740d877ac53294 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 15 Jul 2025 15:17:33 +0200 Subject: [PATCH 029/382] Move boost version check to libutil --- src/libexpr/meson.build | 1 - src/libutil/meson.build | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index 43e4b9c98..f5adafae0 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -41,7 +41,6 @@ boost = dependency( 'boost', modules : ['container', 'context'], include_type: 'system', - version: '>=1.82.0' ) # boost is a public dependency, but not a pkg-config dependency unfortunately, so we # put in `deps_other`. diff --git a/src/libutil/meson.build b/src/libutil/meson.build index f5ad2b1f6..f48c8f3d7 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -59,6 +59,7 @@ boost = dependency( 'boost', modules : ['context', 'coroutine', 'iostreams'], include_type: 'system', + version: '>=1.82.0' ) # boost is a public dependency, but not a pkg-config dependency unfortunately, so we # put in `deps_other`. From fde606887430344dd91dbbf907b8cc3bad1f2125 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 15 Jul 2025 18:09:06 +0200 Subject: [PATCH 030/382] Improve rendering of ignored exceptions Instead of error (ignored): error: SQLite database '...' is busy we now get error (ignored): SQLite database '...' is busy --- src/libutil/util.cc | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/libutil/util.cc b/src/libutil/util.cc index c9cc80fef..23dafe8c9 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -190,8 +190,10 @@ void ignoreExceptionInDestructor(Verbosity lvl) try { try { throw; + } catch (Error & e) { + printMsg(lvl, ANSI_RED "error (ignored):" ANSI_NORMAL " %s", e.info().msg); } catch (std::exception & e) { - printMsg(lvl, "error (ignored): %1%", e.what()); + printMsg(lvl, ANSI_RED "error (ignored):" ANSI_NORMAL " %s", e.what()); } } catch (...) 
{ } } @@ -202,8 +204,10 @@ void ignoreExceptionExceptInterrupt(Verbosity lvl) throw; } catch (const Interrupted & e) { throw; + } catch (Error & e) { + printMsg(lvl, ANSI_RED "error (ignored):" ANSI_NORMAL " %s", e.info().msg); } catch (std::exception & e) { - printMsg(lvl, "error (ignored): %1%", e.what()); + printMsg(lvl, ANSI_RED "error (ignored):" ANSI_NORMAL " %s", e.what()); } } From 7b2f24d68837b178015020f577a4b3abe2d99b92 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 15 Jul 2025 18:10:07 +0200 Subject: [PATCH 031/382] Improve handleSQLiteBusy() message Closes https://github.com/NixOS/nix/pull/10319. --- src/libstore/sqlite.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc index 55b967ed6..c3fb1f413 100644 --- a/src/libstore/sqlite.cc +++ b/src/libstore/sqlite.cc @@ -250,7 +250,7 @@ void handleSQLiteBusy(const SQLiteBusy & e, time_t & nextWarning) if (now > nextWarning) { nextWarning = now + 10; logWarning({ - .msg = HintFmt(e.what()) + .msg = e.info().msg }); } From 8e98f62a6eb4aa868615504ff5294999d5d6108e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 15 Jul 2025 19:49:07 +0200 Subject: [PATCH 032/382] PosixSourceAccessor: Use concurrent_flat_map --- src/libutil/posix-source-accessor.cc | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index 2ce7c88e4..7c2d1c296 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -3,7 +3,7 @@ #include "nix/util/signals.hh" #include "nix/util/sync.hh" -#include +#include namespace nix { @@ -90,23 +90,21 @@ bool PosixSourceAccessor::pathExists(const CanonPath & path) std::optional PosixSourceAccessor::cachedLstat(const CanonPath & path) { - static SharedSync>> _cache; + using Cache = boost::concurrent_flat_map>; + static Cache cache; // Note: we convert std::filesystem::path to Path because the // former is not hashable on libc++. Path absPath = makeAbsPath(path).string(); - { - auto cache(_cache.readLock()); - auto i = cache->find(absPath); - if (i != cache->end()) return i->second; - } + std::optional res; + cache.cvisit(absPath, [&](auto & x) { res.emplace(x.second); }); + if (res) return *res; auto st = nix::maybeLstat(absPath.c_str()); - auto cache(_cache.lock()); - if (cache->size() >= 16384) cache->clear(); - cache->emplace(absPath, st); + if (cache.size() >= 16384) cache.clear(); + cache.emplace(absPath, st); return st; } From 8e8416387c935f2dec5bd24ceac3e3553cae0d59 Mon Sep 17 00:00:00 2001 From: Farid Zakaria Date: Fri, 11 Jul 2025 09:34:06 -0700 Subject: [PATCH 033/382] Add error message when git returns non-0 for fetch Users have complained that fetchGit is flaky however the culprit is likely that `git fetch` was unable itself to download the repository for whatever reason (i.e. poor network etc..) Nothing was checking the status of `git fetch` and the error message that would eventually surface to the users were that the commit was not found. Add explicit error checking for status code from `git fetch` and return a message earlier on to indicate that the failure was from that point. 
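Roughly, the user-visible difference (messages paraphrased rather than copied from the sources):

    before:  the fetch failure was swallowed, so a later lookup failed with
             something like "cannot find Git revision '<rev>' ..."
    after:   the failure is reported where it actually happens:
             error: Failed to fetch git repository <url>: <git's own output>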
fixes #10431 --- src/libfetchers/git-utils.cc | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 9fe271fe8..563c2180d 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -545,7 +545,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this append(gitArgs, {"--depth", "1"}); append(gitArgs, {std::string("--"), url, refspec}); - runProgram(RunOptions { + auto [status, output] = runProgram(RunOptions { .program = "git", .lookupPath = true, // FIXME: git stderr messes up our progress indicator, so @@ -554,6 +554,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this .input = {}, .isInteractive = true }); + + if (status > 0) { + throw Error("Failed to fetch git repository %s: %s", url, output); + } } void verifyCommit( From fb6f494d35656556465f05db9f4ee8dbdb1d3bf5 Mon Sep 17 00:00:00 2001 From: Farid Zakaria Date: Fri, 11 Jul 2025 09:39:34 -0700 Subject: [PATCH 034/382] merge stderr to stdout so we can emit it --- src/libfetchers/git-utils.cc | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 563c2180d..d76f6879d 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -552,11 +552,12 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this // we're using --quiet for now. Should process its stderr. .args = gitArgs, .input = {}, + .mergeStderrToStdout = true, .isInteractive = true }); - + if (status > 0) { - throw Error("Failed to fetch git repository %s: %s", url, output); + throw Error("Failed to fetch git repository %s : %s", url, output); } } From a4f548fed1b6fe7c7f1882c3214175d4147ddfe4 Mon Sep 17 00:00:00 2001 From: Farid Zakaria Date: Fri, 11 Jul 2025 13:28:04 -0700 Subject: [PATCH 035/382] Fix FetchGit test --- src/libfetchers/git.cc | 14 +++++++++++-- tests/functional/fetchGit.sh | 39 +++++++++++++++++++----------------- 2 files changed, 33 insertions(+), 20 deletions(-) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index cf255c001..88fe2e83d 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -444,7 +444,11 @@ struct GitInputScheme : InputScheme // repo, treat as a remote URI to force a clone. static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; // for testing auto url = parseURL(getStrAttr(input.attrs, "url")); - bool isBareRepository = url.scheme == "file" && !pathExists(url.path + "/.git"); + + // Why are we checking for bare repository? + // well if it's a bare repository we want to force a git fetch rather than copying the folder + bool isBareRepository = url.scheme == "file" && pathExists(url.path) && + !pathExists(url.path + "/.git"); // // FIXME: here we turn a possibly relative path into an absolute path. // This allows relative git flake inputs to be resolved against the @@ -462,6 +466,12 @@ struct GitInputScheme : InputScheme "See https://github.com/NixOS/nix/issues/12281 for details.", url); } + + // If we don't check here for the path existence, then we can give libgit2 any directory + // and it will initialize them as git directories. + if (!pathExists(url.path)) { + throw Error("The path '%s' does not exist.", url.path); + } repoInfo.location = std::filesystem::absolute(url.path); } else { if (url.scheme == "file") @@ -599,7 +609,7 @@ struct GitInputScheme : InputScheme ? 
cacheDir / ref : cacheDir / "refs/heads" / ref; - bool doFetch; + bool doFetch = false; time_t now = time(0); /* If a rev was specified, we need to fetch if it's not in the diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh index a41aa35c0..dc5d8f818 100755 --- a/tests/functional/fetchGit.sh +++ b/tests/functional/fetchGit.sh @@ -53,6 +53,27 @@ rm -rf $TEST_HOME/.cache/nix path=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath") [[ $(cat $path/hello) = world ]] +# Fetch again. This should be cached. +# NOTE: This has to be done before the test case below which tries to pack-refs +# the reason being that the lookup on the cache uses the ref-file `/refs/heads/master` +# which does not exist after packing. +mv $repo ${repo}-tmp +path2=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath") +[[ $path = $path2 ]] + +[[ $(nix eval --impure --expr "(builtins.fetchGit file://$repo).revCount") = 2 ]] +[[ $(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).rev") = $rev2 ]] +[[ $(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).shortRev") = ${rev2:0:7} ]] + +# Fetching with a explicit hash should succeed. +path2=$(nix eval --refresh --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"$rev2\"; }).outPath") +[[ $path = $path2 ]] + +path2=$(nix eval --refresh --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"$rev1\"; }).outPath") +[[ $(cat $path2/hello) = utrecht ]] + +mv ${repo}-tmp $repo + # Fetch when the cache has packed-refs # Regression test of #8822 git -C $TEST_HOME/.cache/nix/gitv3/*/ pack-refs --all @@ -83,24 +104,6 @@ path2=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \" # But without a hash, it fails. expectStderr 1 nix eval --expr 'builtins.fetchGit "file:///foo"' | grepQuiet "'fetchGit' doesn't fetch unlocked input" -# Fetch again. This should be cached. -mv $repo ${repo}-tmp -path2=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath") -[[ $path = $path2 ]] - -[[ $(nix eval --impure --expr "(builtins.fetchGit file://$repo).revCount") = 2 ]] -[[ $(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).rev") = $rev2 ]] -[[ $(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).shortRev") = ${rev2:0:7} ]] - -# Fetching with a explicit hash should succeed. -path2=$(nix eval --refresh --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"$rev2\"; }).outPath") -[[ $path = $path2 ]] - -path2=$(nix eval --refresh --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"$rev1\"; }).outPath") -[[ $(cat $path2/hello) = utrecht ]] - -mv ${repo}-tmp $repo - # Using a clean working tree should produce the same result. path2=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath") [[ $path = $path2 ]] From 196c21c5a0e2f9b3149689ea36789cf0478d893a Mon Sep 17 00:00:00 2001 From: Farid Zakaria Date: Wed, 16 Jul 2025 21:09:59 -0700 Subject: [PATCH 036/382] Add helpful messages when file:// used as tarball When `file://` is used accidentally in a flake as the source it is expected to be a tarball by default. Add some friendlier error messages to either inform the user this is not in fact a tarball or if it's a git directory, let them know they can use `git+file`. 
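As a concrete example, a flake input that points a plain file:// URL at a local checkout now gets steered toward the right scheme (the paths here are hypothetical):

    # rejected with the new hint, because the path is a git repository, not a tarball:
    inputs.dep.url = "file:///home/alice/src/dep";

    # what the hint suggests using instead:
    inputs.dep.url = "git+file:///home/alice/src/dep";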
fixes #12935 --- src/libfetchers/tarball.cc | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index b0822cc33..59316eabd 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -111,6 +111,25 @@ static DownloadTarballResult downloadTarball_( const Headers & headers, const std::string & displayPrefix) { + + // Some friendly error messages for common mistakes. + // Namely lets catch when the url is a local file path, but + // it is not in fact a tarball. + if (url.rfind("file://", 0) == 0) { + // Remove "file://" prefix to get the local file path + std::string localPath = url.substr(7); + if (!std::filesystem::exists(localPath)) { + throw Error("tarball '%s' does not exist.", localPath); + } + if (std::filesystem::is_directory(localPath)) { + if (std::filesystem::exists(localPath + "/.git")) { + throw Error( + "tarball '%s' is a git repository, not a tarball. Please use `git+file` as the scheme.", localPath); + } + throw Error("tarball '%s' is a directory, not a file.", localPath); + } + } + Cache::Key cacheKey{"tarball", {{"url", url}}}; auto cached = settings.getCache()->lookupExpired(cacheKey); From 6681933643e4e80617d3fdc1cb2ea2358acc9a19 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Thu, 17 Jul 2025 12:26:50 +0200 Subject: [PATCH 037/382] Fix Windows header inclusions for clang-tidy Move windows-error.hh includes inside _WIN32 guards to prevent clang-tidy errors when analyzing these files on non-Windows platforms. --- src/libutil/windows/windows-async-pipe.cc | 5 +++-- src/libutil/windows/windows-error.cc | 3 +-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libutil/windows/windows-async-pipe.cc b/src/libutil/windows/windows-async-pipe.cc index d47930a1b..da47c37a8 100644 --- a/src/libutil/windows/windows-async-pipe.cc +++ b/src/libutil/windows/windows-async-pipe.cc @@ -1,7 +1,8 @@ -#include "nix/util/windows-async-pipe.hh" -#include "nix/util/windows-error.hh" + #ifdef _WIN32 +# include "nix/util/windows-async-pipe.hh" +# include "nix/util/windows-error.hh" namespace nix::windows { diff --git a/src/libutil/windows/windows-error.cc b/src/libutil/windows/windows-error.cc index 1e7aff830..0761bdfd5 100644 --- a/src/libutil/windows/windows-error.cc +++ b/src/libutil/windows/windows-error.cc @@ -1,6 +1,5 @@ -#include "nix/util/windows-error.hh" - #ifdef _WIN32 +#include "nix/util/windows-error.hh" #include #define WIN32_LEAN_AND_MEAN #include From f12f96bcbbd5edba64c71e1ea8e45a388490d80f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Thu, 17 Jul 2025 11:17:59 +0200 Subject: [PATCH 038/382] Fix virtual method calls during construction in S3BinaryCacheStoreImpl Move init() call from constructor to openStore() method to avoid calling virtual methods during object construction. This prevents undefined behavior when virtual methods are called before the object is fully constructed. 
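The hazard being avoided is the usual one with virtual dispatch during construction; a minimal illustration, independent of the Nix classes (the names are invented):

    #include <iostream>

    struct Base {
        Base() { init(); }   // runs Base::init — the Derived override is not
                             // visible yet, and this would be outright UB if
                             // init() were pure virtual
        virtual void init() { std::cout << "Base::init\n"; }
        virtual ~Base() = default;
    };

    struct Derived : Base {
        void init() override { std::cout << "Derived::init\n"; }
    };

    int main() { Derived d; }   // prints "Base::init"

Calling init() from the factory instead, once the object is fully constructed, lets the call dispatch to the final overrider as intended.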
--- src/libstore/s3-binary-cache-store.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index cbb47c063..9bb47a010 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -289,8 +289,6 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore , s3Helper(config->profile, config->region, config->scheme, config->endpoint) { diskCache = getNarInfoDiskCache(); - - init(); } std::string getUri() override @@ -597,10 +595,12 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore ref S3BinaryCacheStoreImpl::Config::openStore() const { - return make_ref(ref{ + auto store = make_ref(ref{ // FIXME we shouldn't actually need a mutable config std::const_pointer_cast(shared_from_this()) }); + store->init(); + return store; } static RegisterStoreImplementation regS3BinaryCacheStore; From 44963da7872821983a1b28e79a31c9ea305d0830 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Thu, 17 Jul 2025 11:15:51 +0200 Subject: [PATCH 039/382] Fix virtual method calls during construction in LocalBinaryCacheStore Move init() call from constructor to openStore() method to avoid calling virtual methods during object construction. This prevents undefined behavior when virtual methods are called before the object is fully constructed. --- src/libstore/local-binary-cache-store.cc | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc index 2f23135fa..03a9bd055 100644 --- a/src/libstore/local-binary-cache-store.cc +++ b/src/libstore/local-binary-cache-store.cc @@ -39,7 +39,6 @@ struct LocalBinaryCacheStore : , BinaryCacheStore{*config} , config{config} { - init(); } void init() override; @@ -126,10 +125,12 @@ StringSet LocalBinaryCacheStoreConfig::uriSchemes() } ref LocalBinaryCacheStoreConfig::openStore() const { - return make_ref(ref{ + auto store = make_ref(ref{ // FIXME we shouldn't actually need a mutable config std::const_pointer_cast(shared_from_this()) }); + store->init(); + return store; } static RegisterStoreImplementation regLocalBinaryCacheStore; From 6e733b0544e9d9c4d549c4eb71104158bf69455b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Thu, 17 Jul 2025 12:38:53 +0200 Subject: [PATCH 040/382] Fix SIZE_MAX undefined warning in fchmodat2-compat.hh Include to ensure SIZE_MAX is defined --- src/libstore/linux/fchmodat2-compat.hh | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libstore/linux/fchmodat2-compat.hh b/src/libstore/linux/fchmodat2-compat.hh index 42b3f3a35..907695c31 100644 --- a/src/libstore/linux/fchmodat2-compat.hh +++ b/src/libstore/linux/fchmodat2-compat.hh @@ -1,4 +1,5 @@ #include "store-config-private.hh" +#include /* * Determine the syscall number for `fchmodat2`. From 4ba3b15a101735b4d75215145b59a12366aafcf5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Thu, 17 Jul 2025 12:38:21 +0200 Subject: [PATCH 041/382] Fix s3.hh to include public config header The s3.hh public header was incorrectly including store-config-private.hh instead of the public config.hh. Since NIX_WITH_S3_SUPPORT is defined in the public config, this caused clang-tidy to report it as undefined. 
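For context, the failure mode with an undefined macro in a preprocessor conditional looks roughly like this (a sketch, not the real header contents):

    // If nothing included so far defines NIX_WITH_S3_SUPPORT, the identifier
    // evaluates to 0 in #if, silently compiling the S3-only code out —
    // and clang-tidy / -Wundef flag the use of an undefined macro.
    #if NIX_WITH_S3_SUPPORT
    // ... S3-only declarations ...
    #endif

Since the private store config header is not meant to be visible from a public header, the public config header has to be the one that supplies the definition.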
--- src/libstore/include/nix/store/s3.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/include/nix/store/s3.hh b/src/libstore/include/nix/store/s3.hh index 9c159ba0f..e017b7c6b 100644 --- a/src/libstore/include/nix/store/s3.hh +++ b/src/libstore/include/nix/store/s3.hh @@ -1,6 +1,6 @@ #pragma once ///@file -#include "store-config-private.hh" +#include "nix/store/config.hh" #if NIX_WITH_S3_SUPPORT #include "nix/util/ref.hh" From 6bf940d6366cb800b402ab070ee0fbcff1a9460f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Thu, 17 Jul 2025 11:36:55 +0200 Subject: [PATCH 042/382] Fix clang-tidy uninitialized value warning in derivation-options.cc Make lambda capture explicit to avoid clang-analyzer-core.CallAndMessage warning --- src/libstore/derivation-options.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/derivation-options.cc b/src/libstore/derivation-options.cc index f6bac2868..40c4e6d15 100644 --- a/src/libstore/derivation-options.cc +++ b/src/libstore/derivation-options.cc @@ -138,7 +138,7 @@ DerivationOptions::fromStructuredAttrs(const StringMap & env, const StructuredAt if (auto maxClosureSize = get(output, "maxClosureSize")) checks.maxClosureSize = maxClosureSize->get(); - auto get_ = [&](const std::string & name) -> std::optional { + auto get_ = [&output = output](const std::string & name) -> std::optional { if (auto i = get(output, name)) { StringSet res; for (auto j = i->begin(); j != i->end(); ++j) { From 3c0cd73418433c85bc6410f646ac41b2a9720a9a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Thu, 17 Jul 2025 10:45:05 +0200 Subject: [PATCH 043/382] Fix uninitialized field in Attr constructor The default constructor for Attr was not initializing the value pointer, which could lead to undefined behavior when the uninitialized pointer is accessed. This was caught by clang-tidy's UninitializedObject check. This fixes the warning: 1 uninitialized field at the end of the constructor call [clang-analyzer-optin.cplusplus.UninitializedObject] --- src/libexpr/include/nix/expr/attr-set.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/include/nix/expr/attr-set.hh b/src/libexpr/include/nix/expr/attr-set.hh index 283786f4d..c44e8a6b9 100644 --- a/src/libexpr/include/nix/expr/attr-set.hh +++ b/src/libexpr/include/nix/expr/attr-set.hh @@ -23,7 +23,7 @@ struct Attr way we keep Attr size at two words with no wasted space. */ Symbol name; PosIdx pos; - Value * value; + Value * value = nullptr; Attr(Symbol name, Value * value, PosIdx pos = noPos) : name(name), pos(pos), value(value) { }; Attr() { }; From d678b071d69569786db4a4cc8110ee0cd4496e2f Mon Sep 17 00:00:00 2001 From: Oleksandr Knyshuk Date: Thu, 17 Jul 2025 17:26:56 +0200 Subject: [PATCH 044/382] Make nix help shell work by handling aliases properly Previously, `nix help shell` failed with "Nix has no subcommand 'shell'" despite `nix shell --help` working correctly. This happened because the `shell` command is actually an alias for `env shell`, and the help system wasn't resolving aliases when looking up documentation. This patch modifies the `showHelp` function to check for and resolve aliases before generating the manpage name, ensuring that shorthand commands like `shell` get proper help documentation. 
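The user-visible effect, abbreviated:

    $ nix shell --help      # already worked
    $ nix help shell        # before: error: Nix has no subcommand 'shell'
                            # after:  shows the same `nix shell` documentation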
Closes: #13431 --- src/nix/main.cc | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/nix/main.cc b/src/nix/main.cc index 6144f746f..502e04e60 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -212,6 +212,14 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs, virtual RootArgs lowdown. */ static void showHelp(std::vector subcommand, NixArgs & toplevel) { + // Check for aliases if subcommand has exactly one element + if (subcommand.size() == 1) { + auto alias = toplevel.aliases.find(subcommand[0]); + if (alias != toplevel.aliases.end()) { + subcommand = alias->second.replacement; + } + } + auto mdName = subcommand.empty() ? "nix" : fmt("nix3-%s", concatStringsSep("-", subcommand)); evalSettings.restrictEval = false; From cfb8a318853d95d5a5346c1ad5a975c0c07fed26 Mon Sep 17 00:00:00 2001 From: Oleksandr Knyshuk Date: Thu, 17 Jul 2025 21:58:15 +0200 Subject: [PATCH 045/382] Require rsync in nix-manual meson.build Closes: #13313 --- doc/manual/meson.build | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/manual/meson.build b/doc/manual/meson.build index 6fe2374a7..0779cd267 100644 --- a/doc/manual/meson.build +++ b/doc/manual/meson.build @@ -8,6 +8,7 @@ nix = find_program('nix', native : true) mdbook = find_program('mdbook', native : true) bash = find_program('bash', native : true) +rsync = find_program('rsync', required: true, native: true) pymod = import('python') python = pymod.find_installation('python3') @@ -84,7 +85,7 @@ manual = custom_target( @0@ @INPUT0@ @CURRENT_SOURCE_DIR@ > @DEPFILE@ @0@ @INPUT1@ summary @2@ < @CURRENT_SOURCE_DIR@/source/SUMMARY.md.in > @2@/source/SUMMARY.md sed -e 's|@version@|@3@|g' < @INPUT2@ > @2@/book.toml - rsync -r --include='*.md' @CURRENT_SOURCE_DIR@/ @2@/ + @4@ -r --include='*.md' @CURRENT_SOURCE_DIR@/ @2@/ (cd @2@; RUST_LOG=warn @1@ build -d @2@ 3>&2 2>&1 1>&3) | { grep -Fv "because fragment resolution isn't implemented" || :; } 3>&2 2>&1 1>&3 rm -rf @2@/manual mv @2@/html @2@/manual @@ -94,6 +95,7 @@ manual = custom_target( mdbook.full_path(), meson.current_build_dir(), meson.project_version(), + rsync.full_path(), ), ], input : [ From ee9b57cbf526cddb4800937293bce7f5242b5729 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Tue, 20 May 2025 11:53:03 -0400 Subject: [PATCH 046/382] format.sh: support looping until it is happy --- maintainers/format.sh | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/maintainers/format.sh b/maintainers/format.sh index a2a6d8b41..b2902e6dc 100755 --- a/maintainers/format.sh +++ b/maintainers/format.sh @@ -1,11 +1,16 @@ #!/usr/bin/env bash if ! type -p pre-commit &>/dev/null; then - echo "format.sh: pre-commit not found. Please use \`nix develop\`."; + echo "format.sh: pre-commit not found. Please use \`nix develop -c ./maintainers/format.sh\`."; exit 1; fi; if test -z "$_NIX_PRE_COMMIT_HOOKS_CONFIG"; then - echo "format.sh: _NIX_PRE_COMMIT_HOOKS_CONFIG not set. Please use \`nix develop\`."; + echo "format.sh: _NIX_PRE_COMMIT_HOOKS_CONFIG not set. Please use \`nix develop -c ./maintainers/format.sh\`."; exit 1; fi; -pre-commit run --config "$_NIX_PRE_COMMIT_HOOKS_CONFIG" --all-files + +while ! 
pre-commit run --config "$_NIX_PRE_COMMIT_HOOKS_CONFIG" --all-files; do + if [ "${1:-}" != "--until-stable" ]; then + exit 1 + fi +done From 6896761d793137195f71c494048970fcf0384583 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Tue, 20 May 2025 12:44:10 -0400 Subject: [PATCH 047/382] Add sed --- packaging/dev-shell.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/packaging/dev-shell.nix b/packaging/dev-shell.nix index 8d3fa3852..e01a0ed8f 100644 --- a/packaging/dev-shell.nix +++ b/packaging/dev-shell.nix @@ -113,6 +113,7 @@ pkgs.nixComponents2.nix-util.overrideAttrs ( ) pkgs.buildPackages.mesonEmulatorHook ++ [ pkgs.buildPackages.cmake + pkgs.buildPackages.gnused pkgs.buildPackages.shellcheck pkgs.buildPackages.changelog-d modular.pre-commit.settings.package From 41bf87ec70eb58f88602c14a22a2df42beba2b7a Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Thu, 17 Jul 2025 12:09:33 -0400 Subject: [PATCH 048/382] Update clang-format with fixing namespace coments, and separate definition blocks --- .clang-format | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.clang-format b/.clang-format index 4f191fc18..1aadf2cad 100644 --- a/.clang-format +++ b/.clang-format @@ -8,7 +8,7 @@ BraceWrapping: AfterUnion: true SplitEmptyRecord: false PointerAlignment: Middle -FixNamespaceComments: false +FixNamespaceComments: true SortIncludes: Never #IndentPPDirectives: BeforeHash SpaceAfterCStyleCast: true @@ -32,3 +32,4 @@ IndentPPDirectives: AfterHash PPIndentWidth: 2 BinPackArguments: false BreakBeforeTernaryOperators: true +SeparateDefinitionBlocks: Always From e7af2e6566bcac97c32c3547a8821b3c2ba178e2 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Thu, 17 Jul 2025 11:07:01 -0400 Subject: [PATCH 049/382] Drop a ton of files that should just get formatted --- maintainers/flake-module.nix | 461 ----------------------------------- 1 file changed, 461 deletions(-) diff --git a/maintainers/flake-module.nix b/maintainers/flake-module.nix index 1058d6334..ee9a8bdad 100644 --- a/maintainers/flake-module.nix +++ b/maintainers/flake-module.nix @@ -189,467 +189,6 @@ # Don't format vendored code ''^doc/manual/redirects\.js$'' ''^doc/manual/theme/highlight\.js$'' - - # We haven't applied formatting to these files yet - ''^doc/manual/redirects\.js$'' - ''^doc/manual/theme/highlight\.js$'' - ''^src/build-remote/build-remote\.cc$'' - ''^src/libcmd/built-path\.cc$'' - ''^src/libcmd/include/nix/cmd/built-path\.hh$'' - ''^src/libcmd/common-eval-args\.cc$'' - ''^src/libcmd/include/nix/cmd/common-eval-args\.hh$'' - ''^src/libcmd/editor-for\.cc$'' - ''^src/libcmd/installable-attr-path\.cc$'' - ''^src/libcmd/include/nix/cmd/installable-attr-path\.hh$'' - ''^src/libcmd/installable-derived-path\.cc$'' - ''^src/libcmd/include/nix/cmd/installable-derived-path\.hh$'' - ''^src/libcmd/installable-flake\.cc$'' - ''^src/libcmd/include/nix/cmd/installable-flake\.hh$'' - ''^src/libcmd/installable-value\.cc$'' - ''^src/libcmd/include/nix/cmd/installable-value\.hh$'' - ''^src/libcmd/installables\.cc$'' - ''^src/libcmd/include/nix/cmd/installables\.hh$'' - ''^src/libcmd/include/nix/cmd/legacy\.hh$'' - ''^src/libcmd/markdown\.cc$'' - ''^src/libcmd/misc-store-flags\.cc$'' - ''^src/libcmd/repl-interacter\.cc$'' - ''^src/libcmd/include/nix/cmd/repl-interacter\.hh$'' - ''^src/libcmd/repl\.cc$'' - ''^src/libcmd/include/nix/cmd/repl\.hh$'' - ''^src/libexpr-c/nix_api_expr\.cc$'' - ''^src/libexpr-c/nix_api_external\.cc$'' - ''^src/libexpr/attr-path\.cc$'' - ''^src/libexpr/include/nix/expr/attr-path\.hh$'' 
- ''^src/libexpr/attr-set\.cc$'' - ''^src/libexpr/include/nix/expr/attr-set\.hh$'' - ''^src/libexpr/eval-cache\.cc$'' - ''^src/libexpr/include/nix/expr/eval-cache\.hh$'' - ''^src/libexpr/eval-error\.cc$'' - ''^src/libexpr/include/nix/expr/eval-inline\.hh$'' - ''^src/libexpr/eval-settings\.cc$'' - ''^src/libexpr/include/nix/expr/eval-settings\.hh$'' - ''^src/libexpr/eval\.cc$'' - ''^src/libexpr/include/nix/expr/eval\.hh$'' - ''^src/libexpr/function-trace\.cc$'' - ''^src/libexpr/include/nix/expr/gc-small-vector\.hh$'' - ''^src/libexpr/get-drvs\.cc$'' - ''^src/libexpr/include/nix/expr/get-drvs\.hh$'' - ''^src/libexpr/json-to-value\.cc$'' - ''^src/libexpr/nixexpr\.cc$'' - ''^src/libexpr/include/nix/expr/nixexpr\.hh$'' - ''^src/libexpr/include/nix/expr/parser-state\.hh$'' - ''^src/libexpr/primops\.cc$'' - ''^src/libexpr/include/nix/expr/primops\.hh$'' - ''^src/libexpr/primops/context\.cc$'' - ''^src/libexpr/primops/fetchClosure\.cc$'' - ''^src/libexpr/primops/fetchMercurial\.cc$'' - ''^src/libexpr/primops/fetchTree\.cc$'' - ''^src/libexpr/primops/fromTOML\.cc$'' - ''^src/libexpr/print-ambiguous\.cc$'' - ''^src/libexpr/include/nix/expr/print-ambiguous\.hh$'' - ''^src/libexpr/include/nix/expr/print-options\.hh$'' - ''^src/libexpr/print\.cc$'' - ''^src/libexpr/include/nix/expr/print\.hh$'' - ''^src/libexpr/search-path\.cc$'' - ''^src/libexpr/include/nix/expr/symbol-table\.hh$'' - ''^src/libexpr/value-to-json\.cc$'' - ''^src/libexpr/include/nix/expr/value-to-json\.hh$'' - ''^src/libexpr/value-to-xml\.cc$'' - ''^src/libexpr/include/nix/expr/value-to-xml\.hh$'' - ''^src/libexpr/value/context\.cc$'' - ''^src/libexpr/include/nix/expr/value/context\.hh$'' - ''^src/libfetchers/attrs\.cc$'' - ''^src/libfetchers/cache\.cc$'' - ''^src/libfetchers/include/nix/fetchers/cache\.hh$'' - ''^src/libfetchers/fetch-settings\.cc$'' - ''^src/libfetchers/include/nix/fetchers/fetch-settings\.hh$'' - ''^src/libfetchers/fetch-to-store\.cc$'' - ''^src/libfetchers/fetchers\.cc$'' - ''^src/libfetchers/include/nix/fetchers/fetchers\.hh$'' - ''^src/libfetchers/filtering-source-accessor\.cc$'' - ''^src/libfetchers/include/nix/fetchers/filtering-source-accessor\.hh$'' - ''^src/libfetchers/fs-source-accessor\.cc$'' - ''^src/libfetchers/include/nix/fs-source-accessor\.hh$'' - ''^src/libfetchers/git-utils\.cc$'' - ''^src/libfetchers/include/nix/fetchers/git-utils\.hh$'' - ''^src/libfetchers/github\.cc$'' - ''^src/libfetchers/indirect\.cc$'' - ''^src/libfetchers/memory-source-accessor\.cc$'' - ''^src/libfetchers/path\.cc$'' - ''^src/libfetchers/registry\.cc$'' - ''^src/libfetchers/include/nix/fetchers/registry\.hh$'' - ''^src/libfetchers/tarball\.cc$'' - ''^src/libfetchers/include/nix/fetchers/tarball\.hh$'' - ''^src/libfetchers/git\.cc$'' - ''^src/libfetchers/mercurial\.cc$'' - ''^src/libflake/config\.cc$'' - ''^src/libflake/flake\.cc$'' - ''^src/libflake/include/nix/flake/flake\.hh$'' - ''^src/libflake/flakeref\.cc$'' - ''^src/libflake/include/nix/flake/flakeref\.hh$'' - ''^src/libflake/lockfile\.cc$'' - ''^src/libflake/include/nix/flake/lockfile\.hh$'' - ''^src/libflake/url-name\.cc$'' - ''^src/libmain/common-args\.cc$'' - ''^src/libmain/include/nix/main/common-args\.hh$'' - ''^src/libmain/loggers\.cc$'' - ''^src/libmain/include/nix/main/loggers\.hh$'' - ''^src/libmain/progress-bar\.cc$'' - ''^src/libmain/shared\.cc$'' - ''^src/libmain/include/nix/main/shared\.hh$'' - ''^src/libmain/unix/stack\.cc$'' - ''^src/libstore/binary-cache-store\.cc$'' - ''^src/libstore/include/nix/store/binary-cache-store\.hh$'' - 
''^src/libstore/include/nix/store/build-result\.hh$'' - ''^src/libstore/include/nix/store/builtins\.hh$'' - ''^src/libstore/builtins/buildenv\.cc$'' - ''^src/libstore/include/nix/store/builtins/buildenv\.hh$'' - ''^src/libstore/include/nix/store/common-protocol-impl\.hh$'' - ''^src/libstore/common-protocol\.cc$'' - ''^src/libstore/include/nix/store/common-protocol\.hh$'' - ''^src/libstore/include/nix/store/common-ssh-store-config\.hh$'' - ''^src/libstore/content-address\.cc$'' - ''^src/libstore/include/nix/store/content-address\.hh$'' - ''^src/libstore/daemon\.cc$'' - ''^src/libstore/include/nix/store/daemon\.hh$'' - ''^src/libstore/derivations\.cc$'' - ''^src/libstore/include/nix/store/derivations\.hh$'' - ''^src/libstore/derived-path-map\.cc$'' - ''^src/libstore/include/nix/store/derived-path-map\.hh$'' - ''^src/libstore/derived-path\.cc$'' - ''^src/libstore/include/nix/store/derived-path\.hh$'' - ''^src/libstore/downstream-placeholder\.cc$'' - ''^src/libstore/include/nix/store/downstream-placeholder\.hh$'' - ''^src/libstore/dummy-store\.cc$'' - ''^src/libstore/export-import\.cc$'' - ''^src/libstore/filetransfer\.cc$'' - ''^src/libstore/include/nix/store/filetransfer\.hh$'' - ''^src/libstore/include/nix/store/gc-store\.hh$'' - ''^src/libstore/globals\.cc$'' - ''^src/libstore/include/nix/store/globals\.hh$'' - ''^src/libstore/http-binary-cache-store\.cc$'' - ''^src/libstore/legacy-ssh-store\.cc$'' - ''^src/libstore/include/nix/store/legacy-ssh-store\.hh$'' - ''^src/libstore/include/nix/store/length-prefixed-protocol-helper\.hh$'' - ''^src/libstore/linux/personality\.cc$'' - ''^src/libstore/linux/include/nix/store/personality\.hh$'' - ''^src/libstore/local-binary-cache-store\.cc$'' - ''^src/libstore/local-fs-store\.cc$'' - ''^src/libstore/include/nix/store/local-fs-store\.hh$'' - ''^src/libstore/log-store\.cc$'' - ''^src/libstore/include/nix/store/log-store\.hh$'' - ''^src/libstore/machines\.cc$'' - ''^src/libstore/include/nix/store/machines\.hh$'' - ''^src/libstore/make-content-addressed\.cc$'' - ''^src/libstore/include/nix/store/make-content-addressed\.hh$'' - ''^src/libstore/misc\.cc$'' - ''^src/libstore/names\.cc$'' - ''^src/libstore/include/nix/store/names\.hh$'' - ''^src/libstore/nar-accessor\.cc$'' - ''^src/libstore/include/nix/store/nar-accessor\.hh$'' - ''^src/libstore/nar-info-disk-cache\.cc$'' - ''^src/libstore/include/nix/store/nar-info-disk-cache\.hh$'' - ''^src/libstore/nar-info\.cc$'' - ''^src/libstore/include/nix/store/nar-info\.hh$'' - ''^src/libstore/outputs-spec\.cc$'' - ''^src/libstore/include/nix/store/outputs-spec\.hh$'' - ''^src/libstore/parsed-derivations\.cc$'' - ''^src/libstore/path-info\.cc$'' - ''^src/libstore/include/nix/store/path-info\.hh$'' - ''^src/libstore/path-references\.cc$'' - ''^src/libstore/include/nix/store/path-regex\.hh$'' - ''^src/libstore/path-with-outputs\.cc$'' - ''^src/libstore/path\.cc$'' - ''^src/libstore/include/nix/store/path\.hh$'' - ''^src/libstore/pathlocks\.cc$'' - ''^src/libstore/include/nix/store/pathlocks\.hh$'' - ''^src/libstore/profiles\.cc$'' - ''^src/libstore/include/nix/store/profiles\.hh$'' - ''^src/libstore/realisation\.cc$'' - ''^src/libstore/include/nix/store/realisation\.hh$'' - ''^src/libstore/remote-fs-accessor\.cc$'' - ''^src/libstore/include/nix/store/remote-fs-accessor\.hh$'' - ''^src/libstore/include/nix/store/remote-store-connection\.hh$'' - ''^src/libstore/remote-store\.cc$'' - ''^src/libstore/include/nix/store/remote-store\.hh$'' - ''^src/libstore/s3-binary-cache-store\.cc$'' - 
''^src/libstore/include/nix/store/s3\.hh$'' - ''^src/libstore/serve-protocol-impl\.cc$'' - ''^src/libstore/include/nix/store/serve-protocol-impl\.hh$'' - ''^src/libstore/serve-protocol\.cc$'' - ''^src/libstore/include/nix/store/serve-protocol\.hh$'' - ''^src/libstore/sqlite\.cc$'' - ''^src/libstore/include/nix/store/sqlite\.hh$'' - ''^src/libstore/ssh-store\.cc$'' - ''^src/libstore/ssh\.cc$'' - ''^src/libstore/include/nix/store/ssh\.hh$'' - ''^src/libstore/store-api\.cc$'' - ''^src/libstore/include/nix/store/store-api\.hh$'' - ''^src/libstore/include/nix/store/store-dir-config\.hh$'' - ''^src/libstore/build/derivation-building-goal\.cc$'' - ''^src/libstore/include/nix/store/build/derivation-building-goal\.hh$'' - ''^src/libstore/build/derivation-goal\.cc$'' - ''^src/libstore/include/nix/store/build/derivation-goal\.hh$'' - ''^src/libstore/build/drv-output-substitution-goal\.cc$'' - ''^src/libstore/include/nix/store/build/drv-output-substitution-goal\.hh$'' - ''^src/libstore/build/entry-points\.cc$'' - ''^src/libstore/build/goal\.cc$'' - ''^src/libstore/include/nix/store/build/goal\.hh$'' - ''^src/libstore/unix/build/hook-instance\.cc$'' - ''^src/libstore/unix/build/derivation-builder\.cc$'' - ''^src/libstore/unix/include/nix/store/build/derivation-builder\.hh$'' - ''^src/libstore/build/substitution-goal\.cc$'' - ''^src/libstore/include/nix/store/build/substitution-goal\.hh$'' - ''^src/libstore/build/worker\.cc$'' - ''^src/libstore/include/nix/store/build/worker\.hh$'' - ''^src/libstore/builtins/fetchurl\.cc$'' - ''^src/libstore/builtins/unpack-channel\.cc$'' - ''^src/libstore/gc\.cc$'' - ''^src/libstore/local-overlay-store\.cc$'' - ''^src/libstore/include/nix/store/local-overlay-store\.hh$'' - ''^src/libstore/local-store\.cc$'' - ''^src/libstore/include/nix/store/local-store\.hh$'' - ''^src/libstore/unix/user-lock\.cc$'' - ''^src/libstore/unix/include/nix/store/user-lock\.hh$'' - ''^src/libstore/optimise-store\.cc$'' - ''^src/libstore/unix/pathlocks\.cc$'' - ''^src/libstore/posix-fs-canonicalise\.cc$'' - ''^src/libstore/include/nix/store/posix-fs-canonicalise\.hh$'' - ''^src/libstore/uds-remote-store\.cc$'' - ''^src/libstore/include/nix/store/uds-remote-store\.hh$'' - ''^src/libstore/windows/build\.cc$'' - ''^src/libstore/include/nix/store/worker-protocol-impl\.hh$'' - ''^src/libstore/worker-protocol\.cc$'' - ''^src/libstore/include/nix/store/worker-protocol\.hh$'' - ''^src/libutil-c/nix_api_util_internal\.h$'' - ''^src/libutil/archive\.cc$'' - ''^src/libutil/include/nix/util/archive\.hh$'' - ''^src/libutil/args\.cc$'' - ''^src/libutil/include/nix/util/args\.hh$'' - ''^src/libutil/include/nix/util/args/root\.hh$'' - ''^src/libutil/include/nix/util/callback\.hh$'' - ''^src/libutil/canon-path\.cc$'' - ''^src/libutil/include/nix/util/canon-path\.hh$'' - ''^src/libutil/include/nix/util/chunked-vector\.hh$'' - ''^src/libutil/include/nix/util/closure\.hh$'' - ''^src/libutil/include/nix/util/comparator\.hh$'' - ''^src/libutil/compute-levels\.cc$'' - ''^src/libutil/include/nix/util/config-impl\.hh$'' - ''^src/libutil/configuration\.cc$'' - ''^src/libutil/include/nix/util/configuration\.hh$'' - ''^src/libutil/current-process\.cc$'' - ''^src/libutil/include/nix/util/current-process\.hh$'' - ''^src/libutil/english\.cc$'' - ''^src/libutil/include/nix/util/english\.hh$'' - ''^src/libutil/error\.cc$'' - ''^src/libutil/include/nix/util/error\.hh$'' - ''^src/libutil/include/nix/util/exit\.hh$'' - ''^src/libutil/experimental-features\.cc$'' - ''^src/libutil/include/nix/util/experimental-features\.hh$'' - 
''^src/libutil/file-content-address\.cc$'' - ''^src/libutil/include/nix/util/file-content-address\.hh$'' - ''^src/libutil/file-descriptor\.cc$'' - ''^src/libutil/include/nix/util/file-descriptor\.hh$'' - ''^src/libutil/include/nix/util/file-path-impl\.hh$'' - ''^src/libutil/include/nix/util/file-path\.hh$'' - ''^src/libutil/file-system\.cc$'' - ''^src/libutil/include/nix/util/file-system\.hh$'' - ''^src/libutil/include/nix/util/finally\.hh$'' - ''^src/libutil/include/nix/util/fmt\.hh$'' - ''^src/libutil/fs-sink\.cc$'' - ''^src/libutil/include/nix/util/fs-sink\.hh$'' - ''^src/libutil/git\.cc$'' - ''^src/libutil/include/nix/util/git\.hh$'' - ''^src/libutil/hash\.cc$'' - ''^src/libutil/include/nix/util/hash\.hh$'' - ''^src/libutil/hilite\.cc$'' - ''^src/libutil/include/nix/util/hilite\.hh$'' - ''^src/libutil/source-accessor\.hh$'' - ''^src/libutil/include/nix/util/json-impls\.hh$'' - ''^src/libutil/json-utils\.cc$'' - ''^src/libutil/include/nix/util/json-utils\.hh$'' - ''^src/libutil/linux/cgroup\.cc$'' - ''^src/libutil/linux/linux-namespaces\.cc$'' - ''^src/libutil/logging\.cc$'' - ''^src/libutil/include/nix/util/logging\.hh$'' - ''^src/libutil/memory-source-accessor\.cc$'' - ''^src/libutil/include/nix/util/memory-source-accessor\.hh$'' - ''^src/libutil/include/nix/util/pool\.hh$'' - ''^src/libutil/position\.cc$'' - ''^src/libutil/include/nix/util/position\.hh$'' - ''^src/libutil/posix-source-accessor\.cc$'' - ''^src/libutil/include/nix/util/posix-source-accessor\.hh$'' - ''^src/libutil/include/nix/util/processes\.hh$'' - ''^src/libutil/include/nix/util/ref\.hh$'' - ''^src/libutil/references\.cc$'' - ''^src/libutil/include/nix/util/references\.hh$'' - ''^src/libutil/regex-combinators\.hh$'' - ''^src/libutil/serialise\.cc$'' - ''^src/libutil/include/nix/util/serialise\.hh$'' - ''^src/libutil/include/nix/util/signals\.hh$'' - ''^src/libutil/signature/local-keys\.cc$'' - ''^src/libutil/include/nix/util/signature/local-keys\.hh$'' - ''^src/libutil/signature/signer\.cc$'' - ''^src/libutil/include/nix/util/signature/signer\.hh$'' - ''^src/libutil/source-accessor\.cc$'' - ''^src/libutil/include/nix/util/source-accessor\.hh$'' - ''^src/libutil/source-path\.cc$'' - ''^src/libutil/include/nix/util/source-path\.hh$'' - ''^src/libutil/include/nix/util/split\.hh$'' - ''^src/libutil/suggestions\.cc$'' - ''^src/libutil/include/nix/util/suggestions\.hh$'' - ''^src/libutil/include/nix/util/sync\.hh$'' - ''^src/libutil/terminal\.cc$'' - ''^src/libutil/include/nix/util/terminal\.hh$'' - ''^src/libutil/thread-pool\.cc$'' - ''^src/libutil/include/nix/util/thread-pool\.hh$'' - ''^src/libutil/include/nix/util/topo-sort\.hh$'' - ''^src/libutil/include/nix/util/types\.hh$'' - ''^src/libutil/unix/file-descriptor\.cc$'' - ''^src/libutil/unix/file-path\.cc$'' - ''^src/libutil/unix/processes\.cc$'' - ''^src/libutil/unix/include/nix/util/signals-impl\.hh$'' - ''^src/libutil/unix/signals\.cc$'' - ''^src/libutil/unix-domain-socket\.cc$'' - ''^src/libutil/unix/users\.cc$'' - ''^src/libutil/include/nix/util/url-parts\.hh$'' - ''^src/libutil/url\.cc$'' - ''^src/libutil/include/nix/util/url\.hh$'' - ''^src/libutil/users\.cc$'' - ''^src/libutil/include/nix/util/users\.hh$'' - ''^src/libutil/util\.cc$'' - ''^src/libutil/include/nix/util/util\.hh$'' - ''^src/libutil/include/nix/util/variant-wrapper\.hh$'' - ''^src/libutil/widecharwidth/widechar_width\.h$'' # vendored source - ''^src/libutil/windows/file-descriptor\.cc$'' - ''^src/libutil/windows/file-path\.cc$'' - ''^src/libutil/windows/processes\.cc$'' - 
''^src/libutil/windows/users\.cc$'' - ''^src/libutil/windows/windows-error\.cc$'' - ''^src/libutil/windows/include/nix/util/windows-error\.hh$'' - ''^src/libutil/xml-writer\.cc$'' - ''^src/libutil/include/nix/util/xml-writer\.hh$'' - ''^src/nix-build/nix-build\.cc$'' - ''^src/nix-channel/nix-channel\.cc$'' - ''^src/nix-collect-garbage/nix-collect-garbage\.cc$'' - ''^src/nix-env/buildenv.nix$'' - ''^src/nix-env/nix-env\.cc$'' - ''^src/nix-env/user-env\.cc$'' - ''^src/nix-env/user-env\.hh$'' - ''^src/nix-instantiate/nix-instantiate\.cc$'' - ''^src/nix-store/dotgraph\.cc$'' - ''^src/nix-store/graphml\.cc$'' - ''^src/nix-store/nix-store\.cc$'' - ''^src/nix/add-to-store\.cc$'' - ''^src/nix/app\.cc$'' - ''^src/nix/build\.cc$'' - ''^src/nix/bundle\.cc$'' - ''^src/nix/cat\.cc$'' - ''^src/nix/config-check\.cc$'' - ''^src/nix/config\.cc$'' - ''^src/nix/copy\.cc$'' - ''^src/nix/derivation-add\.cc$'' - ''^src/nix/derivation-show\.cc$'' - ''^src/nix/derivation\.cc$'' - ''^src/nix/develop\.cc$'' - ''^src/nix/diff-closures\.cc$'' - ''^src/nix/dump-path\.cc$'' - ''^src/nix/edit\.cc$'' - ''^src/nix/eval\.cc$'' - ''^src/nix/flake\.cc$'' - ''^src/nix/fmt\.cc$'' - ''^src/nix/hash\.cc$'' - ''^src/nix/log\.cc$'' - ''^src/nix/ls\.cc$'' - ''^src/nix/main\.cc$'' - ''^src/nix/make-content-addressed\.cc$'' - ''^src/nix/nar\.cc$'' - ''^src/nix/optimise-store\.cc$'' - ''^src/nix/path-from-hash-part\.cc$'' - ''^src/nix/path-info\.cc$'' - ''^src/nix/prefetch\.cc$'' - ''^src/nix/profile\.cc$'' - ''^src/nix/realisation\.cc$'' - ''^src/nix/registry\.cc$'' - ''^src/nix/repl\.cc$'' - ''^src/nix/run\.cc$'' - ''^src/nix/run\.hh$'' - ''^src/nix/search\.cc$'' - ''^src/nix/sigs\.cc$'' - ''^src/nix/store-copy-log\.cc$'' - ''^src/nix/store-delete\.cc$'' - ''^src/nix/store-gc\.cc$'' - ''^src/nix/store-info\.cc$'' - ''^src/nix/store-repair\.cc$'' - ''^src/nix/store\.cc$'' - ''^src/nix/unix/daemon\.cc$'' - ''^src/nix/upgrade-nix\.cc$'' - ''^src/nix/verify\.cc$'' - ''^src/nix/why-depends\.cc$'' - - ''^tests/functional/plugins/plugintest\.cc'' - ''^tests/functional/test-libstoreconsumer/main\.cc'' - ''^tests/nixos/ca-fd-leak/sender\.c'' - ''^tests/nixos/ca-fd-leak/smuggler\.c'' - ''^tests/nixos/user-sandboxing/attacker\.c'' - ''^src/libexpr-test-support/include/nix/expr/tests/libexpr\.hh'' - ''^src/libexpr-test-support/tests/value/context\.cc'' - ''^src/libexpr-test-support/include/nix/expr/tests/value/context\.hh'' - ''^src/libexpr-tests/derived-path\.cc'' - ''^src/libexpr-tests/error_traces\.cc'' - ''^src/libexpr-tests/eval\.cc'' - ''^src/libexpr-tests/json\.cc'' - ''^src/libexpr-tests/main\.cc'' - ''^src/libexpr-tests/primops\.cc'' - ''^src/libexpr-tests/search-path\.cc'' - ''^src/libexpr-tests/trivial\.cc'' - ''^src/libexpr-tests/value/context\.cc'' - ''^src/libexpr-tests/value/print\.cc'' - ''^src/libfetchers-tests/public-key\.cc'' - ''^src/libflake-tests/flakeref\.cc'' - ''^src/libflake-tests/url-name\.cc'' - ''^src/libstore-test-support/tests/derived-path\.cc'' - ''^src/libstore-test-support/include/nix/store/tests/derived-path\.hh'' - ''^src/libstore-test-support/include/nix/store/tests/nix_api_store\.hh'' - ''^src/libstore-test-support/tests/outputs-spec\.cc'' - ''^src/libstore-test-support/include/nix/store/tests/outputs-spec\.hh'' - ''^src/libstore-test-support/path\.cc'' - ''^src/libstore-test-support/include/nix/store/tests/path\.hh'' - ''^src/libstore-test-support/include/nix/store/tests/protocol\.hh'' - ''^src/libstore-tests/common-protocol\.cc'' - ''^src/libstore-tests/content-address\.cc'' - 
''^src/libstore-tests/derivation\.cc'' - ''^src/libstore-tests/derived-path\.cc'' - ''^src/libstore-tests/downstream-placeholder\.cc'' - ''^src/libstore-tests/machines\.cc'' - ''^src/libstore-tests/nar-info-disk-cache\.cc'' - ''^src/libstore-tests/nar-info\.cc'' - ''^src/libstore-tests/outputs-spec\.cc'' - ''^src/libstore-tests/path-info\.cc'' - ''^src/libstore-tests/path\.cc'' - ''^src/libstore-tests/serve-protocol\.cc'' - ''^src/libstore-tests/worker-protocol\.cc'' - ''^src/libutil-test-support/include/nix/util/tests/characterization\.hh'' - ''^src/libutil-test-support/hash\.cc'' - ''^src/libutil-test-support/include/nix/util/tests/hash\.hh'' - ''^src/libutil-tests/args\.cc'' - ''^src/libutil-tests/canon-path\.cc'' - ''^src/libutil-tests/chunked-vector\.cc'' - ''^src/libutil-tests/closure\.cc'' - ''^src/libutil-tests/compression\.cc'' - ''^src/libutil-tests/config\.cc'' - ''^src/libutil-tests/file-content-address\.cc'' - ''^src/libutil-tests/git\.cc'' - ''^src/libutil-tests/hash\.cc'' - ''^src/libutil-tests/hilite\.cc'' - ''^src/libutil-tests/json-utils\.cc'' - ''^src/libutil-tests/logging\.cc'' - ''^src/libutil-tests/lru-cache\.cc'' - ''^src/libutil-tests/pool\.cc'' - ''^src/libutil-tests/references\.cc'' - ''^src/libutil-tests/suggestions\.cc'' - ''^src/libutil-tests/url\.cc'' - ''^src/libutil-tests/xml-writer\.cc'' ]; }; shellcheck = { From e4f62e46088919428a68bd8014201dc8e379fed7 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 18 Jul 2025 12:47:27 -0400 Subject: [PATCH 050/382] Apply clang-format universally. * It is tough to contribute to a project that doesn't use a formatter, * It is extra hard to contribute to a project which has configured the formatter, but ignores it for some files * Code formatting makes it harder to hide obscure / weird bugs by accident or on purpose, Let's rip the bandaid off? Note that PRs currently in flight should be able to be merged relatively easily by applying `clang-format` to their tip prior to merge. 
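For the PRs already in flight, one workable sequence is roughly the following (a sketch; it assumes the upstream remote is called `upstream` and that master already carries this formatting commit):

    git rebase upstream/master
    # re-run the repository formatter; --until-stable comes from the format.sh change above
    nix develop -c ./maintainers/format.sh --until-stable
    git add -u
    git commit --amend --no-edit    # or keep the reformatting as its own commit

Alternatively, running clang-format -i over just the files the branch touches keeps the formatting churn confined to the PR.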
--- src/build-remote/build-remote.cc | 83 +- src/libcmd/built-path.cc | 64 +- src/libcmd/command-installable-value.cc | 2 +- src/libcmd/command.cc | 2 +- src/libcmd/common-eval-args.cc | 83 +- src/libcmd/editor-for.cc | 10 +- src/libcmd/include/nix/cmd/built-path.hh | 51 +- .../nix/cmd/command-installable-value.hh | 2 +- .../include/nix/cmd/common-eval-args.hh | 31 +- .../include/nix/cmd/compatibility-settings.hh | 2 +- src/libcmd/include/nix/cmd/editor-for.hh | 2 +- .../include/nix/cmd/installable-attr-path.hh | 7 +- .../nix/cmd/installable-derived-path.hh | 14 +- .../include/nix/cmd/installable-flake.hh | 25 +- .../include/nix/cmd/installable-value.hh | 29 +- src/libcmd/include/nix/cmd/installables.hh | 34 +- src/libcmd/include/nix/cmd/legacy.hh | 7 +- src/libcmd/include/nix/cmd/markdown.hh | 2 +- .../include/nix/cmd/misc-store-flags.hh | 6 +- src/libcmd/include/nix/cmd/network-proxy.hh | 2 +- src/libcmd/include/nix/cmd/repl-interacter.hh | 10 +- src/libcmd/include/nix/cmd/repl.hh | 14 +- src/libcmd/installable-attr-path.cc | 55 +- src/libcmd/installable-derived-path.cc | 50 +- src/libcmd/installable-flake.cc | 101 +- src/libcmd/installable-value.cc | 29 +- src/libcmd/installables.cc | 319 +- src/libcmd/markdown.cc | 17 +- src/libcmd/misc-store-flags.cc | 78 +- src/libcmd/network-proxy.cc | 2 +- src/libcmd/repl-interacter.cc | 13 +- src/libcmd/repl.cc | 283 +- src/libexpr-c/nix_api_expr.cc | 32 +- src/libexpr-c/nix_api_external.cc | 14 +- .../include/nix/expr/tests/libexpr.hh | 254 +- .../include/nix/expr/tests/nix_api_expr.hh | 3 +- .../include/nix/expr/tests/value/context.hh | 14 +- .../tests/value/context.cc | 2 +- src/libexpr-tests/derived-path.cc | 22 +- src/libexpr-tests/error_traces.cc | 2684 ++++++++--------- src/libexpr-tests/eval.cc | 39 +- src/libexpr-tests/json.cc | 114 +- src/libexpr-tests/main.cc | 18 +- src/libexpr-tests/nix_api_expr.cc | 1 + src/libexpr-tests/nix_api_external.cc | 3 +- src/libexpr-tests/nix_api_value.cc | 3 +- src/libexpr-tests/primops.cc | 1575 +++++----- src/libexpr-tests/search-path.cc | 96 +- src/libexpr-tests/trivial.cc | 471 +-- src/libexpr-tests/value/context.cc | 79 +- src/libexpr-tests/value/print.cc | 331 +- src/libexpr/attr-path.cc | 52 +- src/libexpr/attr-set.cc | 13 +- src/libexpr/eval-cache.cc | 283 +- src/libexpr/eval-error.cc | 18 +- src/libexpr/eval-profiler-settings.cc | 2 +- src/libexpr/eval-profiler.cc | 2 +- src/libexpr/eval-settings.cc | 25 +- src/libexpr/eval.cc | 1169 ++++--- src/libexpr/function-trace.cc | 2 +- src/libexpr/get-drvs.cc | 206 +- src/libexpr/include/nix/expr/attr-path.hh | 9 +- src/libexpr/include/nix/expr/attr-set.hh | 71 +- src/libexpr/include/nix/expr/eval-cache.hh | 33 +- src/libexpr/include/nix/expr/eval-error.hh | 3 +- src/libexpr/include/nix/expr/eval-inline.hh | 43 +- .../nix/expr/eval-profiler-settings.hh | 2 +- src/libexpr/include/nix/expr/eval-profiler.hh | 2 +- src/libexpr/include/nix/expr/eval-settings.hh | 96 +- src/libexpr/include/nix/expr/eval.hh | 178 +- .../include/nix/expr/function-trace.hh | 2 +- .../include/nix/expr/gc-small-vector.hh | 8 +- src/libexpr/include/nix/expr/get-drvs.hh | 50 +- src/libexpr/include/nix/expr/json-to-value.hh | 2 +- src/libexpr/include/nix/expr/nixexpr.hh | 391 ++- src/libexpr/include/nix/expr/parser-state.hh | 96 +- src/libexpr/include/nix/expr/primops.hh | 6 +- .../include/nix/expr/print-ambiguous.hh | 8 +- src/libexpr/include/nix/expr/print-options.hh | 4 +- src/libexpr/include/nix/expr/print.hh | 26 +- .../include/nix/expr/repl-exit-status.hh | 2 +- 
src/libexpr/include/nix/expr/search-path.hh | 2 +- src/libexpr/include/nix/expr/symbol-table.hh | 47 +- src/libexpr/include/nix/expr/value-to-json.hh | 17 +- src/libexpr/include/nix/expr/value-to-xml.hh | 10 +- src/libexpr/include/nix/expr/value.hh | 9 +- src/libexpr/include/nix/expr/value/context.hh | 21 +- src/libexpr/json-to-value.cc | 68 +- src/libexpr/lexer-helpers.hh | 2 +- src/libexpr/nixexpr.cc | 106 +- src/libexpr/paths.cc | 2 +- src/libexpr/primops.cc | 1644 +++++----- src/libexpr/primops/context.cc | 228 +- src/libexpr/primops/fetchClosure.cc | 158 +- src/libexpr/primops/fetchMercurial.cc | 53 +- src/libexpr/primops/fetchTree.cc | 231 +- src/libexpr/primops/fromTOML.cc | 143 +- src/libexpr/print-ambiguous.cc | 8 +- src/libexpr/print.cc | 89 +- src/libexpr/search-path.cc | 29 +- src/libexpr/value-to-json.cc | 152 +- src/libexpr/value-to-xml.cc | 236 +- src/libexpr/value/context.cc | 78 +- src/libfetchers-tests/access-tokens.cc | 3 +- src/libfetchers-tests/public-key.cc | 48 +- src/libfetchers/attrs.cc | 19 +- src/libfetchers/cache.cc | 58 +- src/libfetchers/fetch-settings.cc | 6 +- src/libfetchers/fetch-to-store.cc | 31 +- src/libfetchers/fetchers.cc | 111 +- src/libfetchers/filtering-source-accessor.cc | 22 +- src/libfetchers/git-utils.cc | 409 ++- src/libfetchers/git.cc | 330 +- src/libfetchers/github.cc | 241 +- src/libfetchers/include/nix/fetchers/attrs.hh | 2 +- src/libfetchers/include/nix/fetchers/cache.hh | 31 +- .../include/nix/fetchers/fetch-settings.hh | 26 +- .../include/nix/fetchers/fetch-to-store.hh | 2 +- .../include/nix/fetchers/fetchers.hh | 80 +- .../nix/fetchers/filtering-source-accessor.hh | 2 +- .../include/nix/fetchers/git-utils.hh | 52 +- .../include/nix/fetchers/input-cache.hh | 2 +- .../include/nix/fetchers/registry.hh | 28 +- .../nix/fetchers/store-path-accessor.hh | 2 +- .../include/nix/fetchers/tarball.hh | 9 +- src/libfetchers/indirect.cc | 44 +- src/libfetchers/input-cache.cc | 2 +- src/libfetchers/mercurial.cc | 128 +- src/libfetchers/path.cc | 23 +- src/libfetchers/registry.cc | 50 +- src/libfetchers/store-path-accessor.cc | 2 +- src/libfetchers/tarball.cc | 130 +- src/libflake-tests/flakeref.cc | 100 +- src/libflake-tests/url-name.cc | 123 +- src/libflake/config.cc | 43 +- src/libflake/flake.cc | 433 ++- src/libflake/flakeref.cc | 70 +- .../include/nix/flake/flake-primops.hh | 2 +- src/libflake/include/nix/flake/flake.hh | 22 +- src/libflake/include/nix/flake/flakeref.hh | 22 +- src/libflake/include/nix/flake/lockfile.hh | 21 +- src/libflake/include/nix/flake/settings.hh | 2 +- src/libflake/include/nix/flake/url-name.hh | 2 +- src/libflake/lockfile.cc | 84 +- src/libflake/settings.cc | 2 +- src/libflake/url-name.cc | 12 +- src/libmain/common-args.cc | 29 +- src/libmain/include/nix/main/common-args.hh | 11 +- src/libmain/include/nix/main/loggers.hh | 12 +- src/libmain/include/nix/main/plugin.hh | 3 +- src/libmain/include/nix/main/shared.hh | 44 +- src/libmain/loggers.cc | 2 +- src/libmain/plugin.cc | 2 +- src/libmain/progress-bar.cc | 178 +- src/libmain/shared.cc | 177 +- src/libmain/unix/stack.cc | 21 +- src/libstore-test-support/derived-path.cc | 2 +- .../include/nix/store/tests/derived-path.hh | 17 +- .../include/nix/store/tests/nix_api_store.hh | 7 +- .../include/nix/store/tests/outputs-spec.hh | 5 +- .../include/nix/store/tests/path.hh | 13 +- .../include/nix/store/tests/protocol.hh | 25 +- src/libstore-test-support/outputs-spec.cc | 2 +- src/libstore-test-support/path.cc | 36 +- src/libstore-tests/common-protocol.cc | 132 +- 
src/libstore-tests/content-address.cc | 31 +- .../derivation-advanced-attrs.cc | 2 +- src/libstore-tests/derivation.cc | 308 +- src/libstore-tests/derived-path.cc | 47 +- src/libstore-tests/downstream-placeholder.cc | 20 +- src/libstore-tests/legacy-ssh-store.cc | 2 +- src/libstore-tests/machines.cc | 100 +- src/libstore-tests/nar-info-disk-cache.cc | 9 +- src/libstore-tests/nar-info.cc | 72 +- src/libstore-tests/outputs-spec.cc | 189 +- src/libstore-tests/path-info.cc | 72 +- src/libstore-tests/path.cc | 138 +- src/libstore-tests/references.cc | 2 +- src/libstore-tests/serve-protocol.cc | 350 ++- src/libstore-tests/store-reference.cc | 2 +- src/libstore-tests/worker-protocol.cc | 605 ++-- src/libstore/binary-cache-store.cc | 291 +- src/libstore/build-result.cc | 2 +- .../build/derivation-building-goal.cc | 539 ++-- src/libstore/build/derivation-goal.cc | 119 +- .../build/derivation-trampoline-goal.cc | 2 +- .../build/drv-output-substitution-goal.cc | 51 +- src/libstore/build/entry-points.cc | 45 +- src/libstore/build/goal.cc | 58 +- src/libstore/build/substitution-goal.cc | 81 +- src/libstore/build/worker.cc | 202 +- src/libstore/builtins/buildenv.cc | 44 +- src/libstore/builtins/fetchurl.cc | 14 +- src/libstore/builtins/unpack-channel.cc | 5 +- src/libstore/common-protocol.cc | 42 +- src/libstore/common-ssh-store-config.cc | 2 +- src/libstore/content-address.cc | 66 +- src/libstore/daemon.cc | 187 +- src/libstore/derivation-options.cc | 4 +- src/libstore/derivations.cc | 926 +++--- src/libstore/derived-path-map.cc | 63 +- src/libstore/derived-path.cc | 201 +- src/libstore/downstream-placeholder.cc | 46 +- src/libstore/dummy-store.cc | 51 +- src/libstore/export-import.cc | 36 +- src/libstore/filetransfer.cc | 322 +- src/libstore/gc.cc | 152 +- src/libstore/globals.cc | 130 +- src/libstore/http-binary-cache-store.cc | 54 +- .../include/nix/store/binary-cache-store.hh | 71 +- .../include/nix/store/build-result.hh | 63 +- .../store/build/derivation-building-goal.hh | 23 +- .../store/build/derivation-building-misc.hh | 4 +- .../nix/store/build/derivation-goal.hh | 28 +- .../store/build/derivation-trampoline-goal.hh | 2 +- .../build/drv-output-substitution-goal.hh | 19 +- src/libstore/include/nix/store/build/goal.hh | 113 +- .../nix/store/build/substitution-goal.hh | 28 +- .../include/nix/store/build/worker.hh | 34 +- src/libstore/include/nix/store/builtins.hh | 5 +- .../include/nix/store/builtins/buildenv.hh | 35 +- .../include/nix/store/common-protocol-impl.hh | 24 +- .../include/nix/store/common-protocol.hh | 17 +- .../nix/store/common-ssh-store-config.hh | 22 +- .../include/nix/store/content-address.hh | 36 +- src/libstore/include/nix/store/daemon.hh | 9 +- .../include/nix/store/derivation-options.hh | 2 +- src/libstore/include/nix/store/derivations.hh | 129 +- .../include/nix/store/derived-path-map.hh | 15 +- .../include/nix/store/derived-path.hh | 64 +- .../nix/store/downstream-placeholder.hh | 7 +- .../include/nix/store/filetransfer.hh | 48 +- src/libstore/include/nix/store/gc-store.hh | 6 +- src/libstore/include/nix/store/globals.hh | 355 ++- .../nix/store/http-binary-cache-store.hh | 2 +- .../include/nix/store/indirect-root-store.hh | 2 +- .../include/nix/store/legacy-ssh-store.hh | 94 +- .../store/length-prefixed-protocol-helper.hh | 59 +- .../nix/store/local-binary-cache-store.hh | 2 +- .../include/nix/store/local-fs-store.hh | 28 +- .../include/nix/store/local-overlay-store.hh | 40 +- src/libstore/include/nix/store/local-store.hh | 77 +- src/libstore/include/nix/store/log-store.hh 
| 3 +- src/libstore/include/nix/store/machines.hh | 5 +- .../nix/store/make-content-addressed.hh | 12 +- src/libstore/include/nix/store/names.hh | 5 +- .../include/nix/store/nar-accessor.hh | 6 +- .../include/nix/store/nar-info-disk-cache.hh | 28 +- src/libstore/include/nix/store/nar-info.hh | 33 +- .../include/nix/store/outputs-spec.hh | 36 +- .../include/nix/store/parsed-derivations.hh | 2 +- src/libstore/include/nix/store/path-info.hh | 41 +- .../include/nix/store/path-references.hh | 2 +- src/libstore/include/nix/store/path-regex.hh | 4 +- .../include/nix/store/path-with-outputs.hh | 2 +- src/libstore/include/nix/store/path.hh | 17 +- src/libstore/include/nix/store/pathlocks.hh | 9 +- .../nix/store/posix-fs-canonicalise.hh | 8 +- src/libstore/include/nix/store/profiles.hh | 22 +- src/libstore/include/nix/store/realisation.hh | 67 +- .../include/nix/store/remote-fs-accessor.hh | 7 +- .../nix/store/remote-store-connection.hh | 22 +- .../include/nix/store/remote-store.hh | 81 +- .../include/nix/store/restricted-store.hh | 2 +- .../nix/store/s3-binary-cache-store.hh | 2 +- src/libstore/include/nix/store/s3.hh | 33 +- .../nix/store/serve-protocol-connection.hh | 2 +- .../include/nix/store/serve-protocol-impl.hh | 27 +- .../include/nix/store/serve-protocol.hh | 31 +- src/libstore/include/nix/store/sqlite.hh | 66 +- src/libstore/include/nix/store/ssh-store.hh | 2 +- src/libstore/include/nix/store/ssh.hh | 10 +- src/libstore/include/nix/store/store-api.hh | 206 +- src/libstore/include/nix/store/store-cast.hh | 2 +- .../include/nix/store/store-dir-config.hh | 16 +- src/libstore/include/nix/store/store-open.hh | 2 +- .../include/nix/store/store-reference.hh | 2 +- .../include/nix/store/store-registration.hh | 2 +- .../include/nix/store/uds-remote-store.hh | 35 +- .../nix/store/worker-protocol-connection.hh | 2 +- .../include/nix/store/worker-protocol-impl.hh | 27 +- .../include/nix/store/worker-protocol.hh | 45 +- src/libstore/indirect-root-store.cc | 2 +- src/libstore/keys.cc | 2 +- src/libstore/legacy-ssh-store.cc | 164 +- .../linux/include/nix/store/personality.hh | 2 - src/libstore/linux/personality.cc | 49 +- src/libstore/local-binary-cache-store.cc | 35 +- src/libstore/local-fs-store.cc | 34 +- src/libstore/local-overlay-store.cc | 70 +- src/libstore/local-store.cc | 541 ++-- src/libstore/log-store.cc | 5 +- src/libstore/machines.cc | 81 +- src/libstore/make-content-addressed.cc | 18 +- src/libstore/misc.cc | 475 ++- src/libstore/names.cc | 64 +- src/libstore/nar-accessor.cc | 76 +- src/libstore/nar-info-disk-cache.cc | 217 +- src/libstore/nar-info.cc | 79 +- src/libstore/optimise-store.cc | 94 +- src/libstore/outputs-spec.cc | 164 +- src/libstore/parsed-derivations.cc | 27 +- src/libstore/path-info.cc | 127 +- src/libstore/path-references.cc | 21 +- src/libstore/path-with-outputs.cc | 99 +- src/libstore/path.cc | 23 +- src/libstore/pathlocks.cc | 7 +- src/libstore/posix-fs-canonicalise.cc | 48 +- src/libstore/profiles.cc | 73 +- src/libstore/realisation.cc | 59 +- src/libstore/remote-fs-accessor.cc | 41 +- src/libstore/remote-store.cc | 410 ++- src/libstore/restricted-store.cc | 2 +- src/libstore/s3-binary-cache-store.cc | 289 +- src/libstore/serve-protocol-connection.cc | 2 +- src/libstore/serve-protocol.cc | 62 +- src/libstore/sqlite.cc | 36 +- src/libstore/ssh-store.cc | 45 +- src/libstore/ssh.cc | 111 +- src/libstore/store-api.cc | 498 ++- src/libstore/store-dir-config.cc | 2 +- src/libstore/store-reference.cc | 2 +- src/libstore/store-registration.cc | 2 +- 
src/libstore/uds-remote-store.cc | 40 +- src/libstore/unix/build/child.cc | 2 +- .../unix/build/darwin-derivation-builder.cc | 2 +- src/libstore/unix/build/derivation-builder.cc | 770 +++-- src/libstore/unix/build/hook-instance.cc | 10 +- .../unix/build/linux-derivation-builder.cc | 2 +- .../unix/include/nix/store/build/child.hh | 3 +- .../nix/store/build/derivation-builder.hh | 9 +- .../include/nix/store/build/hook-instance.hh | 2 +- .../unix/include/nix/store/user-lock.hh | 4 +- src/libstore/unix/pathlocks.cc | 37 +- src/libstore/unix/user-lock.cc | 76 +- src/libstore/windows/pathlocks.cc | 2 +- src/libstore/worker-protocol-connection.cc | 2 +- src/libstore/worker-protocol.cc | 161 +- src/libutil-c/nix_api_util_internal.h | 16 +- src/libutil-test-support/hash.cc | 20 +- .../nix/util/tests/characterization.hh | 45 +- .../nix/util/tests/gtest-with-params.hh | 2 +- .../include/nix/util/tests/hash.hh | 5 +- .../include/nix/util/tests/nix_api_util.hh | 4 +- .../include/nix/util/tests/string_callback.hh | 2 +- src/libutil-test-support/string_callback.cc | 2 +- src/libutil-tests/args.cc | 184 +- src/libutil-tests/canon-path.cc | 342 ++- src/libutil-tests/checked-arithmetic.cc | 4 +- src/libutil-tests/chunked-vector.cc | 96 +- src/libutil-tests/closure.cc | 54 +- src/libutil-tests/compression.cc | 188 +- src/libutil-tests/config.cc | 505 ++-- src/libutil-tests/executable-path.cc | 2 +- src/libutil-tests/file-content-address.cc | 42 +- src/libutil-tests/file-system.cc | 2 +- src/libutil-tests/git.cc | 124 +- src/libutil-tests/hash.cc | 204 +- src/libutil-tests/hilite.cc | 110 +- src/libutil-tests/json-utils.cc | 85 +- src/libutil-tests/logging.cc | 8 +- src/libutil-tests/lru-cache.cc | 259 +- src/libutil-tests/monitorfdhup.cc | 2 +- src/libutil-tests/nix_api_util.cc | 2 +- src/libutil-tests/pool.cc | 222 +- src/libutil-tests/position.cc | 2 + src/libutil-tests/references.cc | 31 +- src/libutil-tests/spawn.cc | 2 +- src/libutil-tests/suggestions.cc | 70 +- src/libutil-tests/url.cc | 639 ++-- src/libutil-tests/xml-writer.cc | 189 +- src/libutil/archive.cc | 71 +- src/libutil/args.cc | 361 +-- src/libutil/canon-path.cc | 51 +- src/libutil/compression.cc | 8 +- src/libutil/compute-levels.cc | 16 +- src/libutil/config-global.cc | 2 +- src/libutil/configuration.cc | 139 +- src/libutil/current-process.cc | 63 +- src/libutil/english.cc | 9 +- src/libutil/environment-variables.cc | 2 +- src/libutil/error.cc | 150 +- src/libutil/exit.cc | 2 +- src/libutil/experimental-features.cc | 20 +- src/libutil/file-content-address.cc | 37 +- src/libutil/file-descriptor.cc | 53 +- src/libutil/file-system.cc | 259 +- src/libutil/freebsd/freebsd-jail.cc | 2 +- .../freebsd/include/nix/util/freebsd-jail.hh | 2 +- src/libutil/fs-sink.cc | 81 +- src/libutil/git.cc | 126 +- src/libutil/hash.cc | 177 +- src/libutil/hilite.cc | 14 +- .../nix/util/abstract-setting-to-json.hh | 2 +- src/libutil/include/nix/util/ansicolor.hh | 3 +- src/libutil/include/nix/util/archive.hh | 12 +- src/libutil/include/nix/util/args.hh | 110 +- src/libutil/include/nix/util/args/root.hh | 5 +- src/libutil/include/nix/util/callback.hh | 10 +- src/libutil/include/nix/util/canon-path.hh | 104 +- .../include/nix/util/checked-arithmetic.hh | 10 +- .../include/nix/util/chunked-vector.hh | 14 +- src/libutil/include/nix/util/closure.hh | 29 +- src/libutil/include/nix/util/comparator.hh | 21 +- src/libutil/include/nix/util/compression.hh | 2 +- src/libutil/include/nix/util/config-global.hh | 2 +- src/libutil/include/nix/util/config-impl.hh | 57 +- 
src/libutil/include/nix/util/configuration.hh | 103 +- .../include/nix/util/current-process.hh | 4 +- src/libutil/include/nix/util/english.hh | 9 +- .../include/nix/util/environment-variables.hh | 2 +- src/libutil/include/nix/util/error.hh | 121 +- src/libutil/include/nix/util/exec.hh | 2 +- src/libutil/include/nix/util/exit.hh | 15 +- .../include/nix/util/experimental-features.hh | 12 +- .../include/nix/util/file-content-address.hh | 22 +- .../include/nix/util/file-descriptor.hh | 23 +- .../include/nix/util/file-path-impl.hh | 28 +- src/libutil/include/nix/util/file-path.hh | 19 +- src/libutil/include/nix/util/file-system.hh | 108 +- src/libutil/include/nix/util/finally.hh | 10 +- src/libutil/include/nix/util/fmt.hh | 51 +- src/libutil/include/nix/util/fs-sink.hh | 38 +- src/libutil/include/nix/util/git.hh | 46 +- src/libutil/include/nix/util/hash.hh | 13 +- src/libutil/include/nix/util/hilite.hh | 9 +- src/libutil/include/nix/util/json-impls.hh | 17 +- src/libutil/include/nix/util/json-utils.hh | 47 +- src/libutil/include/nix/util/logging.hh | 153 +- src/libutil/include/nix/util/lru-cache.hh | 2 +- .../nix/util/memory-source-accessor.hh | 62 +- src/libutil/include/nix/util/muxable-pipe.hh | 2 +- src/libutil/include/nix/util/os-string.hh | 2 +- src/libutil/include/nix/util/pool.hh | 32 +- src/libutil/include/nix/util/pos-idx.hh | 2 +- src/libutil/include/nix/util/pos-table.hh | 2 +- src/libutil/include/nix/util/position.hh | 88 +- .../include/nix/util/posix-source-accessor.hh | 7 +- src/libutil/include/nix/util/processes.hh | 24 +- src/libutil/include/nix/util/ref.hh | 23 +- src/libutil/include/nix/util/references.hh | 18 +- .../include/nix/util/regex-combinators.hh | 2 +- src/libutil/include/nix/util/repair-flag.hh | 1 + src/libutil/include/nix/util/serialise.hh | 217 +- src/libutil/include/nix/util/signals.hh | 8 +- .../include/nix/util/signature/local-keys.hh | 18 +- .../include/nix/util/signature/signer.hh | 4 +- src/libutil/include/nix/util/sort.hh | 2 +- .../include/nix/util/source-accessor.hh | 60 +- src/libutil/include/nix/util/source-path.hh | 28 +- src/libutil/include/nix/util/split.hh | 10 +- src/libutil/include/nix/util/strings.hh | 2 +- src/libutil/include/nix/util/suggestions.hh | 32 +- src/libutil/include/nix/util/sync.hh | 85 +- src/libutil/include/nix/util/tarfile.hh | 2 +- src/libutil/include/nix/util/terminal.hh | 7 +- src/libutil/include/nix/util/thread-pool.hh | 33 +- src/libutil/include/nix/util/topo-sort.hh | 15 +- src/libutil/include/nix/util/types.hh | 64 +- .../include/nix/util/unix-domain-socket.hh | 2 +- src/libutil/include/nix/util/url-parts.hh | 5 +- src/libutil/include/nix/util/url.hh | 11 +- src/libutil/include/nix/util/users.hh | 5 +- src/libutil/include/nix/util/util.hh | 152 +- .../include/nix/util/variant-wrapper.hh | 25 +- src/libutil/include/nix/util/xml-writer.hh | 17 +- src/libutil/json-utils.cc | 27 +- src/libutil/linux/cgroup.cc | 24 +- src/libutil/linux/include/nix/util/cgroup.hh | 2 +- .../include/nix/util/linux-namespaces.hh | 2 +- src/libutil/linux/linux-namespaces.cc | 52 +- src/libutil/logging.cc | 150 +- src/libutil/memory-source-accessor.cc | 85 +- src/libutil/mounted-source-accessor.cc | 2 +- src/libutil/pos-table.cc | 2 +- src/libutil/position.cc | 59 +- src/libutil/posix-source-accessor.cc | 83 +- src/libutil/references.cc | 45 +- src/libutil/serialise.cc | 155 +- src/libutil/signature/local-keys.cc | 17 +- src/libutil/signature/signer.cc | 5 +- src/libutil/source-accessor.cc | 50 +- src/libutil/source-path.cc | 58 +- 
src/libutil/subdir-source-accessor.cc | 2 +- src/libutil/suggestions.cc | 63 +- src/libutil/tarfile.cc | 4 +- src/libutil/tee-logger.cc | 2 +- src/libutil/terminal.cc | 52 +- src/libutil/thread-pool.cc | 16 +- src/libutil/union-source-accessor.cc | 2 +- src/libutil/unix-domain-socket.cc | 30 +- src/libutil/unix/environment-variables.cc | 2 +- src/libutil/unix/file-descriptor.cc | 54 +- src/libutil/unix/file-path.cc | 4 +- src/libutil/unix/file-system.cc | 2 +- .../unix/include/nix/util/monitor-fd.hh | 2 +- .../unix/include/nix/util/signals-impl.hh | 10 +- src/libutil/unix/muxable-pipe.cc | 2 +- src/libutil/unix/os-string.cc | 2 +- src/libutil/unix/processes.cc | 162 +- src/libutil/unix/signals.cc | 18 +- src/libutil/unix/users.cc | 21 +- src/libutil/url.cc | 60 +- src/libutil/users.cc | 7 +- src/libutil/util.cc | 79 +- src/libutil/widecharwidth/widechar_width.h | 1754 ++--------- src/libutil/windows/environment-variables.cc | 2 +- src/libutil/windows/file-descriptor.cc | 67 +- src/libutil/windows/file-path.cc | 15 +- src/libutil/windows/file-system.cc | 2 +- .../windows/include/nix/util/signals-impl.hh | 2 +- .../include/nix/util/windows-async-pipe.hh | 2 +- .../windows/include/nix/util/windows-error.hh | 15 +- src/libutil/windows/muxable-pipe.cc | 2 +- src/libutil/windows/os-string.cc | 2 +- src/libutil/windows/processes.cc | 15 +- src/libutil/windows/users.cc | 12 +- src/libutil/windows/windows-async-pipe.cc | 2 +- src/libutil/windows/windows-error.cc | 34 +- src/libutil/xml-writer.cc | 62 +- src/nix-build/nix-build.cc | 228 +- src/nix-channel/nix-channel.cc | 135 +- .../nix-collect-garbage.cc | 20 +- src/nix-env/nix-env.cc | 719 +++-- src/nix-env/user-env.cc | 41 +- src/nix-env/user-env.hh | 7 +- src/nix-instantiate/nix-instantiate.cc | 45 +- src/nix-store/dotgraph.cc | 29 +- src/nix-store/graphml.cc | 16 +- src/nix-store/nix-store.cc | 884 +++--- src/nix/add-to-store.cc | 14 +- src/nix/app.cc | 94 +- src/nix/build.cc | 69 +- src/nix/bundle.cc | 48 +- src/nix/cat.cc | 20 +- src/nix/config-check.cc | 40 +- src/nix/config.cc | 19 +- src/nix/copy.cc | 15 +- src/nix/crash-handler.cc | 4 +- src/nix/crash-handler.hh | 3 +- src/nix/derivation-add.cc | 9 +- src/nix/derivation-show.cc | 15 +- src/nix/derivation.cc | 11 +- src/nix/develop.cc | 133 +- src/nix/diff-closures.cc | 36 +- src/nix/dump-path.cc | 20 +- src/nix/edit.cc | 12 +- src/nix/eval.cc | 44 +- src/nix/flake-command.hh | 2 +- src/nix/flake.cc | 848 +++--- src/nix/hash.cc | 138 +- src/nix/log.cc | 24 +- src/nix/ls.cc | 41 +- src/nix/main.cc | 181 +- src/nix/make-content-addressed.cc | 12 +- src/nix/man-pages.cc | 2 +- src/nix/man-pages.hh | 2 +- src/nix/nar.cc | 15 +- src/nix/optimise-store.cc | 4 +- src/nix/path-from-hash-part.cc | 4 +- src/nix/path-info.cc | 29 +- src/nix/prefetch.cc | 104 +- src/nix/profile.cc | 346 ++- src/nix/realisation.cc | 25 +- src/nix/registry.cc | 78 +- src/nix/repl.cc | 47 +- src/nix/run.cc | 55 +- src/nix/run.hh | 10 +- src/nix/search.cc | 57 +- src/nix/self-exe.cc | 2 +- src/nix/self-exe.hh | 2 +- src/nix/sigs.cc | 34 +- src/nix/store-copy-log.cc | 4 +- src/nix/store-delete.cc | 6 +- src/nix/store-gc.cc | 4 +- src/nix/store-info.cc | 8 +- src/nix/store-repair.cc | 4 +- src/nix/store.cc | 10 +- src/nix/unix/daemon.cc | 166 +- src/nix/upgrade-nix.cc | 35 +- src/nix/verify.cc | 29 +- src/nix/why-depends.cc | 93 +- tests/functional/plugins/plugintest.cc | 5 +- .../functional/test-libstoreconsumer/main.cc | 10 +- tests/nixos/ca-fd-leak/sender.c | 28 +- tests/nixos/ca-fd-leak/smuggler.c | 33 +- 
tests/nixos/user-sandboxing/attacker.c | 128 +- 587 files changed, 23258 insertions(+), 23135 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index cd13e6670..786085106 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -6,7 +6,7 @@ #include #include #ifdef __APPLE__ -#include +# include #endif #include "nix/store/machines.hh" @@ -26,8 +26,7 @@ using namespace nix; using std::cin; -static void handleAlarm(int sig) { -} +static void handleAlarm(int sig) {} std::string escapeUri(std::string uri) { @@ -42,13 +41,15 @@ static AutoCloseFD openSlotLock(const Machine & m, uint64_t slot) return openLockFile(fmt("%s/%s-%d", currentLoad, escapeUri(m.storeUri.render()), slot), true); } -static bool allSupportedLocally(Store & store, const StringSet& requiredFeatures) { +static bool allSupportedLocally(Store & store, const StringSet & requiredFeatures) +{ for (auto & feature : requiredFeatures) - if (!store.config.systemFeatures.get().count(feature)) return false; + if (!store.config.systemFeatures.get().count(feature)) + return false; return true; } -static int main_build_remote(int argc, char * * argv) +static int main_build_remote(int argc, char ** argv) { { logger = makeJSONLogger(getStandardError()); @@ -85,7 +86,7 @@ static int main_build_remote(int argc, char * * argv) that gets cleared on reboot, but it wouldn't work on macOS. */ auto currentLoadName = "/current-load"; if (auto localStore = store.dynamic_pointer_cast()) - currentLoad = std::string { localStore->config.stateDir } + currentLoadName; + currentLoad = std::string{localStore->config.stateDir} + currentLoadName; else currentLoad = settings.nixStateDir + currentLoadName; @@ -107,8 +108,11 @@ static int main_build_remote(int argc, char * * argv) try { auto s = readString(source); - if (s != "try") return 0; - } catch (EndOfFile &) { return 0; } + if (s != "try") + return 0; + } catch (EndOfFile &) { + return 0; + } auto amWilling = readInt(source); auto neededSystem = readString(source); @@ -117,10 +121,10 @@ static int main_build_remote(int argc, char * * argv) /* It would be possible to build locally after some builds clear out, so don't show the warning now: */ - bool couldBuildLocally = maxBuildJobs > 0 - && ( neededSystem == settings.thisSystem - || settings.extraPlatforms.get().count(neededSystem) > 0) - && allSupportedLocally(*store, requiredFeatures); + bool couldBuildLocally = + maxBuildJobs > 0 + && (neededSystem == settings.thisSystem || settings.extraPlatforms.get().count(neededSystem) > 0) + && allSupportedLocally(*store, requiredFeatures); /* It's possible to build this locally right now: */ bool canBuildLocally = amWilling && couldBuildLocally; @@ -139,11 +143,8 @@ static int main_build_remote(int argc, char * * argv) for (auto & m : machines) { debug("considering building on remote machine '%s'", m.storeUri.render()); - if (m.enabled && - m.systemSupported(neededSystem) && - m.allSupported(requiredFeatures) && - m.mandatoryMet(requiredFeatures)) - { + if (m.enabled && m.systemSupported(neededSystem) && m.allSupported(requiredFeatures) + && m.mandatoryMet(requiredFeatures)) { rightType = true; AutoCloseFD free; uint64_t load = 0; @@ -185,8 +186,7 @@ static int main_build_remote(int argc, char * * argv) if (!bestSlotLock) { if (rightType && !canBuildLocally) std::cerr << "# postpone\n"; - else - { + else { // build the hint template. 
std::string errorText = "Failed to find a machine for remote build!\n" @@ -205,16 +205,11 @@ static int main_build_remote(int argc, char * * argv) drvstr = ""; auto error = HintFmt::fromFormatString(errorText); - error - % drvstr - % neededSystem - % concatStringsSep(", ", requiredFeatures) + error % drvstr % neededSystem % concatStringsSep(", ", requiredFeatures) % machines.size(); for (auto & m : machines) - error - % concatStringsSep(", ", m.systemTypes) - % m.maxJobs + error % concatStringsSep(", ", m.systemTypes) % m.maxJobs % concatStringsSep(", ", m.supportedFeatures) % concatStringsSep(", ", m.mandatoryFeatures); @@ -242,9 +237,7 @@ static int main_build_remote(int argc, char * * argv) sshStore->connect(); } catch (std::exception & e) { auto msg = chomp(drainFD(5, false)); - printError("cannot build on '%s': %s%s", - storeUri, e.what(), - msg.empty() ? "" : ": " + msg); + printError("cannot build on '%s': %s%s", storeUri, e.what(), msg.empty() ? "" : ": " + msg); bestMachine->enabled = false; continue; } @@ -253,7 +246,7 @@ static int main_build_remote(int argc, char * * argv) } } -connected: + connected: close(5); assert(sshStore); @@ -265,13 +258,14 @@ connected: AutoCloseFD uploadLock; { - auto setUpdateLock = [&](auto && fileName){ + auto setUpdateLock = [&](auto && fileName) { uploadLock = openLockFile(currentLoad + "/" + escapeUri(fileName) + ".upload-lock", true); }; try { setUpdateLock(storeUri); } catch (SysError & e) { - if (e.errNo != ENAMETOOLONG) throw; + if (e.errNo != ENAMETOOLONG) + throw; // Try again hashing the store URL so we have a shorter path auto h = hashString(HashAlgorithm::MD5, storeUri); setUpdateLock(h.to_string(HashFormat::Base64, false)); @@ -315,7 +309,7 @@ connected: // // This condition mirrors that: that code enforces the "rules" outlined there; // we do the best we can given those "rules". - if (trustedOrLegacy || drv.type().isCA()) { + if (trustedOrLegacy || drv.type().isCA()) { // Hijack the inputs paths of the derivation to include all // the paths that come from the `inputDrvs` set. We don’t do // that for the derivations whose `inputDrvs` is empty @@ -335,32 +329,29 @@ connected: "The failed build directory was kept on the remote builder due to `--keep-failed`.%s", (settings.thisSystem == drv.platform || settings.extraPlatforms.get().count(drv.platform) > 0) ? " You can re-run the command with `--builders ''` to disable remote building for this invocation." 
- : "" - ); + : ""); } - throw Error("build of '%s' on '%s' failed: %s", store->printStorePath(*drvPath), storeUri, result.errorMsg); + throw Error( + "build of '%s' on '%s' failed: %s", store->printStorePath(*drvPath), storeUri, result.errorMsg); } } else { - copyClosure(*store, *sshStore, StorePathSet {*drvPath}, NoRepair, NoCheckSigs, substitute); - auto res = sshStore->buildPathsWithResults({ - DerivedPath::Built { - .drvPath = makeConstantStorePathRef(*drvPath), - .outputs = OutputsSpec::All {}, - } - }); + copyClosure(*store, *sshStore, StorePathSet{*drvPath}, NoRepair, NoCheckSigs, substitute); + auto res = sshStore->buildPathsWithResults({DerivedPath::Built{ + .drvPath = makeConstantStorePathRef(*drvPath), + .outputs = OutputsSpec::All{}, + }}); // One path to build should produce exactly one build result assert(res.size() == 1); optResult = std::move(res[0]); } - auto outputHashes = staticOutputHashes(*store, drv); std::set missingRealisations; StorePathSet missingPaths; if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations) && !drv.type().hasKnownOutputPaths()) { for (auto & outputName : wantedOutputs) { auto thisOutputHash = outputHashes.at(outputName); - auto thisOutputId = DrvOutput{ thisOutputHash, outputName }; + auto thisOutputId = DrvOutput{thisOutputHash, outputName}; if (!store->queryRealisation(thisOutputId)) { debug("missing output %s", outputName); assert(optResult); diff --git a/src/libcmd/built-path.cc b/src/libcmd/built-path.cc index 1238f9422..80d97dc3e 100644 --- a/src/libcmd/built-path.cc +++ b/src/libcmd/built-path.cc @@ -10,23 +10,13 @@ namespace nix { // Custom implementation to avoid `ref` ptr equality -GENERATE_CMP_EXT( - , - std::strong_ordering, - SingleBuiltPathBuilt, - *me->drvPath, - me->output); +GENERATE_CMP_EXT(, std::strong_ordering, SingleBuiltPathBuilt, *me->drvPath, me->output); // Custom implementation to avoid `ref` ptr equality // TODO no `GENERATE_CMP_EXT` because no `std::set::operator<=>` on // Darwin, per header. 
-GENERATE_EQUAL( - , - BuiltPathBuilt ::, - BuiltPathBuilt, - *me->drvPath, - me->outputs); +GENERATE_EQUAL(, BuiltPathBuilt ::, BuiltPathBuilt, *me->drvPath, me->outputs); StorePath SingleBuiltPath::outPath() const { @@ -34,8 +24,8 @@ StorePath SingleBuiltPath::outPath() const overloaded{ [](const SingleBuiltPath::Opaque & p) { return p.path; }, [](const SingleBuiltPath::Built & b) { return b.output.second; }, - }, raw() - ); + }, + raw()); } StorePathSet BuiltPath::outPaths() const @@ -49,13 +39,13 @@ StorePathSet BuiltPath::outPaths() const res.insert(path); return res; }, - }, raw() - ); + }, + raw()); } SingleDerivedPath::Built SingleBuiltPath::Built::discardOutputPath() const { - return SingleDerivedPath::Built { + return SingleDerivedPath::Built{ .drvPath = make_ref(drvPath->discardOutputPath()), .output = output.first, }; @@ -65,14 +55,10 @@ SingleDerivedPath SingleBuiltPath::discardOutputPath() const { return std::visit( overloaded{ - [](const SingleBuiltPath::Opaque & p) -> SingleDerivedPath { - return p; - }, - [](const SingleBuiltPath::Built & b) -> SingleDerivedPath { - return b.discardOutputPath(); - }, - }, raw() - ); + [](const SingleBuiltPath::Opaque & p) -> SingleDerivedPath { return p; }, + [](const SingleBuiltPath::Built & b) -> SingleDerivedPath { return b.discardOutputPath(); }, + }, + raw()); } nlohmann::json BuiltPath::Built::toJSON(const StoreDirConfig & store) const @@ -97,16 +83,12 @@ nlohmann::json SingleBuiltPath::Built::toJSON(const StoreDirConfig & store) cons nlohmann::json SingleBuiltPath::toJSON(const StoreDirConfig & store) const { - return std::visit([&](const auto & buildable) { - return buildable.toJSON(store); - }, raw()); + return std::visit([&](const auto & buildable) { return buildable.toJSON(store); }, raw()); } nlohmann::json BuiltPath::toJSON(const StoreDirConfig & store) const { - return std::visit([&](const auto & buildable) { - return buildable.toJSON(store); - }, raw()); + return std::visit([&](const auto & buildable) { return buildable.toJSON(store); }, raw()); } RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const @@ -116,20 +98,18 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const overloaded{ [&](const BuiltPath::Opaque & p) { res.insert(p.path); }, [&](const BuiltPath::Built & p) { - auto drvHashes = - staticOutputHashes(store, store.readDerivation(p.drvPath->outPath())); - for (auto& [outputName, outputPath] : p.outputs) { - if (experimentalFeatureSettings.isEnabled( - Xp::CaDerivations)) { + auto drvHashes = staticOutputHashes(store, store.readDerivation(p.drvPath->outPath())); + for (auto & [outputName, outputPath] : p.outputs) { + if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { auto drvOutput = get(drvHashes, outputName); if (!drvOutput) throw Error( "the derivation '%s' has unrealised output '%s' (derived-path.cc/toRealisedPaths)", - store.printStorePath(p.drvPath->outPath()), outputName); - auto thisRealisation = store.queryRealisation( - DrvOutput{*drvOutput, outputName}); - assert(thisRealisation); // We’ve built it, so we must - // have the realisation + store.printStorePath(p.drvPath->outPath()), + outputName); + auto thisRealisation = store.queryRealisation(DrvOutput{*drvOutput, outputName}); + assert(thisRealisation); // We’ve built it, so we must + // have the realisation res.insert(*thisRealisation); } else { res.insert(outputPath); @@ -141,4 +121,4 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const return res; } -} +} // namespace nix diff --git 
a/src/libcmd/command-installable-value.cc b/src/libcmd/command-installable-value.cc index 0884f17e9..34e161b4b 100644 --- a/src/libcmd/command-installable-value.cc +++ b/src/libcmd/command-installable-value.cc @@ -8,4 +8,4 @@ void InstallableValueCommand::run(ref store, ref installable run(store, installableValue); } -} +} // namespace nix diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc index 31f64fd5a..6b6bbe345 100644 --- a/src/libcmd/command.cc +++ b/src/libcmd/command.cc @@ -402,4 +402,4 @@ void MixOutLinkBase::createOutLinksMaybe(const std::vector createOutLinks(outLink, toBuiltPaths(buildables), *store2); } -} +} // namespace nix diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc index d275beb12..2e6ca4344 100644 --- a/src/libcmd/common-eval-args.cc +++ b/src/libcmd/common-eval-args.cc @@ -18,12 +18,11 @@ namespace nix { - fetchers::Settings fetchSettings; static GlobalConfig::Register rFetchSettings(&fetchSettings); -EvalSettings evalSettings { +EvalSettings evalSettings{ settings.readOnlyMode, { { @@ -31,7 +30,7 @@ EvalSettings evalSettings { [](EvalState & state, std::string_view rest) { experimentalFeatureSettings.require(Xp::Flakes); // FIXME `parseFlakeRef` should take a `std::string_view`. - auto flakeRef = parseFlakeRef(fetchSettings, std::string { rest }, {}, true, false); + auto flakeRef = parseFlakeRef(fetchSettings, std::string{rest}, {}, true, false); debug("fetching flake search path element '%s''", rest); auto [accessor, lockedRef] = flakeRef.resolve(state.store).lazyFetch(state.store); auto storePath = nix::fetchToStore( @@ -49,17 +48,14 @@ EvalSettings evalSettings { static GlobalConfig::Register rEvalSettings(&evalSettings); - flake::Settings flakeSettings; static GlobalConfig::Register rFlakeSettings(&flakeSettings); - -CompatibilitySettings compatibilitySettings {}; +CompatibilitySettings compatibilitySettings{}; static GlobalConfig::Register rCompatibilitySettings(&compatibilitySettings); - MixEvalArgs::MixEvalArgs() { addFlag({ @@ -67,7 +63,9 @@ MixEvalArgs::MixEvalArgs() .description = "Pass the value *expr* as the argument *name* to Nix functions.", .category = category, .labels = {"name", "expr"}, - .handler = {[&](std::string name, std::string expr) { autoArgs.insert_or_assign(name, AutoArg{AutoArgExpr{expr}}); }}, + .handler = {[&](std::string name, std::string expr) { + autoArgs.insert_or_assign(name, AutoArg{AutoArgExpr{expr}}); + }}, }); addFlag({ @@ -75,7 +73,9 @@ MixEvalArgs::MixEvalArgs() .description = "Pass the string *string* as the argument *name* to Nix functions.", .category = category, .labels = {"name", "string"}, - .handler = {[&](std::string name, std::string s) { autoArgs.insert_or_assign(name, AutoArg{AutoArgString{s}}); }}, + .handler = {[&](std::string name, std::string s) { + autoArgs.insert_or_assign(name, AutoArg{AutoArgString{s}}); + }}, }); addFlag({ @@ -83,7 +83,9 @@ MixEvalArgs::MixEvalArgs() .description = "Pass the contents of file *path* as the argument *name* to Nix functions.", .category = category, .labels = {"name", "path"}, - .handler = {[&](std::string name, std::string path) { autoArgs.insert_or_assign(name, AutoArg{AutoArgFile{path}}); }}, + .handler = {[&](std::string name, std::string path) { + autoArgs.insert_or_assign(name, AutoArg{AutoArgFile{path}}); + }}, .completer = completePath, }); @@ -107,18 +109,14 @@ MixEvalArgs::MixEvalArgs() )", .category = category, .labels = {"path"}, - .handler = {[&](std::string s) { - lookupPath.elements.emplace_back(LookupPath::Elem::parse(s)); 
- }}, + .handler = {[&](std::string s) { lookupPath.elements.emplace_back(LookupPath::Elem::parse(s)); }}, }); addFlag({ .longName = "impure", .description = "Allow access to mutable paths and repositories.", .category = category, - .handler = {[&]() { - evalSettings.pureEval = false; - }}, + .handler = {[&]() { evalSettings.pureEval = false; }}, }); addFlag({ @@ -130,7 +128,8 @@ MixEvalArgs::MixEvalArgs() auto from = parseFlakeRef(fetchSettings, _from, std::filesystem::current_path().string()); auto to = parseFlakeRef(fetchSettings, _to, std::filesystem::current_path().string()); fetchers::Attrs extraAttrs; - if (to.subdir != "") extraAttrs["dir"] = to.subdir; + if (to.subdir != "") + extraAttrs["dir"] = to.subdir; fetchers::overrideRegistry(from.input, to.input, extraAttrs); }}, .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { @@ -141,7 +140,7 @@ MixEvalArgs::MixEvalArgs() addFlag({ .longName = "eval-store", .description = - R"( + R"( The [URL of the Nix store](@docroot@/store/types/index.md#store-url-format) to use for evaluation, i.e. to store derivations (`.drv` files) and inputs referenced by them. )", @@ -156,20 +155,21 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state) auto res = state.buildBindings(autoArgs.size()); for (auto & [name, arg] : autoArgs) { auto v = state.allocValue(); - std::visit(overloaded { - [&](const AutoArgExpr & arg) { - state.mkThunk_(*v, state.parseExprFromString(arg.expr, compatibilitySettings.nixShellShebangArgumentsRelativeToScript ? state.rootPath(absPath(getCommandBaseDir())) : state.rootPath("."))); - }, - [&](const AutoArgString & arg) { - v->mkString(arg.s); - }, - [&](const AutoArgFile & arg) { - v->mkString(readFile(arg.path.string())); - }, - [&](const AutoArgStdin & arg) { - v->mkString(readFile(STDIN_FILENO)); - } - }, arg); + std::visit( + overloaded{ + [&](const AutoArgExpr & arg) { + state.mkThunk_( + *v, + state.parseExprFromString( + arg.expr, + compatibilitySettings.nixShellShebangArgumentsRelativeToScript + ? 
state.rootPath(absPath(getCommandBaseDir())) + : state.rootPath("."))); + }, + [&](const AutoArgString & arg) { v->mkString(arg.s); }, + [&](const AutoArgFile & arg) { v->mkString(readFile(arg.path.string())); }, + [&](const AutoArgStdin & arg) { v->mkString(readFile(STDIN_FILENO)); }}, + arg); res.insert(state.symbols.create(name), v); } return res.finish(); @@ -178,15 +178,8 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state) SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir) { if (EvalSettings::isPseudoUrl(s)) { - auto accessor = fetchers::downloadTarball( - state.store, - state.fetchSettings, - EvalSettings::resolvePseudoUrl(s)); - auto storePath = fetchToStore( - state.fetchSettings, - *state.store, - SourcePath(accessor), - FetchMode::Copy); + auto accessor = fetchers::downloadTarball(state.store, state.fetchSettings, EvalSettings::resolvePseudoUrl(s)); + auto storePath = fetchToStore(state.fetchSettings, *state.store, SourcePath(accessor), FetchMode::Copy); return state.storePath(storePath); } @@ -195,11 +188,7 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * bas auto flakeRef = parseFlakeRef(fetchSettings, std::string(s.substr(6)), {}, true, false); auto [accessor, lockedRef] = flakeRef.resolve(state.store).lazyFetch(state.store); auto storePath = nix::fetchToStore( - state.fetchSettings, - *state.store, - SourcePath(accessor), - FetchMode::Copy, - lockedRef.input.getName()); + state.fetchSettings, *state.store, SourcePath(accessor), FetchMode::Copy, lockedRef.input.getName()); state.allowPath(storePath); return state.storePath(storePath); } @@ -213,4 +202,4 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * bas return state.rootPath(baseDir ? 
absPath(s, *baseDir) : absPath(s)); } -} +} // namespace nix diff --git a/src/libcmd/editor-for.cc b/src/libcmd/editor-for.cc index a5d635859..95fdf95ad 100644 --- a/src/libcmd/editor-for.cc +++ b/src/libcmd/editor-for.cc @@ -11,14 +11,12 @@ Strings editorFor(const SourcePath & file, uint32_t line) throw Error("cannot open '%s' in an editor because it has no physical path", file); auto editor = getEnv("EDITOR").value_or("cat"); auto args = tokenizeString(editor); - if (line > 0 && ( - editor.find("emacs") != std::string::npos || - editor.find("nano") != std::string::npos || - editor.find("vim") != std::string::npos || - editor.find("kak") != std::string::npos)) + if (line > 0 + && (editor.find("emacs") != std::string::npos || editor.find("nano") != std::string::npos + || editor.find("vim") != std::string::npos || editor.find("kak") != std::string::npos)) args.push_back(fmt("+%d", line)); args.push_back(path->string()); return args; } -} +} // namespace nix diff --git a/src/libcmd/include/nix/cmd/built-path.hh b/src/libcmd/include/nix/cmd/built-path.hh index c885876a7..d41529e5a 100644 --- a/src/libcmd/include/nix/cmd/built-path.hh +++ b/src/libcmd/include/nix/cmd/built-path.hh @@ -8,7 +8,8 @@ namespace nix { struct SingleBuiltPath; -struct SingleBuiltPathBuilt { +struct SingleBuiltPathBuilt +{ ref drvPath; std::pair output; @@ -18,26 +19,25 @@ struct SingleBuiltPathBuilt { static SingleBuiltPathBuilt parse(const StoreDirConfig & store, std::string_view, std::string_view); nlohmann::json toJSON(const StoreDirConfig & store) const; - bool operator ==(const SingleBuiltPathBuilt &) const noexcept; - std::strong_ordering operator <=>(const SingleBuiltPathBuilt &) const noexcept; + bool operator==(const SingleBuiltPathBuilt &) const noexcept; + std::strong_ordering operator<=>(const SingleBuiltPathBuilt &) const noexcept; }; -using _SingleBuiltPathRaw = std::variant< - DerivedPathOpaque, - SingleBuiltPathBuilt ->; +using _SingleBuiltPathRaw = std::variant; -struct SingleBuiltPath : _SingleBuiltPathRaw { +struct SingleBuiltPath : _SingleBuiltPathRaw +{ using Raw = _SingleBuiltPathRaw; using Raw::Raw; using Opaque = DerivedPathOpaque; using Built = SingleBuiltPathBuilt; - bool operator == (const SingleBuiltPath &) const = default; - auto operator <=> (const SingleBuiltPath &) const = default; + bool operator==(const SingleBuiltPath &) const = default; + auto operator<=>(const SingleBuiltPath &) const = default; - inline const Raw & raw() const { + inline const Raw & raw() const + { return static_cast(*this); } @@ -51,7 +51,7 @@ struct SingleBuiltPath : _SingleBuiltPathRaw { static inline ref staticDrv(StorePath drvPath) { - return make_ref(SingleBuiltPath::Opaque { drvPath }); + return make_ref(SingleBuiltPath::Opaque{drvPath}); } /** @@ -59,40 +59,41 @@ static inline ref staticDrv(StorePath drvPath) * * See 'BuiltPath' for more an explanation. */ -struct BuiltPathBuilt { +struct BuiltPathBuilt +{ ref drvPath; std::map outputs; - bool operator == (const BuiltPathBuilt &) const noexcept; + bool operator==(const BuiltPathBuilt &) const noexcept; // TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet. 
- //std::strong_ordering operator <=> (const BuiltPathBuilt &) const noexcept; + // std::strong_ordering operator <=> (const BuiltPathBuilt &) const noexcept; std::string to_string(const StoreDirConfig & store) const; static BuiltPathBuilt parse(const StoreDirConfig & store, std::string_view, std::string_view); nlohmann::json toJSON(const StoreDirConfig & store) const; }; -using _BuiltPathRaw = std::variant< - DerivedPath::Opaque, - BuiltPathBuilt ->; +using _BuiltPathRaw = std::variant; /** * A built path. Similar to a DerivedPath, but enriched with the corresponding * output path(s). */ -struct BuiltPath : _BuiltPathRaw { +struct BuiltPath : _BuiltPathRaw +{ using Raw = _BuiltPathRaw; using Raw::Raw; using Opaque = DerivedPathOpaque; using Built = BuiltPathBuilt; - bool operator == (const BuiltPath &) const = default; - // TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet. - //auto operator <=> (const BuiltPath &) const = default; + bool operator==(const BuiltPath &) const = default; - inline const Raw & raw() const { + // TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet. + // auto operator <=> (const BuiltPath &) const = default; + + inline const Raw & raw() const + { return static_cast(*this); } @@ -104,4 +105,4 @@ struct BuiltPath : _BuiltPathRaw { typedef std::vector BuiltPaths; -} +} // namespace nix diff --git a/src/libcmd/include/nix/cmd/command-installable-value.hh b/src/libcmd/include/nix/cmd/command-installable-value.hh index b171d9f73..beb77be64 100644 --- a/src/libcmd/include/nix/cmd/command-installable-value.hh +++ b/src/libcmd/include/nix/cmd/command-installable-value.hh @@ -20,4 +20,4 @@ struct InstallableValueCommand : InstallableCommand void run(ref store, ref installable) override; }; -} +} // namespace nix diff --git a/src/libcmd/include/nix/cmd/common-eval-args.hh b/src/libcmd/include/nix/cmd/common-eval-args.hh index 88ede1ed7..2a0499477 100644 --- a/src/libcmd/include/nix/cmd/common-eval-args.hh +++ b/src/libcmd/include/nix/cmd/common-eval-args.hh @@ -13,13 +13,17 @@ namespace nix { class Store; -namespace fetchers { struct Settings; } +namespace fetchers { +struct Settings; +} class EvalState; struct CompatibilitySettings; class Bindings; -namespace flake { struct Settings; } +namespace flake { +struct Settings; +} /** * @todo Get rid of global settings variables @@ -54,10 +58,23 @@ struct MixEvalArgs : virtual Args, virtual MixRepair std::optional evalStoreUrl; private: - struct AutoArgExpr { std::string expr; }; - struct AutoArgString { std::string s; }; - struct AutoArgFile { std::filesystem::path path; }; - struct AutoArgStdin { }; + struct AutoArgExpr + { + std::string expr; + }; + + struct AutoArgString + { + std::string s; + }; + + struct AutoArgFile + { + std::filesystem::path path; + }; + + struct AutoArgStdin + {}; using AutoArg = std::variant; @@ -69,4 +86,4 @@ private: */ SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir = nullptr); -} +} // namespace nix diff --git a/src/libcmd/include/nix/cmd/compatibility-settings.hh b/src/libcmd/include/nix/cmd/compatibility-settings.hh index c7061a0a1..7c34ae17a 100644 --- a/src/libcmd/include/nix/cmd/compatibility-settings.hh +++ b/src/libcmd/include/nix/cmd/compatibility-settings.hh @@ -33,4 +33,4 @@ struct CompatibilitySettings : public Config )"}; }; -}; +}; // namespace nix diff --git a/src/libcmd/include/nix/cmd/editor-for.hh b/src/libcmd/include/nix/cmd/editor-for.hh index 11414e823..3fb8a072e 100644 --- 
a/src/libcmd/include/nix/cmd/editor-for.hh +++ b/src/libcmd/include/nix/cmd/editor-for.hh @@ -12,4 +12,4 @@ namespace nix { */ Strings editorFor(const SourcePath & file, uint32_t line); -} +} // namespace nix diff --git a/src/libcmd/include/nix/cmd/installable-attr-path.hh b/src/libcmd/include/nix/cmd/installable-attr-path.hh index 5a0dc993c..474bb358e 100644 --- a/src/libcmd/include/nix/cmd/installable-attr-path.hh +++ b/src/libcmd/include/nix/cmd/installable-attr-path.hh @@ -39,7 +39,10 @@ class InstallableAttrPath : public InstallableValue const std::string & attrPath, ExtendedOutputsSpec extendedOutputsSpec); - std::string what() const override { return attrPath; }; + std::string what() const override + { + return attrPath; + }; std::pair toValue(EvalState & state) override; @@ -55,4 +58,4 @@ public: ExtendedOutputsSpec extendedOutputsSpec); }; -} +} // namespace nix diff --git a/src/libcmd/include/nix/cmd/installable-derived-path.hh b/src/libcmd/include/nix/cmd/installable-derived-path.hh index daa6ba868..f255f2bba 100644 --- a/src/libcmd/include/nix/cmd/installable-derived-path.hh +++ b/src/libcmd/include/nix/cmd/installable-derived-path.hh @@ -11,8 +11,10 @@ struct InstallableDerivedPath : Installable DerivedPath derivedPath; InstallableDerivedPath(ref store, DerivedPath && derivedPath) - : store(store), derivedPath(std::move(derivedPath)) - { } + : store(store) + , derivedPath(std::move(derivedPath)) + { + } std::string what() const override; @@ -20,10 +22,8 @@ struct InstallableDerivedPath : Installable std::optional getStorePath() override; - static InstallableDerivedPath parse( - ref store, - std::string_view prefix, - ExtendedOutputsSpec extendedOutputsSpec); + static InstallableDerivedPath + parse(ref store, std::string_view prefix, ExtendedOutputsSpec extendedOutputsSpec); }; -} +} // namespace nix diff --git a/src/libcmd/include/nix/cmd/installable-flake.hh b/src/libcmd/include/nix/cmd/installable-flake.hh index 8699031b5..935ea8779 100644 --- a/src/libcmd/include/nix/cmd/installable-flake.hh +++ b/src/libcmd/include/nix/cmd/installable-flake.hh @@ -18,7 +18,8 @@ struct ExtraPathInfoFlake : ExtraPathInfoValue /** * Extra struct to get around C++ designated initializer limitations */ - struct Flake { + struct Flake + { FlakeRef originalRef; FlakeRef lockedRef; }; @@ -26,8 +27,10 @@ struct ExtraPathInfoFlake : ExtraPathInfoValue Flake flake; ExtraPathInfoFlake(Value && v, Flake && f) - : ExtraPathInfoValue(std::move(v)), flake(std::move(f)) - { } + : ExtraPathInfoValue(std::move(v)) + , flake(std::move(f)) + { + } }; struct InstallableFlake : InstallableValue @@ -49,7 +52,10 @@ struct InstallableFlake : InstallableValue Strings prefixes, const flake::LockFlags & lockFlags); - std::string what() const override { return flakeRef.to_string() + "#" + *attrPaths.begin(); } + std::string what() const override + { + return flakeRef.to_string() + "#" + *attrPaths.begin(); + } std::vector getActualAttrPaths(); @@ -61,8 +67,7 @@ struct InstallableFlake : InstallableValue * Get a cursor to every attrpath in getActualAttrPaths() that * exists. However if none exists, throw an exception. 
*/ - std::vector> - getCursors(EvalState & state) override; + std::vector> getCursors(EvalState & state) override; std::shared_ptr getLockedFlake() const; @@ -79,11 +84,9 @@ struct InstallableFlake : InstallableValue */ static inline FlakeRef defaultNixpkgsFlakeRef() { - return FlakeRef::fromAttrs(fetchSettings, {{"type","indirect"}, {"id", "nixpkgs"}}); + return FlakeRef::fromAttrs(fetchSettings, {{"type", "indirect"}, {"id", "nixpkgs"}}); } -ref openEvalCache( - EvalState & state, - std::shared_ptr lockedFlake); +ref openEvalCache(EvalState & state, std::shared_ptr lockedFlake); -} +} // namespace nix diff --git a/src/libcmd/include/nix/cmd/installable-value.hh b/src/libcmd/include/nix/cmd/installable-value.hh index e65c199a5..3521a4154 100644 --- a/src/libcmd/include/nix/cmd/installable-value.hh +++ b/src/libcmd/include/nix/cmd/installable-value.hh @@ -9,7 +9,10 @@ namespace nix { struct PackageInfo; struct SourceExprCommand; -namespace eval_cache { class EvalCache; class AttrCursor; } +namespace eval_cache { +class EvalCache; +class AttrCursor; +} // namespace eval_cache struct App { @@ -37,7 +40,8 @@ struct ExtraPathInfoValue : ExtraPathInfo /** * Extra struct to get around C++ designated initializer limitations */ - struct Value { + struct Value + { /** * An optional priority for use with "build envs". See Package */ @@ -61,7 +65,8 @@ struct ExtraPathInfoValue : ExtraPathInfo ExtraPathInfoValue(Value && v) : value(std::move(v)) - { } + { + } virtual ~ExtraPathInfoValue() = default; }; @@ -74,9 +79,12 @@ struct InstallableValue : Installable { ref state; - InstallableValue(ref state) : state(state) {} + InstallableValue(ref state) + : state(state) + { + } - virtual ~InstallableValue() { } + virtual ~InstallableValue() {} virtual std::pair toValue(EvalState & state) = 0; @@ -85,15 +93,13 @@ struct InstallableValue : Installable * However if none exists, throw exception instead of returning * empty vector. */ - virtual std::vector> - getCursors(EvalState & state); + virtual std::vector> getCursors(EvalState & state); /** * Get the first and most preferred cursor this Installable could * refer to, or throw an exception if none exists. */ - virtual ref - getCursor(EvalState & state); + virtual ref getCursor(EvalState & state); UnresolvedApp toApp(EvalState & state); @@ -116,7 +122,8 @@ protected: * @result A derived path (with empty info, for now) if the value * matched the above criteria. */ - std::optional trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx); + std::optional + trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx); }; -} +} // namespace nix diff --git a/src/libcmd/include/nix/cmd/installables.hh b/src/libcmd/include/nix/cmd/installables.hh index 84941278a..530334e03 100644 --- a/src/libcmd/include/nix/cmd/installables.hh +++ b/src/libcmd/include/nix/cmd/installables.hh @@ -112,7 +112,7 @@ typedef std::vector> Installables; */ struct Installable { - virtual ~Installable() { } + virtual ~Installable() {} /** * What Installable is this? 
@@ -168,37 +168,19 @@ struct Installable BuildMode bMode = bmNormal); static std::set toStorePathSet( - ref evalStore, - ref store, - Realise mode, - OperateOn operateOn, - const Installables & installables); + ref evalStore, ref store, Realise mode, OperateOn operateOn, const Installables & installables); static std::vector toStorePaths( - ref evalStore, - ref store, - Realise mode, - OperateOn operateOn, - const Installables & installables); + ref evalStore, ref store, Realise mode, OperateOn operateOn, const Installables & installables); static StorePath toStorePath( - ref evalStore, - ref store, - Realise mode, - OperateOn operateOn, - ref installable); + ref evalStore, ref store, Realise mode, OperateOn operateOn, ref installable); - static std::set toDerivations( - ref store, - const Installables & installables, - bool useDeriver = false); + static std::set + toDerivations(ref store, const Installables & installables, bool useDeriver = false); static BuiltPaths toBuiltPaths( - ref evalStore, - ref store, - Realise mode, - OperateOn operateOn, - const Installables & installables); + ref evalStore, ref store, Realise mode, OperateOn operateOn, const Installables & installables); }; -} +} // namespace nix diff --git a/src/libcmd/include/nix/cmd/legacy.hh b/src/libcmd/include/nix/cmd/legacy.hh index 0c375a7d2..546057184 100644 --- a/src/libcmd/include/nix/cmd/legacy.hh +++ b/src/libcmd/include/nix/cmd/legacy.hh @@ -7,13 +7,14 @@ namespace nix { -typedef std::function MainFunction; +typedef std::function MainFunction; struct RegisterLegacyCommand { typedef std::map Commands; - static Commands & commands() { + static Commands & commands() + { static Commands commands; return commands; } @@ -24,4 +25,4 @@ struct RegisterLegacyCommand } }; -} +} // namespace nix diff --git a/src/libcmd/include/nix/cmd/markdown.hh b/src/libcmd/include/nix/cmd/markdown.hh index 66db1736c..95a59c2aa 100644 --- a/src/libcmd/include/nix/cmd/markdown.hh +++ b/src/libcmd/include/nix/cmd/markdown.hh @@ -14,4 +14,4 @@ namespace nix { */ std::string renderMarkdownToTerminal(std::string_view markdown); -} +} // namespace nix diff --git a/src/libcmd/include/nix/cmd/misc-store-flags.hh b/src/libcmd/include/nix/cmd/misc-store-flags.hh index c9467ad8e..27e139076 100644 --- a/src/libcmd/include/nix/cmd/misc-store-flags.hh +++ b/src/libcmd/include/nix/cmd/misc-store-flags.hh @@ -4,18 +4,22 @@ namespace nix::flag { Args::Flag hashAlgo(std::string && longName, HashAlgorithm * ha); + static inline Args::Flag hashAlgo(HashAlgorithm * ha) { return hashAlgo("hash-algo", ha); } + Args::Flag hashAlgoOpt(std::string && longName, std::optional * oha); Args::Flag hashFormatWithDefault(std::string && longName, HashFormat * hf); Args::Flag hashFormatOpt(std::string && longName, std::optional * ohf); + static inline Args::Flag hashAlgoOpt(std::optional * oha) { return hashAlgoOpt("hash-algo", oha); } + Args::Flag fileIngestionMethod(FileIngestionMethod * method); Args::Flag contentAddressMethod(ContentAddressMethod * method); -} +} // namespace nix::flag diff --git a/src/libcmd/include/nix/cmd/network-proxy.hh b/src/libcmd/include/nix/cmd/network-proxy.hh index 255597a61..f51b7dadb 100644 --- a/src/libcmd/include/nix/cmd/network-proxy.hh +++ b/src/libcmd/include/nix/cmd/network-proxy.hh @@ -19,4 +19,4 @@ extern const StringSet networkProxyVariables; */ bool haveNetworkProxyConnection(); -} +} // namespace nix diff --git a/src/libcmd/include/nix/cmd/repl-interacter.hh b/src/libcmd/include/nix/cmd/repl-interacter.hh index 
eb58563b2..89e854ad9 100644 --- a/src/libcmd/include/nix/cmd/repl-interacter.hh +++ b/src/libcmd/include/nix/cmd/repl-interacter.hh @@ -11,10 +11,11 @@ namespace nix { namespace detail { /** Provides the completion hooks for the repl, without exposing its complete * internals. */ -struct ReplCompleterMixin { +struct ReplCompleterMixin +{ virtual StringSet completePrefix(const std::string & prefix) = 0; }; -}; +}; // namespace detail enum class ReplPromptType { ReplPrompt, @@ -29,7 +30,7 @@ public: virtual Guard init(detail::ReplCompleterMixin * repl) = 0; /** Returns a boolean of whether the interacter got EOF */ virtual bool getLine(std::string & input, ReplPromptType promptType) = 0; - virtual ~ReplInteracter(){}; + virtual ~ReplInteracter() {}; }; class ReadlineLikeInteracter : public virtual ReplInteracter @@ -40,9 +41,10 @@ public: : historyFile(historyFile) { } + virtual Guard init(detail::ReplCompleterMixin * repl) override; virtual bool getLine(std::string & input, ReplPromptType promptType) override; virtual ~ReadlineLikeInteracter() override; }; -}; +}; // namespace nix diff --git a/src/libcmd/include/nix/cmd/repl.hh b/src/libcmd/include/nix/cmd/repl.hh index 83e39727f..a2c905f86 100644 --- a/src/libcmd/include/nix/cmd/repl.hh +++ b/src/libcmd/include/nix/cmd/repl.hh @@ -12,12 +12,12 @@ struct AbstractNixRepl AbstractNixRepl(ref state) : state(state) - { } + { + } - virtual ~AbstractNixRepl() - { } + virtual ~AbstractNixRepl() {} - typedef std::vector> AnnotatedValues; + typedef std::vector> AnnotatedValues; using RunNix = void(Path program, const Strings & args, const std::optional & input); @@ -33,13 +33,11 @@ struct AbstractNixRepl std::function getValues, RunNix * runNix = nullptr); - static ReplExitStatus runSimple( - ref evalState, - const ValMap & extraEnv); + static ReplExitStatus runSimple(ref evalState, const ValMap & extraEnv); virtual void initEnv() = 0; virtual ReplExitStatus mainLoop() = 0; }; -} +} // namespace nix diff --git a/src/libcmd/installable-attr-path.cc b/src/libcmd/installable-attr-path.cc index 7783b4f40..28c3db3fc 100644 --- a/src/libcmd/installable-attr-path.cc +++ b/src/libcmd/installable-attr-path.cc @@ -35,7 +35,8 @@ InstallableAttrPath::InstallableAttrPath( , v(allocRootValue(v)) , attrPath(attrPath) , extendedOutputsSpec(std::move(extendedOutputsSpec)) -{ } +{ +} std::pair InstallableAttrPath::toValue(EvalState & state) { @@ -48,12 +49,9 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths() { auto [v, pos] = toValue(*state); - if (std::optional derivedPathWithInfo = trySinglePathToDerivedPaths( - *v, - pos, - fmt("while evaluating the attribute '%s'", attrPath))) - { - return { *derivedPathWithInfo }; + if (std::optional derivedPathWithInfo = + trySinglePathToDerivedPaths(*v, pos, fmt("while evaluating the attribute '%s'", attrPath))) { + return {*derivedPathWithInfo}; } Bindings & autoArgs = *cmd.getAutoArgs(*state); @@ -70,19 +68,19 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths() if (!drvPath) throw Error("'%s' is not a derivation", what()); - auto newOutputs = std::visit(overloaded { - [&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec { - StringSet outputsToInstall; - for (auto & output : packageInfo.queryOutputs(false, true)) - outputsToInstall.insert(output.first); - if (outputsToInstall.empty()) - outputsToInstall.insert("out"); - return OutputsSpec::Names { std::move(outputsToInstall) }; + auto newOutputs = std::visit( + overloaded{ + [&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec { + StringSet 
outputsToInstall; + for (auto & output : packageInfo.queryOutputs(false, true)) + outputsToInstall.insert(output.first); + if (outputsToInstall.empty()) + outputsToInstall.insert("out"); + return OutputsSpec::Names{std::move(outputsToInstall)}; + }, + [&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec { return e; }, }, - [&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec { - return e; - }, - }, extendedOutputsSpec.raw); + extendedOutputsSpec.raw); auto [iter, didInsert] = byDrvPath.emplace(*drvPath, newOutputs); @@ -93,11 +91,12 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths() DerivedPathsWithInfo res; for (auto & [drvPath, outputs] : byDrvPath) res.push_back({ - .path = DerivedPath::Built { - .drvPath = makeConstantStorePathRef(drvPath), - .outputs = outputs, - }, - .info = make_ref(ExtraPathInfoValue::Value { + .path = + DerivedPath::Built{ + .drvPath = makeConstantStorePathRef(drvPath), + .outputs = outputs, + }, + .info = make_ref(ExtraPathInfoValue::Value{ .extendedOutputsSpec = outputs, /* FIXME: reconsider backwards compatibility above so we can fill in this info. */ @@ -115,10 +114,12 @@ InstallableAttrPath InstallableAttrPath::parse( ExtendedOutputsSpec extendedOutputsSpec) { return { - state, cmd, v, - prefix == "." ? "" : std::string { prefix }, + state, + cmd, + v, + prefix == "." ? "" : std::string{prefix}, std::move(extendedOutputsSpec), }; } -} +} // namespace nix diff --git a/src/libcmd/installable-derived-path.cc b/src/libcmd/installable-derived-path.cc index 5a92f81c7..929c663d1 100644 --- a/src/libcmd/installable-derived-path.cc +++ b/src/libcmd/installable-derived-path.cc @@ -21,35 +21,35 @@ std::optional InstallableDerivedPath::getStorePath() return derivedPath.getBaseStorePath(); } -InstallableDerivedPath InstallableDerivedPath::parse( - ref store, - std::string_view prefix, - ExtendedOutputsSpec extendedOutputsSpec) +InstallableDerivedPath +InstallableDerivedPath::parse(ref store, std::string_view prefix, ExtendedOutputsSpec extendedOutputsSpec) { - auto derivedPath = std::visit(overloaded { - // If the user did not use ^, we treat the output more - // liberally: we accept a symlink chain or an actual - // store path. - [&](const ExtendedOutputsSpec::Default &) -> DerivedPath { - auto storePath = store->followLinksToStorePath(prefix); - return DerivedPath::Opaque { - .path = std::move(storePath), - }; + auto derivedPath = std::visit( + overloaded{ + // If the user did not use ^, we treat the output more + // liberally: we accept a symlink chain or an actual + // store path. + [&](const ExtendedOutputsSpec::Default &) -> DerivedPath { + auto storePath = store->followLinksToStorePath(prefix); + return DerivedPath::Opaque{ + .path = std::move(storePath), + }; + }, + // If the user did use ^, we just do exactly what is written. + [&](const ExtendedOutputsSpec::Explicit & outputSpec) -> DerivedPath { + auto drv = make_ref(SingleDerivedPath::parse(*store, prefix)); + drvRequireExperiment(*drv); + return DerivedPath::Built{ + .drvPath = std::move(drv), + .outputs = outputSpec, + }; + }, }, - // If the user did use ^, we just do exactly what is written. 
- [&](const ExtendedOutputsSpec::Explicit & outputSpec) -> DerivedPath { - auto drv = make_ref(SingleDerivedPath::parse(*store, prefix)); - drvRequireExperiment(*drv); - return DerivedPath::Built { - .drvPath = std::move(drv), - .outputs = outputSpec, - }; - }, - }, extendedOutputsSpec.raw); - return InstallableDerivedPath { + extendedOutputsSpec.raw); + return InstallableDerivedPath{ store, std::move(derivedPath), }; } -} +} // namespace nix diff --git a/src/libcmd/installable-flake.cc b/src/libcmd/installable-flake.cc index 85a4188a7..97f7eb645 100644 --- a/src/libcmd/installable-flake.cc +++ b/src/libcmd/installable-flake.cc @@ -28,8 +28,8 @@ namespace nix { std::vector InstallableFlake::getActualAttrPaths() { std::vector res; - if (attrPaths.size() == 1 && attrPaths.front().starts_with(".")){ - attrPaths.front().erase(0,1); + if (attrPaths.size() == 1 && attrPaths.front().starts_with(".")) { + attrPaths.front().erase(0, 1); res.push_back(attrPaths.front()); return res; } @@ -47,8 +47,11 @@ static std::string showAttrPaths(const std::vector & paths) { std::string s; for (const auto & [n, i] : enumerate(paths)) { - if (n > 0) s += n + 1 == paths.size() ? " or " : ", "; - s += '\''; s += i; s += '\''; + if (n > 0) + s += n + 1 == paths.size() ? " or " : ", "; + s += '\''; + s += i; + s += '\''; } return s; } @@ -62,12 +65,12 @@ InstallableFlake::InstallableFlake( Strings attrPaths, Strings prefixes, const flake::LockFlags & lockFlags) - : InstallableValue(state), - flakeRef(flakeRef), - attrPaths(fragment == "" ? attrPaths : Strings{(std::string) fragment}), - prefixes(fragment == "" ? Strings{} : prefixes), - extendedOutputsSpec(std::move(extendedOutputsSpec)), - lockFlags(lockFlags) + : InstallableValue(state) + , flakeRef(flakeRef) + , attrPaths(fragment == "" ? attrPaths : Strings{(std::string) fragment}) + , prefixes(fragment == "" ? 
Strings{} : prefixes) + , extendedOutputsSpec(std::move(extendedOutputsSpec)) + , lockFlags(lockFlags) { if (cmd && cmd->getAutoArgs(*state)->size()) throw UsageError("'--arg' and '--argstr' are incompatible with flakes"); @@ -87,18 +90,14 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths() auto v = attr->forceValue(); if (std::optional derivedPathWithInfo = trySinglePathToDerivedPaths( - v, - noPos, - fmt("while evaluating the flake output attribute '%s'", attrPath))) - { - return { *derivedPathWithInfo }; + v, noPos, fmt("while evaluating the flake output attribute '%s'", attrPath))) { + return {*derivedPathWithInfo}; } else { throw Error( "expected flake output attribute '%s' to be a derivation or path but found %s: %s", attrPath, showType(v), - ValuePrinter(*this->state, v, errorPrintOptions) - ); + ValuePrinter(*this->state, v, errorPrintOptions)); } } @@ -113,39 +112,40 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths() } return {{ - .path = DerivedPath::Built { - .drvPath = makeConstantStorePathRef(std::move(drvPath)), - .outputs = std::visit(overloaded { - [&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec { - StringSet outputsToInstall; - if (auto aOutputSpecified = attr->maybeGetAttr(state->sOutputSpecified)) { - if (aOutputSpecified->getBool()) { - if (auto aOutputName = attr->maybeGetAttr("outputName")) - outputsToInstall = { aOutputName->getString() }; - } - } else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) { - if (auto aOutputsToInstall = aMeta->maybeGetAttr("outputsToInstall")) - for (auto & s : aOutputsToInstall->getListOfStrings()) - outputsToInstall.insert(s); - } + .path = + DerivedPath::Built{ + .drvPath = makeConstantStorePathRef(std::move(drvPath)), + .outputs = std::visit( + overloaded{ + [&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec { + StringSet outputsToInstall; + if (auto aOutputSpecified = attr->maybeGetAttr(state->sOutputSpecified)) { + if (aOutputSpecified->getBool()) { + if (auto aOutputName = attr->maybeGetAttr("outputName")) + outputsToInstall = {aOutputName->getString()}; + } + } else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) { + if (auto aOutputsToInstall = aMeta->maybeGetAttr("outputsToInstall")) + for (auto & s : aOutputsToInstall->getListOfStrings()) + outputsToInstall.insert(s); + } - if (outputsToInstall.empty()) - outputsToInstall.insert("out"); + if (outputsToInstall.empty()) + outputsToInstall.insert("out"); - return OutputsSpec::Names { std::move(outputsToInstall) }; - }, - [&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec { - return e; - }, - }, extendedOutputsSpec.raw), - }, + return OutputsSpec::Names{std::move(outputsToInstall)}; + }, + [&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec { return e; }, + }, + extendedOutputsSpec.raw), + }, .info = make_ref( - ExtraPathInfoValue::Value { + ExtraPathInfoValue::Value{ .priority = priority, .attrPath = attrPath, .extendedOutputsSpec = extendedOutputsSpec, }, - ExtraPathInfoFlake::Flake { + ExtraPathInfoFlake::Flake{ .originalRef = flakeRef, .lockedRef = getLockedFlake()->flake.lockedRef, }), @@ -157,8 +157,7 @@ std::pair InstallableFlake::toValue(EvalState & state) return {&getCursor(state)->forceValue(), noPos}; } -std::vector> -InstallableFlake::getCursors(EvalState & state) +std::vector> InstallableFlake::getCursors(EvalState & state) { auto evalCache = openEvalCache(state, getLockedFlake()); @@ -181,11 +180,7 @@ InstallableFlake::getCursors(EvalState & state) } if (res.size() == 0) - throw Error( - suggestions, - "flake 
'%s' does not provide attribute %s", - flakeRef, - showAttrPaths(attrPaths)); + throw Error(suggestions, "flake '%s' does not provide attribute %s", flakeRef, showAttrPaths(attrPaths)); return res; } @@ -196,8 +191,8 @@ std::shared_ptr InstallableFlake::getLockedFlake() const flake::LockFlags lockFlagsApplyConfig = lockFlags; // FIXME why this side effect? lockFlagsApplyConfig.applyNixConfig = true; - _lockedFlake = std::make_shared(lockFlake( - flakeSettings, *state, flakeRef, lockFlagsApplyConfig)); + _lockedFlake = + std::make_shared(lockFlake(flakeSettings, *state, flakeRef, lockFlagsApplyConfig)); } return _lockedFlake; } @@ -216,4 +211,4 @@ FlakeRef InstallableFlake::nixpkgsFlakeRef() const return defaultNixpkgsFlakeRef(); } -} +} // namespace nix diff --git a/src/libcmd/installable-value.cc b/src/libcmd/installable-value.cc index e92496347..3a167af3d 100644 --- a/src/libcmd/installable-value.cc +++ b/src/libcmd/installable-value.cc @@ -4,17 +4,14 @@ namespace nix { -std::vector> -InstallableValue::getCursors(EvalState & state) +std::vector> InstallableValue::getCursors(EvalState & state) { auto evalCache = - std::make_shared(std::nullopt, state, - [&]() { return toValue(state).first; }); + std::make_shared(std::nullopt, state, [&]() { return toValue(state).first; }); return {evalCache->getRoot()}; } -ref -InstallableValue::getCursor(EvalState & state) +ref InstallableValue::getCursor(EvalState & state) { /* Although getCursors should return at least one element, in case it doesn't, bound check to avoid an undefined behavior for vector[0] */ @@ -39,30 +36,32 @@ ref InstallableValue::require(ref installable) auto castedInstallable = installable.dynamic_pointer_cast(); if (!castedInstallable) throw nonValueInstallable(*installable); - return ref { castedInstallable }; + return ref{castedInstallable}; } -std::optional InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx) +std::optional +InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx) { if (v.type() == nPath) { auto storePath = fetchToStore(state->fetchSettings, *state->store, v.path(), FetchMode::Copy); return {{ - .path = DerivedPath::Opaque { - .path = std::move(storePath), - }, + .path = + DerivedPath::Opaque{ + .path = std::move(storePath), + }, .info = make_ref(), }}; } else if (v.type() == nString) { return {{ - .path = DerivedPath::fromSingle( - state->coerceToSingleDerivedPath(pos, v, errorCtx)), + .path = DerivedPath::fromSingle(state->coerceToSingleDerivedPath(pos, v, errorCtx)), .info = make_ref(), }}; } - else return std::nullopt; + else + return std::nullopt; } -} +} // namespace nix diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 49ffd82e1..0e6a204a7 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -61,7 +61,8 @@ MixFlakeOptions::MixFlakeOptions() .category = category, .handler = {[&]() { lockFlags.recreateLockFile = true; - warn("'--recreate-lock-file' is deprecated and will be removed in a future version; use 'nix flake update' instead."); + warn( + "'--recreate-lock-file' is deprecated and will be removed in a future version; use 'nix flake update' instead."); }}, }); @@ -158,9 +159,7 @@ MixFlakeOptions::MixFlakeOptions() .description = "Write the given lock file instead of `flake.lock` within the top-level flake.", .category = category, .labels = {"flake-lock-path"}, - .handler = {[&](std::string lockFilePath) { - lockFlags.outputLockFilePath = lockFilePath; - }}, + 
.handler = {[&](std::string lockFilePath) { lockFlags.outputLockFilePath = lockFilePath; }}, .completer = completePath, }); @@ -175,12 +174,12 @@ MixFlakeOptions::MixFlakeOptions() flakeSettings, *evalState, parseFlakeRef(fetchSettings, flakeRef, absPath(getCommandBaseDir())), - { .writeLockFile = false }); + {.writeLockFile = false}); for (auto & [inputName, input] : flake.lockFile.root->inputs) { auto input2 = flake.lockFile.findInput({inputName}); // resolve 'follows' nodes if (auto input3 = std::dynamic_pointer_cast(input2)) { overrideRegistry( - fetchers::Input::fromAttrs(fetchSettings, {{"type","indirect"}, {"id", inputName}}), + fetchers::Input::fromAttrs(fetchSettings, {{"type", "indirect"}, {"id", inputName}}), input3->lockedRef.input, {}); } @@ -209,7 +208,8 @@ SourceExprCommand::SourceExprCommand() addFlag({ .longName = "expr", - .description = "Interpret [*installables*](@docroot@/command-ref/new-cli/nix.md#installables) as attribute paths relative to the Nix expression *expr*.", + .description = + "Interpret [*installables*](@docroot@/command-ref/new-cli/nix.md#installables) as attribute paths relative to the Nix expression *expr*.", .category = installablesCategory, .labels = {"expr"}, .handler = {&expr}, @@ -220,32 +220,26 @@ MixReadOnlyOption::MixReadOnlyOption() { addFlag({ .longName = "read-only", - .description = - "Do not instantiate each evaluated derivation. " - "This improves performance, but can cause errors when accessing " - "store paths of derivations during evaluation.", + .description = "Do not instantiate each evaluated derivation. " + "This improves performance, but can cause errors when accessing " + "store paths of derivations during evaluation.", .handler = {&settings.readOnlyMode, true}, }); } Strings SourceExprCommand::getDefaultFlakeAttrPaths() { - return { - "packages." + settings.thisSystem.get() + ".default", - "defaultPackage." + settings.thisSystem.get() - }; + return {"packages." + settings.thisSystem.get() + ".default", "defaultPackage." + settings.thisSystem.get()}; } Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes() { - return { - // As a convenience, look for the attribute in - // 'outputs.packages'. - "packages." + settings.thisSystem.get() + ".", - // As a temporary hack until Nixpkgs is properly converted - // to provide a clean 'packages' set, look in 'legacyPackages'. - "legacyPackages." + settings.thisSystem.get() + "." - }; + return {// As a convenience, look for the attribute in + // 'outputs.packages'. + "packages." + settings.thisSystem.get() + ".", + // As a temporary hack until Nixpkgs is properly converted + // to provide a clean 'packages' set, look in 'legacyPackages'. + "legacyPackages." 
+ settings.thisSystem.get() + "."}; } Args::CompleterClosure SourceExprCommand::getCompleteInstallable() @@ -263,10 +257,7 @@ void SourceExprCommand::completeInstallable(AddCompletions & completions, std::s evalSettings.pureEval = false; auto state = getEvalState(); - auto e = - state->parseExprFromFile( - resolveExprPath( - lookupFileArg(*state, *file))); + auto e = state->parseExprFromFile(resolveExprPath(lookupFileArg(*state, *file))); Value root; state->eval(e, root); @@ -285,7 +276,7 @@ void SourceExprCommand::completeInstallable(AddCompletions & completions, std::s } auto [v, pos] = findAlongAttrPath(*state, prefix_, *autoArgs, root); - Value &v1(*v); + Value & v1(*v); state->forceValue(v1, pos); Value v2; state->autoCallFunction(*autoArgs, v1, v2); @@ -310,7 +301,7 @@ void SourceExprCommand::completeInstallable(AddCompletions & completions, std::s getDefaultFlakeAttrPaths(), prefix); } - } catch (EvalError&) { + } catch (EvalError &) { // Don't want eval errors to mess-up with the completion engine, so let's just swallow them } } @@ -334,22 +325,23 @@ void completeFlakeRefWithFragment( auto fragment = prefix.substr(hash + 1); std::string prefixRoot = ""; - if (fragment.starts_with(".")){ + if (fragment.starts_with(".")) { fragment = fragment.substr(1); prefixRoot = "."; } auto flakeRefS = std::string(prefix.substr(0, hash)); // TODO: ideally this would use the command base directory instead of assuming ".". - auto flakeRef = parseFlakeRef(fetchSettings, expandTilde(flakeRefS), std::filesystem::current_path().string()); + auto flakeRef = + parseFlakeRef(fetchSettings, expandTilde(flakeRefS), std::filesystem::current_path().string()); - auto evalCache = openEvalCache(*evalState, - std::make_shared(lockFlake( - flakeSettings, *evalState, flakeRef, lockFlags))); + auto evalCache = openEvalCache( + *evalState, + std::make_shared(lockFlake(flakeSettings, *evalState, flakeRef, lockFlags))); auto root = evalCache->getRoot(); - if (prefixRoot == "."){ + if (prefixRoot == ".") { attrPathPrefixes.clear(); } /* Complete 'fragment' relative to all the @@ -369,7 +361,8 @@ void completeFlakeRefWithFragment( } auto attr = root->findAlongAttrPath(attrPath); - if (!attr) continue; + if (!attr) + continue; for (auto & attr2 : (*attr)->getAttrs()) { if (hasPrefix(evalState->symbols[attr2], lastAttr)) { @@ -377,7 +370,9 @@ void completeFlakeRefWithFragment( /* Strip the attrpath prefix. 
*/ attrPath2.erase(attrPath2.begin(), attrPath2.begin() + attrPathPrefix.size()); // FIXME: handle names with dots - completions.add(flakeRefS + "#" + prefixRoot + concatStringsSep(".", evalState->symbols.resolve(attrPath2))); + completions.add( + flakeRefS + "#" + prefixRoot + + concatStringsSep(".", evalState->symbols.resolve(attrPath2))); } } } @@ -387,7 +382,8 @@ void completeFlakeRefWithFragment( if (fragment.empty()) { for (auto & attrPath : defaultFlakeAttrPaths) { auto attr = root->findAlongAttrPath(parseAttrPath(*evalState, attrPath)); - if (!attr) continue; + if (!attr) + continue; completions.add(flakeRefS + "#" + prefixRoot); } } @@ -427,14 +423,12 @@ DerivedPathWithInfo Installable::toDerivedPath() { auto buildables = toDerivedPaths(); if (buildables.size() != 1) - throw Error("installable '%s' evaluates to %d derivations, where only one is expected", what(), buildables.size()); + throw Error( + "installable '%s' evaluates to %d derivations, where only one is expected", what(), buildables.size()); return std::move(buildables[0]); } -static StorePath getDeriver( - ref store, - const Installable & i, - const StorePath & drvPath) +static StorePath getDeriver(ref store, const Installable & i, const StorePath & drvPath) { auto derivers = store->queryValidDerivers(drvPath); if (derivers.empty()) @@ -443,35 +437,35 @@ static StorePath getDeriver( return *derivers.begin(); } -ref openEvalCache( - EvalState & state, - std::shared_ptr lockedFlake) +ref openEvalCache(EvalState & state, std::shared_ptr lockedFlake) { auto fingerprint = evalSettings.useEvalCache && evalSettings.pureEval - ? lockedFlake->getFingerprint(state.store, state.fetchSettings) - : std::nullopt; - auto rootLoader = [&state, lockedFlake]() - { - /* For testing whether the evaluation cache is - complete. */ - if (getEnv("NIX_ALLOW_EVAL").value_or("1") == "0") - throw Error("not everything is cached, but evaluation is not allowed"); + ? lockedFlake->getFingerprint(state.store, state.fetchSettings) + : std::nullopt; + auto rootLoader = [&state, lockedFlake]() { + /* For testing whether the evaluation cache is + complete. 
*/ + if (getEnv("NIX_ALLOW_EVAL").value_or("1") == "0") + throw Error("not everything is cached, but evaluation is not allowed"); - auto vFlake = state.allocValue(); - flake::callFlake(state, *lockedFlake, *vFlake); + auto vFlake = state.allocValue(); + flake::callFlake(state, *lockedFlake, *vFlake); - state.forceAttrs(*vFlake, noPos, "while parsing cached flake data"); + state.forceAttrs(*vFlake, noPos, "while parsing cached flake data"); - auto aOutputs = vFlake->attrs()->get(state.symbols.create("outputs")); - assert(aOutputs); + auto aOutputs = vFlake->attrs()->get(state.symbols.create("outputs")); + assert(aOutputs); - return aOutputs->value; - }; + return aOutputs->value; + }; if (fingerprint) { auto search = state.evalCaches.find(fingerprint.value()); if (search == state.evalCaches.end()) { - search = state.evalCaches.emplace(fingerprint.value(), make_ref(fingerprint, state, rootLoader)).first; + search = + state.evalCaches + .emplace(fingerprint.value(), make_ref(fingerprint, state, rootLoader)) + .first; } return search->second; } else { @@ -479,8 +473,7 @@ ref openEvalCache( } } -Installables SourceExprCommand::parseInstallables( - ref store, std::vector ss) +Installables SourceExprCommand::parseInstallables(ref store, std::vector ss) { Installables result; @@ -501,12 +494,10 @@ Installables SourceExprCommand::parseInstallables( if (file == "-") { auto e = state->parseStdin(); state->eval(e, *vFile); - } - else if (file) { + } else if (file) { auto dir = absPath(getCommandBaseDir()); state->evalFile(lookupFileArg(*state, *file, &dir), *vFile); - } - else { + } else { Path dir = absPath(getCommandBaseDir()); auto e = state->parseExprFromString(*expr, state->rootPath(dir)); state->eval(e, *vFile); @@ -515,9 +506,8 @@ Installables SourceExprCommand::parseInstallables( for (auto & s : ss) { auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(s); result.push_back( - make_ref( - InstallableAttrPath::parse( - state, *this, vFile, std::move(prefix), std::move(extendedOutputsSpec)))); + make_ref(InstallableAttrPath::parse( + state, *this, vFile, std::move(prefix), std::move(extendedOutputsSpec)))); } } else { @@ -532,8 +522,9 @@ Installables SourceExprCommand::parseInstallables( if (prefix.find('/') != std::string::npos) { try { - result.push_back(make_ref( - InstallableDerivedPath::parse(store, prefix, extendedOutputsSpec.raw))); + result.push_back( + make_ref( + InstallableDerivedPath::parse(store, prefix, extendedOutputsSpec.raw))); continue; } catch (BadStorePath &) { } catch (...) 
{ @@ -543,9 +534,10 @@ Installables SourceExprCommand::parseInstallables( } try { - auto [flakeRef, fragment] = parseFlakeRefWithFragment( - fetchSettings, std::string { prefix }, absPath(getCommandBaseDir())); - result.push_back(make_ref( + auto [flakeRef, fragment] = + parseFlakeRefWithFragment(fetchSettings, std::string{prefix}, absPath(getCommandBaseDir())); + result.push_back( + make_ref( this, getEvalState(), std::move(flakeRef), @@ -566,8 +558,7 @@ Installables SourceExprCommand::parseInstallables( return result; } -ref SourceExprCommand::parseInstallable( - ref store, const std::string & installable) +ref SourceExprCommand::parseInstallable(ref store, const std::string & installable) { auto installables = parseInstallables(store, {installable}); assert(installables.size() == 1); @@ -578,20 +569,18 @@ static SingleBuiltPath getBuiltPath(ref evalStore, ref store, cons { return std::visit( overloaded{ - [&](const SingleDerivedPath::Opaque & bo) -> SingleBuiltPath { - return SingleBuiltPath::Opaque { bo.path }; - }, + [&](const SingleDerivedPath::Opaque & bo) -> SingleBuiltPath { return SingleBuiltPath::Opaque{bo.path}; }, [&](const SingleDerivedPath::Built & bfd) -> SingleBuiltPath { auto drvPath = getBuiltPath(evalStore, store, *bfd.drvPath); // Resolving this instead of `bfd` will yield the same result, but avoid duplicative work. - SingleDerivedPath::Built truncatedBfd { + SingleDerivedPath::Built truncatedBfd{ .drvPath = makeConstantStorePathRef(drvPath.outPath()), .output = bfd.output, }; auto outputPath = resolveDerivedPath(*store, truncatedBfd, &*evalStore); - return SingleBuiltPath::Built { + return SingleBuiltPath::Built{ .drvPath = make_ref(std::move(drvPath)), - .output = { bfd.output, outputPath }, + .output = {bfd.output, outputPath}, }; }, }, @@ -599,11 +588,7 @@ static SingleBuiltPath getBuiltPath(ref evalStore, ref store, cons } std::vector Installable::build( - ref evalStore, - ref store, - Realise mode, - const Installables & installables, - BuildMode bMode) + ref evalStore, ref store, Realise mode, const Installables & installables, BuildMode bMode) { std::vector res; for (auto & [_, builtPathWithResult] : build2(evalStore, store, mode, installables, bMode)) @@ -611,9 +596,7 @@ std::vector Installable::build( return res; } -static void throwBuildErrors( - std::vector & buildResults, - const Store & store) +static void throwBuildErrors(std::vector & buildResults, const Store & store) { std::vector failed; for (auto & buildResult : buildResults) { @@ -630,10 +613,11 @@ static void throwBuildErrors( StringSet failedPaths; for (; failedResult != failed.end(); failedResult++) { if (!failedResult->errorMsg.empty()) { - logError(ErrorInfo{ - .level = lvlError, - .msg = failedResult->errorMsg, - }); + logError( + ErrorInfo{ + .level = lvlError, + .msg = failedResult->errorMsg, + }); } failedPaths.insert(failedResult->path.to_string(store)); } @@ -643,11 +627,7 @@ static void throwBuildErrors( } std::vector, BuiltPathWithResult>> Installable::build2( - ref evalStore, - ref store, - Realise mode, - const Installables & installables, - BuildMode bMode) + ref evalStore, ref store, Realise mode, const Installables & installables, BuildMode bMode) { if (mode == Realise::Nothing) settings.readOnlyMode = true; @@ -678,22 +658,25 @@ std::vector, BuiltPathWithResult>> Installable::build for (auto & path : pathsToBuild) { for (auto & aux : backmap[path]) { - std::visit(overloaded { - [&](const DerivedPath::Built & bfd) { - auto outputs = resolveDerivedPath(*store, bfd, &*evalStore); - 
res.push_back({aux.installable, { - .path = BuiltPath::Built { - .drvPath = make_ref(getBuiltPath(evalStore, store, *bfd.drvPath)), - .outputs = outputs, - }, - .info = aux.info}}); + std::visit( + overloaded{ + [&](const DerivedPath::Built & bfd) { + auto outputs = resolveDerivedPath(*store, bfd, &*evalStore); + res.push_back( + {aux.installable, + {.path = + BuiltPath::Built{ + .drvPath = + make_ref(getBuiltPath(evalStore, store, *bfd.drvPath)), + .outputs = outputs, + }, + .info = aux.info}}); + }, + [&](const DerivedPath::Opaque & bo) { + res.push_back({aux.installable, {.path = BuiltPath::Opaque{bo.path}, .info = aux.info}}); + }, }, - [&](const DerivedPath::Opaque & bo) { - res.push_back({aux.installable, { - .path = BuiltPath::Opaque { bo.path }, - .info = aux.info}}); - }, - }, path.raw()); + path.raw()); } } @@ -707,26 +690,30 @@ std::vector, BuiltPathWithResult>> Installable::build throwBuildErrors(buildResults, *store); for (auto & buildResult : buildResults) { for (auto & aux : backmap[buildResult.path]) { - std::visit(overloaded { - [&](const DerivedPath::Built & bfd) { - std::map outputs; - for (auto & [outputName, realisation] : buildResult.builtOutputs) - outputs.emplace(outputName, realisation.outPath); - res.push_back({aux.installable, { - .path = BuiltPath::Built { - .drvPath = make_ref(getBuiltPath(evalStore, store, *bfd.drvPath)), - .outputs = outputs, - }, - .info = aux.info, - .result = buildResult}}); + std::visit( + overloaded{ + [&](const DerivedPath::Built & bfd) { + std::map outputs; + for (auto & [outputName, realisation] : buildResult.builtOutputs) + outputs.emplace(outputName, realisation.outPath); + res.push_back( + {aux.installable, + {.path = + BuiltPath::Built{ + .drvPath = + make_ref(getBuiltPath(evalStore, store, *bfd.drvPath)), + .outputs = outputs, + }, + .info = aux.info, + .result = buildResult}}); + }, + [&](const DerivedPath::Opaque & bo) { + res.push_back( + {aux.installable, + {.path = BuiltPath::Opaque{bo.path}, .info = aux.info, .result = buildResult}}); + }, }, - [&](const DerivedPath::Opaque & bo) { - res.push_back({aux.installable, { - .path = BuiltPath::Opaque { bo.path }, - .info = aux.info, - .result = buildResult}}); - }, - }, buildResult.path.raw()); + buildResult.path.raw()); } } @@ -741,11 +728,7 @@ std::vector, BuiltPathWithResult>> Installable::build } BuiltPaths Installable::toBuiltPaths( - ref evalStore, - ref store, - Realise mode, - OperateOn operateOn, - const Installables & installables) + ref evalStore, ref store, Realise mode, OperateOn operateOn, const Installables & installables) { if (operateOn == OperateOn::Output) { BuiltPaths res; @@ -764,10 +747,7 @@ BuiltPaths Installable::toBuiltPaths( } StorePathSet Installable::toStorePathSet( - ref evalStore, - ref store, - Realise mode, OperateOn operateOn, - const Installables & installables) + ref evalStore, ref store, Realise mode, OperateOn operateOn, const Installables & installables) { StorePathSet outPaths; for (auto & path : toBuiltPaths(evalStore, store, mode, operateOn, installables)) { @@ -778,10 +758,7 @@ StorePathSet Installable::toStorePathSet( } StorePaths Installable::toStorePaths( - ref evalStore, - ref store, - Realise mode, OperateOn operateOn, - const Installables & installables) + ref evalStore, ref store, Realise mode, OperateOn operateOn, const Installables & installables) { StorePaths outPaths; for (auto & path : toBuiltPaths(evalStore, store, mode, operateOn, installables)) { @@ -792,10 +769,7 @@ StorePaths Installable::toStorePaths( } StorePath 
Installable::toStorePath( - ref evalStore, - ref store, - Realise mode, OperateOn operateOn, - ref installable) + ref evalStore, ref store, Realise mode, OperateOn operateOn, ref installable) { auto paths = toStorePathSet(evalStore, store, mode, operateOn, {installable}); @@ -805,28 +779,23 @@ StorePath Installable::toStorePath( return *paths.begin(); } -StorePathSet Installable::toDerivations( - ref store, - const Installables & installables, - bool useDeriver) +StorePathSet Installable::toDerivations(ref store, const Installables & installables, bool useDeriver) { StorePathSet drvPaths; for (const auto & i : installables) for (const auto & b : i->toDerivedPaths()) - std::visit(overloaded { - [&](const DerivedPath::Opaque & bo) { - drvPaths.insert( - bo.path.isDerivation() - ? bo.path - : useDeriver - ? getDeriver(store, *i, bo.path) - : throw Error("argument '%s' did not evaluate to a derivation", i->what())); + std::visit( + overloaded{ + [&](const DerivedPath::Opaque & bo) { + drvPaths.insert( + bo.path.isDerivation() ? bo.path + : useDeriver ? getDeriver(store, *i, bo.path) + : throw Error("argument '%s' did not evaluate to a derivation", i->what())); + }, + [&](const DerivedPath::Built & bfd) { drvPaths.insert(resolveDerivedPath(*store, *bfd.drvPath)); }, }, - [&](const DerivedPath::Built & bfd) { - drvPaths.insert(resolveDerivedPath(*store, *bfd.drvPath)); - }, - }, b.path.raw()); + b.path.raw()); return drvPaths; } @@ -861,10 +830,7 @@ std::vector RawInstallablesCommand::getFlakeRefsForCompletion() std::vector res; res.reserve(rawInstallables.size()); for (const auto & i : rawInstallables) - res.push_back(parseFlakeRefWithFragment( - fetchSettings, - expandTilde(i), - absPath(getCommandBaseDir())).first); + res.push_back(parseFlakeRefWithFragment(fetchSettings, expandTilde(i), absPath(getCommandBaseDir())).first); return res; } @@ -883,12 +849,7 @@ void RawInstallablesCommand::run(ref store) std::vector InstallableCommand::getFlakeRefsForCompletion() { - return { - parseFlakeRefWithFragment( - fetchSettings, - expandTilde(_installable), - absPath(getCommandBaseDir())).first - }; + return {parseFlakeRefWithFragment(fetchSettings, expandTilde(_installable), absPath(getCommandBaseDir())).first}; } void InstallablesCommand::run(ref store, std::vector && rawInstallables) @@ -928,4 +889,4 @@ BuiltPaths toBuiltPaths(const std::vector & builtPathsWithR return res; } -} +} // namespace nix diff --git a/src/libcmd/markdown.cc b/src/libcmd/markdown.cc index 41da73c7a..09cd9c1fb 100644 --- a/src/libcmd/markdown.cc +++ b/src/libcmd/markdown.cc @@ -18,25 +18,24 @@ static std::string doRenderMarkdownToTerminal(std::string_view markdown) { int windowWidth = getWindowSize().second; -#if HAVE_LOWDOWN_1_4 - struct lowdown_opts_term opts_term { +# if HAVE_LOWDOWN_1_4 + struct lowdown_opts_term opts_term{ .cols = (size_t) std::max(windowWidth - 5, 60), .hmargin = 0, .vmargin = 0, }; -#endif - struct lowdown_opts opts - { +# endif + struct lowdown_opts opts{ .type = LOWDOWN_TERM, -#if HAVE_LOWDOWN_1_4 +# if HAVE_LOWDOWN_1_4 .term = opts_term, -#endif +# endif .maxdepth = 20, -#if !HAVE_LOWDOWN_1_4 +# if !HAVE_LOWDOWN_1_4 .cols = (size_t) std::max(windowWidth - 5, 60), .hmargin = 0, .vmargin = 0, -#endif +# endif .feat = LOWDOWN_COMMONMARK | LOWDOWN_FENCED | LOWDOWN_DEFLIST | LOWDOWN_TABLES, .oflags = LOWDOWN_TERM_NOLINK, }; diff --git a/src/libcmd/misc-store-flags.cc b/src/libcmd/misc-store-flags.cc index a57ad35ff..fd2211813 100644 --- a/src/libcmd/misc-store-flags.cc +++ 
b/src/libcmd/misc-store-flags.cc @@ -1,7 +1,6 @@ #include "nix/cmd/misc-store-flags.hh" -namespace nix::flag -{ +namespace nix::flag { static void hashFormatCompleter(AddCompletions & completions, size_t index, std::string_view prefix) { @@ -15,27 +14,23 @@ static void hashFormatCompleter(AddCompletions & completions, size_t index, std: Args::Flag hashFormatWithDefault(std::string && longName, HashFormat * hf) { assert(*hf == nix::HashFormat::SRI); - return Args::Flag { - .longName = std::move(longName), - .description = "Hash format (`base16`, `nix32`, `base64`, `sri`). Default: `sri`.", - .labels = {"hash-format"}, - .handler = {[hf](std::string s) { - *hf = parseHashFormat(s); - }}, - .completer = hashFormatCompleter, + return Args::Flag{ + .longName = std::move(longName), + .description = "Hash format (`base16`, `nix32`, `base64`, `sri`). Default: `sri`.", + .labels = {"hash-format"}, + .handler = {[hf](std::string s) { *hf = parseHashFormat(s); }}, + .completer = hashFormatCompleter, }; } Args::Flag hashFormatOpt(std::string && longName, std::optional * ohf) { - return Args::Flag { - .longName = std::move(longName), - .description = "Hash format (`base16`, `nix32`, `base64`, `sri`).", - .labels = {"hash-format"}, - .handler = {[ohf](std::string s) { - *ohf = std::optional{parseHashFormat(s)}; - }}, - .completer = hashFormatCompleter, + return Args::Flag{ + .longName = std::move(longName), + .description = "Hash format (`base16`, `nix32`, `base64`, `sri`).", + .labels = {"hash-format"}, + .handler = {[ohf](std::string s) { *ohf = std::optional{parseHashFormat(s)}; }}, + .completer = hashFormatCompleter, }; } @@ -48,34 +43,31 @@ static void hashAlgoCompleter(AddCompletions & completions, size_t index, std::s Args::Flag hashAlgo(std::string && longName, HashAlgorithm * ha) { - return Args::Flag { - .longName = std::move(longName), - .description = "Hash algorithm (`blake3`, `md5`, `sha1`, `sha256`, or `sha512`).", - .labels = {"hash-algo"}, - .handler = {[ha](std::string s) { - *ha = parseHashAlgo(s); - }}, - .completer = hashAlgoCompleter, + return Args::Flag{ + .longName = std::move(longName), + .description = "Hash algorithm (`blake3`, `md5`, `sha1`, `sha256`, or `sha512`).", + .labels = {"hash-algo"}, + .handler = {[ha](std::string s) { *ha = parseHashAlgo(s); }}, + .completer = hashAlgoCompleter, }; } Args::Flag hashAlgoOpt(std::string && longName, std::optional * oha) { - return Args::Flag { - .longName = std::move(longName), - .description = "Hash algorithm (`blake3`, `md5`, `sha1`, `sha256`, or `sha512`). Can be omitted for SRI hashes.", - .labels = {"hash-algo"}, - .handler = {[oha](std::string s) { - *oha = std::optional{parseHashAlgo(s)}; - }}, - .completer = hashAlgoCompleter, + return Args::Flag{ + .longName = std::move(longName), + .description = + "Hash algorithm (`blake3`, `md5`, `sha1`, `sha256`, or `sha512`). Can be omitted for SRI hashes.", + .labels = {"hash-algo"}, + .handler = {[oha](std::string s) { *oha = std::optional{parseHashAlgo(s)}; }}, + .completer = hashAlgoCompleter, }; } Args::Flag fileIngestionMethod(FileIngestionMethod * method) { - return Args::Flag { - .longName = "mode", + return Args::Flag{ + .longName = "mode", // FIXME indentation carefully made for context, this is messed up. .description = R"( How to compute the hash of the input. @@ -92,16 +84,14 @@ Args::Flag fileIngestionMethod(FileIngestionMethod * method) it to the hash function. 
)", .labels = {"file-ingestion-method"}, - .handler = {[method](std::string s) { - *method = parseFileIngestionMethod(s); - }}, + .handler = {[method](std::string s) { *method = parseFileIngestionMethod(s); }}, }; } Args::Flag contentAddressMethod(ContentAddressMethod * method) { - return Args::Flag { - .longName = "mode", + return Args::Flag{ + .longName = "mode", // FIXME indentation carefully made for context, this is messed up. .description = R"( How to compute the content-address of the store object. @@ -126,10 +116,8 @@ Args::Flag contentAddressMethod(ContentAddressMethod * method) for regular usage prefer `nar` and `flat`. )", .labels = {"content-address-method"}, - .handler = {[method](std::string s) { - *method = ContentAddressMethod::parse(s); - }}, + .handler = {[method](std::string s) { *method = ContentAddressMethod::parse(s); }}, }; } -} +} // namespace nix::flag diff --git a/src/libcmd/network-proxy.cc b/src/libcmd/network-proxy.cc index a4a89685c..6c9f2b073 100644 --- a/src/libcmd/network-proxy.cc +++ b/src/libcmd/network-proxy.cc @@ -47,4 +47,4 @@ bool haveNetworkProxyConnection() return false; } -} +} // namespace nix diff --git a/src/libcmd/repl-interacter.cc b/src/libcmd/repl-interacter.cc index 4de335dd5..c9b435675 100644 --- a/src/libcmd/repl-interacter.cc +++ b/src/libcmd/repl-interacter.cc @@ -5,8 +5,8 @@ #include #if USE_READLINE -#include -#include +# include +# include #else // editline < 1.15.2 don't wrap their API for C++ usage // (added in https://github.com/troglobit/editline/commit/91398ceb3427b730995357e9d120539fb9bb7461). @@ -14,7 +14,7 @@ // For compatibility with these versions, we wrap the API here // (wrapping multiple times on newer versions is no problem). extern "C" { -#include +# include } #endif @@ -35,7 +35,7 @@ void sigintHandler(int signo) { g_signal_received = signo; } -}; +}; // namespace static detail::ReplCompleterMixin * curRepl; // ugly @@ -185,8 +185,7 @@ bool ReadlineLikeInteracter::getLine(std::string & input, ReplPromptType promptT // editline doesn't echo the input to the output when non-interactive, unlike readline // this results in a different behavior when running tests. The echoing is // quite useful for reading the test output, so we add it here. - if (auto e = getEnv("_NIX_TEST_REPL_ECHO"); s && e && *e == "1") - { + if (auto e = getEnv("_NIX_TEST_REPL_ECHO"); s && e && *e == "1") { #if !USE_READLINE // This is probably not right for multi-line input, but we don't use that // in the characterisation tests, so it's fine. 
@@ -207,4 +206,4 @@ ReadlineLikeInteracter::~ReadlineLikeInteracter() write_history(historyFile.c_str()); } -}; +}; // namespace nix diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 8170bd579..ea3f44a7c 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -54,10 +54,7 @@ enum class ProcessLineResult { PromptAgain, }; -struct NixRepl - : AbstractNixRepl - , detail::ReplCompleterMixin - , gc +struct NixRepl : AbstractNixRepl, detail::ReplCompleterMixin, gc { size_t debugTraceIndex; @@ -80,8 +77,12 @@ struct NixRepl std::unique_ptr interacter; - NixRepl(const LookupPath & lookupPath, nix::ref store,ref state, - std::function getValues, RunNix * runNix); + NixRepl( + const LookupPath & lookupPath, + nix::ref store, + ref state, + std::function getValues, + RunNix * runNix); virtual ~NixRepl() = default; ReplExitStatus mainLoop() override; @@ -103,20 +104,22 @@ struct NixRepl void evalString(std::string s, Value & v); void loadDebugTraceEnv(DebugTrace & dt); - void printValue(std::ostream & str, - Value & v, - unsigned int maxDepth = std::numeric_limits::max()) + void printValue(std::ostream & str, Value & v, unsigned int maxDepth = std::numeric_limits::max()) { // Hide the progress bar during printing because it might interfere auto suspension = logger->suspend(); - ::nix::printValue(*state, str, v, PrintOptions { - .ansiColors = true, - .force = true, - .derivationPaths = true, - .maxDepth = maxDepth, - .prettyIndent = 2, - .errors = ErrorPrintBehavior::ThrowTopLevel, - }); + ::nix::printValue( + *state, + str, + v, + PrintOptions{ + .ansiColors = true, + .force = true, + .derivationPaths = true, + .maxDepth = maxDepth, + .prettyIndent = 2, + .errors = ErrorPrintBehavior::ThrowTopLevel, + }); } }; @@ -124,13 +127,17 @@ std::string removeWhitespace(std::string s) { s = chomp(s); size_t n = s.find_first_not_of(" \n\r\t"); - if (n != std::string::npos) s = std::string(s, n); + if (n != std::string::npos) + s = std::string(s, n); return s; } - -NixRepl::NixRepl(const LookupPath & lookupPath, nix::ref store, ref state, - std::function getValues, RunNix * runNix) +NixRepl::NixRepl( + const LookupPath & lookupPath, + nix::ref store, + ref state, + std::function getValues, + RunNix * runNix) : AbstractNixRepl(state) , debugTraceIndex(0) , getValues(getValues) @@ -188,7 +195,8 @@ ReplExitStatus NixRepl::mainLoop() auto suspension = logger->suspend(); // When continuing input from previous lines, don't print a prompt, just align to the same // number of chars as the prompt. - if (!interacter->getLine(input, input.empty() ? ReplPromptType::ReplPrompt : ReplPromptType::ContinuationPrompt)) { + if (!interacter->getLine( + input, input.empty() ? ReplPromptType::ReplPrompt : ReplPromptType::ContinuationPrompt)) { // Ctrl-D should exit the debugger. state->debugStop = false; logger->cout(""); @@ -200,14 +208,14 @@ ReplExitStatus NixRepl::mainLoop() } try { switch (processLine(input)) { - case ProcessLineResult::Quit: - return ReplExitStatus::QuitAll; - case ProcessLineResult::Continue: - return ReplExitStatus::Continue; - case ProcessLineResult::PromptAgain: - break; - default: - unreachable(); + case ProcessLineResult::Quit: + return ReplExitStatus::QuitAll; + case ProcessLineResult::Continue: + return ReplExitStatus::Continue; + case ProcessLineResult::PromptAgain: + break; + default: + unreachable(); } } catch (IncompleteReplExpr &) { continue; @@ -256,7 +264,8 @@ StringSet NixRepl::completePrefix(const std::string & prefix) /* This is a variable name; look it up in the current scope. 
*/ StringSet::iterator i = varNames.lower_bound(cur); while (i != varNames.end()) { - if (i->substr(0, cur.size()) != cur) break; + if (i->substr(0, cur.size()) != cur) + break; completions.insert(prev + *i); i++; } @@ -275,11 +284,15 @@ StringSet NixRepl::completePrefix(const std::string & prefix) Expr * e = parseString(expr); Value v; e->eval(*state, *env, v); - state->forceAttrs(v, noPos, "while evaluating an attrset for the purpose of completion (this error should not be displayed; file an issue?)"); + state->forceAttrs( + v, + noPos, + "while evaluating an attrset for the purpose of completion (this error should not be displayed; file an issue?)"); for (auto & i : *v.attrs()) { std::string_view name = state->symbols[i.name]; - if (name.substr(0, cur2.size()) != cur2) continue; + if (name.substr(0, cur2.size()) != cur2) + continue; completions.insert(concatStrings(prev, expr, ".", name)); } @@ -297,24 +310,23 @@ StringSet NixRepl::completePrefix(const std::string & prefix) return completions; } - // FIXME: DRY and match or use the parser static bool isVarName(std::string_view s) { - if (s.size() == 0) return false; + if (s.size() == 0) + return false; char c = s[0]; - if ((c >= '0' && c <= '9') || c == '-' || c == '\'') return false; + if ((c >= '0' && c <= '9') || c == '-' || c == '\'') + return false; for (auto & i : s) - if (!((i >= 'a' && i <= 'z') || - (i >= 'A' && i <= 'Z') || - (i >= '0' && i <= '9') || - i == '_' || i == '-' || i == '\'')) + if (!((i >= 'a' && i <= 'z') || (i >= 'A' && i <= 'Z') || (i >= '0' && i <= '9') || i == '_' || i == '-' + || i == '\'')) return false; return true; } - -StorePath NixRepl::getDerivationPath(Value & v) { +StorePath NixRepl::getDerivationPath(Value & v) +{ auto packageInfo = getDerivation(*state, v, false); if (!packageInfo) throw Error("expression does not evaluate to a derivation, so I can't build it"); @@ -353,53 +365,50 @@ ProcessLineResult NixRepl::processLine(std::string line) if (line[0] == ':') { size_t p = line.find_first_of(" \n\r\t"); command = line.substr(0, p); - if (p != std::string::npos) arg = removeWhitespace(line.substr(p)); + if (p != std::string::npos) + arg = removeWhitespace(line.substr(p)); } else { arg = line; } if (command == ":?" || command == ":help") { // FIXME: convert to Markdown, include in the 'nix repl' manpage. 
- std::cout - << "The following commands are available:\n" - << "\n" - << " Evaluate and print expression\n" - << " = Bind expression to variable\n" - << " :a, :add Add attributes from resulting set to scope\n" - << " :b Build a derivation\n" - << " :bl Build a derivation, creating GC roots in the\n" - << " working directory\n" - << " :e, :edit Open package or function in $EDITOR\n" - << " :i Build derivation, then install result into\n" - << " current profile\n" - << " :l, :load Load Nix expression and add it to scope\n" - << " :lf, :load-flake Load Nix flake and add it to scope\n" - << " :ll, :last-loaded Show most recently loaded variables added to scope\n" - << " :p, :print Evaluate and print expression recursively\n" - << " Strings are printed directly, without escaping.\n" - << " :q, :quit Exit nix-repl\n" - << " :r, :reload Reload all files\n" - << " :sh Build dependencies of derivation, then start\n" - << " nix-shell\n" - << " :t Describe result of evaluation\n" - << " :u Build derivation, then start nix-shell\n" - << " :doc Show documentation of a builtin function\n" - << " :log Show logs for a derivation\n" - << " :te, :trace-enable [bool] Enable, disable or toggle showing traces for\n" - << " errors\n" - << " :?, :help Brings up this help menu\n" - ; + std::cout << "The following commands are available:\n" + << "\n" + << " Evaluate and print expression\n" + << " = Bind expression to variable\n" + << " :a, :add Add attributes from resulting set to scope\n" + << " :b Build a derivation\n" + << " :bl Build a derivation, creating GC roots in the\n" + << " working directory\n" + << " :e, :edit Open package or function in $EDITOR\n" + << " :i Build derivation, then install result into\n" + << " current profile\n" + << " :l, :load Load Nix expression and add it to scope\n" + << " :lf, :load-flake Load Nix flake and add it to scope\n" + << " :ll, :last-loaded Show most recently loaded variables added to scope\n" + << " :p, :print Evaluate and print expression recursively\n" + << " Strings are printed directly, without escaping.\n" + << " :q, :quit Exit nix-repl\n" + << " :r, :reload Reload all files\n" + << " :sh Build dependencies of derivation, then start\n" + << " nix-shell\n" + << " :t Describe result of evaluation\n" + << " :u Build derivation, then start nix-shell\n" + << " :doc Show documentation of a builtin function\n" + << " :log Show logs for a derivation\n" + << " :te, :trace-enable [bool] Enable, disable or toggle showing traces for\n" + << " errors\n" + << " :?, :help Brings up this help menu\n"; if (state->debugRepl) { - std::cout - << "\n" - << " Debug mode commands\n" - << " :env Show env stack\n" - << " :bt, :backtrace Show trace stack\n" - << " :st Show current trace\n" - << " :st Change to another trace in the stack\n" - << " :c, :continue Go until end of program, exception, or builtins.break\n" - << " :s, :step Go one step\n" - ; + std::cout << "\n" + << " Debug mode commands\n" + << " :env Show env stack\n" + << " :bt, :backtrace Show trace stack\n" + << " :st Show current trace\n" + << " :st Change to another trace in the stack\n" + << " :c, :continue Go until end of program, exception, or builtins.break\n" + << " :s, :step Go one step\n"; } } @@ -424,17 +433,18 @@ ProcessLineResult NixRepl::processLine(std::string line) try { // change the DebugTrace index. debugTraceIndex = stoi(arg); - } catch (...) { } + } catch (...) 
{ + } for (const auto & [idx, i] : enumerate(state->debugTraces)) { - if (idx == debugTraceIndex) { - std::cout << "\n" << ANSI_BLUE << idx << ANSI_NORMAL << ": "; - showDebugTrace(std::cout, state->positions, i); - std::cout << std::endl; - printEnvBindings(*state, i.expr, i.env); - loadDebugTraceEnv(i); - break; - } + if (idx == debugTraceIndex) { + std::cout << "\n" << ANSI_BLUE << idx << ANSI_NORMAL << ": "; + showDebugTrace(std::cout, state->positions, i); + std::cout << std::endl; + printEnvBindings(*state, i.expr, i.env); + loadDebugTraceEnv(i); + break; + } } } @@ -478,7 +488,7 @@ ProcessLineResult NixRepl::processLine(std::string line) Value v; evalString(arg, v); - const auto [path, line] = [&] () -> std::pair { + const auto [path, line] = [&]() -> std::pair { if (v.type() == nPath || v.type() == nString) { NixStringContext context; auto path = state->coerceToPath(noPos, v, context, "while evaluating the filename to edit"); @@ -502,7 +512,7 @@ ProcessLineResult NixRepl::processLine(std::string line) // runProgram redirects stdout to a StringSink, // using runProgram2 to allow editors to display their UI - runProgram2(RunOptions { .program = editor, .lookupPath = true, .args = args , .isInteractive = true }); + runProgram2(RunOptions{.program = editor, .lookupPath = true, .args = args, .isInteractive = true}); // Reload right after exiting the editor state->resetFileCache(); @@ -533,9 +543,9 @@ ProcessLineResult NixRepl::processLine(std::string line) if (command == ":b" || command == ":bl") { state->store->buildPaths({ - DerivedPath::Built { + DerivedPath::Built{ .drvPath = makeConstantStorePathRef(drvPath), - .outputs = OutputsSpec::All { }, + .outputs = OutputsSpec::All{}, }, }); auto drv = state->store->readDerivation(drvPath); @@ -554,9 +564,7 @@ ProcessLineResult NixRepl::processLine(std::string line) runNix("nix-env", {"-i", drvPathRaw}); } else if (command == ":log") { settings.readOnlyMode = true; - Finally roModeReset([&]() { - settings.readOnlyMode = false; - }); + Finally roModeReset([&]() { settings.readOnlyMode = false; }); auto subs = getDefaultSubstituters(); subs.push_front(state->store); @@ -579,7 +587,8 @@ ProcessLineResult NixRepl::processLine(std::string line) break; } } - if (!foundLog) throw Error("build log of '%s' is not available", drvPathRaw); + if (!foundLog) + throw Error("build log of '%s' is not available", drvPathRaw); } else { runNix("nix-shell", {drvPathRaw}); } @@ -642,9 +651,8 @@ ProcessLineResult NixRepl::processLine(std::string line) for (auto & arg : args) arg = "*" + arg + "*"; - markdown += - "**Synopsis:** `builtins." + (std::string) (*doc->name) + "` " - + concatStringsSep(" ", args) + "\n\n"; + markdown += "**Synopsis:** `builtins." 
+ (std::string) (*doc->name) + "` " + concatStringsSep(" ", args) + + "\n\n"; } markdown += stripIndentation(doc->doc); @@ -685,11 +693,8 @@ ProcessLineResult NixRepl::processLine(std::string line) else { size_t p = line.find('='); std::string name; - if (p != std::string::npos && - p < line.size() && - line[p + 1] != '=' && - isVarName(name = removeWhitespace(line.substr(0, p)))) - { + if (p != std::string::npos && p < line.size() && line[p + 1] != '=' + && isVarName(name = removeWhitespace(line.substr(0, p)))) { Expr * e = parseString(line.substr(p + 1)); Value & v(*state->allocValue()); v.mkThunk(env, e); @@ -737,9 +742,13 @@ void NixRepl::loadFlake(const std::string & flakeRefS) Value v; - flake::callFlake(*state, - flake::lockFlake(flakeSettings, *state, flakeRef, - flake::LockFlags { + flake::callFlake( + *state, + flake::lockFlake( + flakeSettings, + *state, + flakeRef, + flake::LockFlags{ .updateLockFile = false, .useRegistries = !evalSettings.pureEval, .allowUnlocked = !evalSettings.pureEval, @@ -748,7 +757,6 @@ void NixRepl::loadFlake(const std::string & flakeRefS) addAttrsToScope(v); } - void NixRepl::initEnv() { env = &state->allocEnv(envSize); @@ -771,7 +779,6 @@ void NixRepl::showLastLoaded() } } - void NixRepl::reloadFilesAndFlakes() { initEnv(); @@ -780,7 +787,6 @@ void NixRepl::reloadFilesAndFlakes() loadFlakes(); } - void NixRepl::loadFiles() { Strings old = loadedFiles; @@ -797,7 +803,6 @@ void NixRepl::loadFiles() } } - void NixRepl::loadFlakes() { Strings old = loadedFlakes; @@ -809,10 +814,12 @@ void NixRepl::loadFlakes() } } - void NixRepl::addAttrsToScope(Value & attrs) { - state->forceAttrs(attrs, [&]() { return attrs.determinePos(noPos); }, "while evaluating an attribute set to be merged in the global scope"); + state->forceAttrs( + attrs, + [&]() { return attrs.determinePos(noPos); }, + "while evaluating an attribute set to be merged in the global scope"); if (displ + attrs.attrs()->size() >= envSize) throw Error("environment full; cannot add more variables"); @@ -847,7 +854,6 @@ void NixRepl::addAttrsToScope(Value & attrs) notice("... and %1% more; view with :ll", attrs.attrs()->size() - max_print); } - void NixRepl::addVarToScope(const Symbol name, Value & v) { if (displ >= envSize) @@ -860,13 +866,11 @@ void NixRepl::addVarToScope(const Symbol name, Value & v) varNames.emplace(state->symbols[name]); } - Expr * NixRepl::parseString(std::string s) { return state->parseExprFromString(std::move(s), state->rootPath("."), staticEnv); } - void NixRepl::evalString(std::string s, Value & v) { Expr * e; @@ -884,46 +888,39 @@ void NixRepl::evalString(std::string s, Value & v) state->forceValue(v, v.determinePos(noPos)); } - void NixRepl::runNix(Path program, const Strings & args, const std::optional & input) { if (runNixPtr) (*runNixPtr)(program, args, input); else - throw Error("Cannot run '%s' because no method of calling the Nix CLI was provided. This is a configuration problem pertaining to how this program was built. See Nix 2.25 release notes", program); + throw Error( + "Cannot run '%s' because no method of calling the Nix CLI was provided. This is a configuration problem pertaining to how this program was built. 
See Nix 2.25 release notes", + program); } - std::unique_ptr AbstractNixRepl::create( - const LookupPath & lookupPath, nix::ref store, ref state, - std::function getValues, RunNix * runNix) + const LookupPath & lookupPath, + nix::ref store, + ref state, + std::function getValues, + RunNix * runNix) { - return std::make_unique( - lookupPath, - std::move(store), - state, - getValues, - runNix - ); + return std::make_unique(lookupPath, std::move(store), state, getValues, runNix); } - -ReplExitStatus AbstractNixRepl::runSimple( - ref evalState, - const ValMap & extraEnv) +ReplExitStatus AbstractNixRepl::runSimple(ref evalState, const ValMap & extraEnv) { - auto getValues = [&]()->NixRepl::AnnotatedValues{ + auto getValues = [&]() -> NixRepl::AnnotatedValues { NixRepl::AnnotatedValues values; return values; }; LookupPath lookupPath = {}; auto repl = std::make_unique( - lookupPath, - openStore(), - evalState, - getValues, - /*runNix=*/nullptr - ); + lookupPath, + openStore(), + evalState, + getValues, + /*runNix=*/nullptr); repl->initEnv(); @@ -934,4 +931,4 @@ ReplExitStatus AbstractNixRepl::runSimple( return repl->mainLoop(); } -} +} // namespace nix diff --git a/src/libexpr-c/nix_api_expr.cc b/src/libexpr-c/nix_api_expr.cc index efaebf0e7..02e901de9 100644 --- a/src/libexpr-c/nix_api_expr.cc +++ b/src/libexpr-c/nix_api_expr.cc @@ -31,13 +31,11 @@ * @param init Function that takes a T* and returns the initializer for T * @return Pointer to allocated and initialized object */ -template +template static T * unsafe_new_with_self(F && init) { // Allocate - void * p = ::operator new( - sizeof(T), - static_cast(alignof(T))); + void * p = ::operator new(sizeof(T), static_cast(alignof(T))); // Initialize with placement new return new (p) T(init(static_cast(p))); } @@ -86,12 +84,13 @@ nix_err nix_value_call(nix_c_context * context, EvalState * state, Value * fn, n NIXC_CATCH_ERRS } -nix_err nix_value_call_multi(nix_c_context * context, EvalState * state, nix_value * fn, size_t nargs, nix_value ** args, nix_value * value) +nix_err nix_value_call_multi( + nix_c_context * context, EvalState * state, nix_value * fn, size_t nargs, nix_value ** args, nix_value * value) { if (context) context->last_err_code = NIX_OK; try { - state->state.callFunction(fn->value, {(nix::Value * *) args, nargs}, value->value, nix::noPos); + state->state.callFunction(fn->value, {(nix::Value **) args, nargs}, value->value, nix::noPos); state->state.forceValue(value->value, nix::noPos); } NIXC_CATCH_ERRS @@ -152,7 +151,8 @@ nix_err nix_eval_state_builder_load(nix_c_context * context, nix_eval_state_buil NIXC_CATCH_ERRS } -nix_err nix_eval_state_builder_set_lookup_path(nix_c_context * context, nix_eval_state_builder * builder, const char ** lookupPath_c) +nix_err nix_eval_state_builder_set_lookup_path( + nix_c_context * context, nix_eval_state_builder * builder, const char ** lookupPath_c) { if (context) context->last_err_code = NIX_OK; @@ -175,11 +175,7 @@ EvalState * nix_eval_state_build(nix_c_context * context, nix_eval_state_builder return EvalState{ .fetchSettings = std::move(builder->fetchSettings), .settings = std::move(builder->settings), - .state = nix::EvalState( - builder->lookupPath, - builder->store, - self->fetchSettings, - self->settings), + .state = nix::EvalState(builder->lookupPath, builder->store, self->fetchSettings, self->settings), }; }); } @@ -195,11 +191,10 @@ EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath_c if (nix_eval_state_builder_load(context, builder) != NIX_OK) return 
nullptr; - if (nix_eval_state_builder_set_lookup_path(context, builder, lookupPath_c) - != NIX_OK) + if (nix_eval_state_builder_set_lookup_path(context, builder, lookupPath_c) != NIX_OK) return nullptr; - auto *state = nix_eval_state_build(context, builder); + auto * state = nix_eval_state_build(context, builder); nix_eval_state_builder_free(builder); return state; } @@ -265,20 +260,23 @@ nix_err nix_gc_incref(nix_c_context * context, const void *) context->last_err_code = NIX_OK; return NIX_OK; } + nix_err nix_gc_decref(nix_c_context * context, const void *) { if (context) context->last_err_code = NIX_OK; return NIX_OK; } + void nix_gc_now() {} #endif -nix_err nix_value_incref(nix_c_context * context, nix_value *x) +nix_err nix_value_incref(nix_c_context * context, nix_value * x) { return nix_gc_incref(context, (const void *) x); } -nix_err nix_value_decref(nix_c_context * context, nix_value *x) + +nix_err nix_value_decref(nix_c_context * context, nix_value * x) { return nix_gc_decref(context, (const void *) x); } diff --git a/src/libexpr-c/nix_api_external.cc b/src/libexpr-c/nix_api_external.cc index 04d2e52b5..ecb67cfb4 100644 --- a/src/libexpr-c/nix_api_external.cc +++ b/src/libexpr-c/nix_api_external.cc @@ -48,11 +48,13 @@ class NixCExternalValue : public nix::ExternalValueBase public: NixCExternalValue(NixCExternalValueDesc & desc, void * v) : desc(desc) - , v(v){}; + , v(v) {}; + void * get_ptr() { return v; } + /** * Print out the value */ @@ -155,11 +157,17 @@ public: } nix_string_context ctx{context}; desc.printValueAsXML( - v, (EvalState *) &state, strict, location, &doc, &ctx, &drvsSeen, + v, + (EvalState *) &state, + strict, + location, + &doc, + &ctx, + &drvsSeen, *reinterpret_cast(&pos)); } - virtual ~NixCExternalValue() override{}; + virtual ~NixCExternalValue() override {}; }; ExternalValue * nix_create_external_value(nix_c_context * context, NixCExternalValueDesc * desc, void * v) diff --git a/src/libexpr-test-support/include/nix/expr/tests/libexpr.hh b/src/libexpr-test-support/include/nix/expr/tests/libexpr.hh index 48c96ae2c..4cf985e15 100644 --- a/src/libexpr-test-support/include/nix/expr/tests/libexpr.hh +++ b/src/libexpr-test-support/include/nix/expr/tests/libexpr.hh @@ -16,141 +16,159 @@ #include "nix/store/tests/libstore.hh" namespace nix { - class LibExprTest : public LibStoreTest { - public: - static void SetUpTestSuite() { - LibStoreTest::SetUpTestSuite(); - initGC(); - } - - protected: - LibExprTest() - : LibStoreTest() - , state({}, store, fetchSettings, evalSettings, nullptr) - { - evalSettings.nixPath = {}; - } - Value eval(std::string input, bool forceValue = true) { - Value v; - Expr * e = state.parseExprFromString(input, state.rootPath(CanonPath::root)); - assert(e); - state.eval(e, v); - if (forceValue) - state.forceValue(v, noPos); - return v; - } - - Value * maybeThunk(std::string input, bool forceValue = true) { - Expr * e = state.parseExprFromString(input, state.rootPath(CanonPath::root)); - assert(e); - return e->maybeThunk(state, state.baseEnv); - } - - Symbol createSymbol(const char * value) { - return state.symbols.create(value); - } - - bool readOnlyMode = true; - fetchers::Settings fetchSettings{}; - EvalSettings evalSettings{readOnlyMode}; - EvalState state; - }; - - MATCHER(IsListType, "") { - return arg != nList; +class LibExprTest : public LibStoreTest +{ +public: + static void SetUpTestSuite() + { + LibStoreTest::SetUpTestSuite(); + initGC(); } - MATCHER(IsList, "") { - return arg.type() == nList; +protected: + LibExprTest() + : 
LibStoreTest() + , state({}, store, fetchSettings, evalSettings, nullptr) + { + evalSettings.nixPath = {}; } - MATCHER(IsString, "") { - return arg.type() == nString; + Value eval(std::string input, bool forceValue = true) + { + Value v; + Expr * e = state.parseExprFromString(input, state.rootPath(CanonPath::root)); + assert(e); + state.eval(e, v); + if (forceValue) + state.forceValue(v, noPos); + return v; } - MATCHER(IsNull, "") { - return arg.type() == nNull; + Value * maybeThunk(std::string input, bool forceValue = true) + { + Expr * e = state.parseExprFromString(input, state.rootPath(CanonPath::root)); + assert(e); + return e->maybeThunk(state, state.baseEnv); } - MATCHER(IsThunk, "") { - return arg.type() == nThunk; + Symbol createSymbol(const char * value) + { + return state.symbols.create(value); } - MATCHER(IsAttrs, "") { - return arg.type() == nAttrs; - } + bool readOnlyMode = true; + fetchers::Settings fetchSettings{}; + EvalSettings evalSettings{readOnlyMode}; + EvalState state; +}; - MATCHER_P(IsStringEq, s, fmt("The string is equal to \"%1%\"", s)) { - if (arg.type() != nString) { +MATCHER(IsListType, "") +{ + return arg != nList; +} + +MATCHER(IsList, "") +{ + return arg.type() == nList; +} + +MATCHER(IsString, "") +{ + return arg.type() == nString; +} + +MATCHER(IsNull, "") +{ + return arg.type() == nNull; +} + +MATCHER(IsThunk, "") +{ + return arg.type() == nThunk; +} + +MATCHER(IsAttrs, "") +{ + return arg.type() == nAttrs; +} + +MATCHER_P(IsStringEq, s, fmt("The string is equal to \"%1%\"", s)) +{ + if (arg.type() != nString) { + return false; + } + return std::string_view(arg.c_str()) == s; +} + +MATCHER_P(IsIntEq, v, fmt("The string is equal to \"%1%\"", v)) +{ + if (arg.type() != nInt) { + return false; + } + return arg.integer().value == v; +} + +MATCHER_P(IsFloatEq, v, fmt("The float is equal to \"%1%\"", v)) +{ + if (arg.type() != nFloat) { + return false; + } + return arg.fpoint() == v; +} + +MATCHER(IsTrue, "") +{ + if (arg.type() != nBool) { + return false; + } + return arg.boolean() == true; +} + +MATCHER(IsFalse, "") +{ + if (arg.type() != nBool) { + return false; + } + return arg.boolean() == false; +} + +MATCHER_P(IsPathEq, p, fmt("Is a path equal to \"%1%\"", p)) +{ + if (arg.type() != nPath) { + *result_listener << "Expected a path got " << arg.type(); + return false; + } else { + auto path = arg.path(); + if (path.path != CanonPath(p)) { + *result_listener << "Expected a path that equals \"" << p << "\" but got: " << path.path; return false; } - return std::string_view(arg.c_str()) == s; } + return true; +} - MATCHER_P(IsIntEq, v, fmt("The string is equal to \"%1%\"", v)) { - if (arg.type() != nInt) { - return false; - } - return arg.integer().value == v; +MATCHER_P(IsListOfSize, n, fmt("Is a list of size [%1%]", n)) +{ + if (arg.type() != nList) { + *result_listener << "Expected list got " << arg.type(); + return false; + } else if (arg.listSize() != (size_t) n) { + *result_listener << "Expected as list of size " << n << " got " << arg.listSize(); + return false; } + return true; +} - MATCHER_P(IsFloatEq, v, fmt("The float is equal to \"%1%\"", v)) { - if (arg.type() != nFloat) { - return false; - } - return arg.fpoint() == v; +MATCHER_P(IsAttrsOfSize, n, fmt("Is a set of size [%1%]", n)) +{ + if (arg.type() != nAttrs) { + *result_listener << "Expected set got " << arg.type(); + return false; + } else if (arg.attrs()->size() != (size_t) n) { + *result_listener << "Expected a set with " << n << " attributes but got " << arg.attrs()->size(); + return 
false; } - - MATCHER(IsTrue, "") { - if (arg.type() != nBool) { - return false; - } - return arg.boolean() == true; - } - - MATCHER(IsFalse, "") { - if (arg.type() != nBool) { - return false; - } - return arg.boolean() == false; - } - - MATCHER_P(IsPathEq, p, fmt("Is a path equal to \"%1%\"", p)) { - if (arg.type() != nPath) { - *result_listener << "Expected a path got " << arg.type(); - return false; - } else { - auto path = arg.path(); - if (path.path != CanonPath(p)) { - *result_listener << "Expected a path that equals \"" << p << "\" but got: " << path.path; - return false; - } - } - return true; - } - - - MATCHER_P(IsListOfSize, n, fmt("Is a list of size [%1%]", n)) { - if (arg.type() != nList) { - *result_listener << "Expected list got " << arg.type(); - return false; - } else if (arg.listSize() != (size_t)n) { - *result_listener << "Expected as list of size " << n << " got " << arg.listSize(); - return false; - } - return true; - } - - MATCHER_P(IsAttrsOfSize, n, fmt("Is a set of size [%1%]", n)) { - if (arg.type() != nAttrs) { - *result_listener << "Expected set got " << arg.type(); - return false; - } else if (arg.attrs()->size() != (size_t) n) { - *result_listener << "Expected a set with " << n << " attributes but got " << arg.attrs()->size(); - return false; - } - return true; - } - + return true; +} } /* namespace nix */ diff --git a/src/libexpr-test-support/include/nix/expr/tests/nix_api_expr.hh b/src/libexpr-test-support/include/nix/expr/tests/nix_api_expr.hh index 3e5aec313..376761d76 100644 --- a/src/libexpr-test-support/include/nix/expr/tests/nix_api_expr.hh +++ b/src/libexpr-test-support/include/nix/expr/tests/nix_api_expr.hh @@ -18,6 +18,7 @@ protected: state = nix_state_create(nullptr, nullptr, store); value = nix_alloc_value(nullptr, state); } + ~nix_api_expr_test() { nix_gc_decref(nullptr, value); @@ -28,4 +29,4 @@ protected: nix_value * value; }; -} +} // namespace nixC diff --git a/src/libexpr-test-support/include/nix/expr/tests/value/context.hh b/src/libexpr-test-support/include/nix/expr/tests/value/context.hh index a6a851d3a..68a0b8dea 100644 --- a/src/libexpr-test-support/include/nix/expr/tests/value/context.hh +++ b/src/libexpr-test-support/include/nix/expr/tests/value/context.hh @@ -9,23 +9,27 @@ namespace rc { using namespace nix; template<> -struct Arbitrary { +struct Arbitrary +{ static Gen arbitrary(); }; template<> -struct Arbitrary { +struct Arbitrary +{ static Gen arbitrary(); }; template<> -struct Arbitrary { +struct Arbitrary +{ static Gen arbitrary(); }; template<> -struct Arbitrary { +struct Arbitrary +{ static Gen arbitrary(); }; -} +} // namespace rc diff --git a/src/libexpr-test-support/tests/value/context.cc b/src/libexpr-test-support/tests/value/context.cc index 51ff1b2ae..b24d83a5a 100644 --- a/src/libexpr-test-support/tests/value/context.cc +++ b/src/libexpr-test-support/tests/value/context.cc @@ -36,4 +36,4 @@ Gen Arbitrary::arbitrary() }); } -} +} // namespace rc diff --git a/src/libexpr-tests/derived-path.cc b/src/libexpr-tests/derived-path.cc index 9cc5d5371..a4bd29c1c 100644 --- a/src/libexpr-tests/derived-path.cc +++ b/src/libexpr-tests/derived-path.cc @@ -8,36 +8,30 @@ namespace nix { // Testing of trivial expressions -class DerivedPathExpressionTest : public LibExprTest {}; +class DerivedPathExpressionTest : public LibExprTest +{}; // FIXME: `RC_GTEST_FIXTURE_PROP` isn't calling `SetUpTestSuite` because it is // no a real fixture. 
// // See https://github.com/emil-e/rapidcheck/blob/master/doc/gtest.md#rc_gtest_fixture_propfixture-name-args -TEST_F(DerivedPathExpressionTest, force_init) -{ -} +TEST_F(DerivedPathExpressionTest, force_init) {} #ifndef COVERAGE -RC_GTEST_FIXTURE_PROP( - DerivedPathExpressionTest, - prop_opaque_path_round_trip, - (const SingleDerivedPath::Opaque & o)) +RC_GTEST_FIXTURE_PROP(DerivedPathExpressionTest, prop_opaque_path_round_trip, (const SingleDerivedPath::Opaque & o)) { auto * v = state.allocValue(); state.mkStorePathString(o.path, *v); auto d = state.coerceToSingleDerivedPath(noPos, *v, ""); - RC_ASSERT(SingleDerivedPath { o } == d); + RC_ASSERT(SingleDerivedPath{o} == d); } // TODO use DerivedPath::Built for parameter once it supports a single output // path only. RC_GTEST_FIXTURE_PROP( - DerivedPathExpressionTest, - prop_derived_path_built_placeholder_round_trip, - (const SingleDerivedPath::Built & b)) + DerivedPathExpressionTest, prop_derived_path_built_placeholder_round_trip, (const SingleDerivedPath::Built & b)) { /** * We set these in tests rather than the regular globals so we don't have @@ -49,7 +43,7 @@ RC_GTEST_FIXTURE_PROP( auto * v = state.allocValue(); state.mkOutputString(*v, b, std::nullopt, mockXpSettings); auto [d, _] = state.coerceToSingleDerivedPathUnchecked(noPos, *v, "", mockXpSettings); - RC_ASSERT(SingleDerivedPath { b } == d); + RC_ASSERT(SingleDerivedPath{b} == d); } RC_GTEST_FIXTURE_PROP( @@ -63,7 +57,7 @@ RC_GTEST_FIXTURE_PROP( auto * v = state.allocValue(); state.mkOutputString(*v, b, outPath, mockXpSettings); auto [d, _] = state.coerceToSingleDerivedPathUnchecked(noPos, *v, "", mockXpSettings); - RC_ASSERT(SingleDerivedPath { b } == d); + RC_ASSERT(SingleDerivedPath{b} == d); } #endif diff --git a/src/libexpr-tests/error_traces.cc b/src/libexpr-tests/error_traces.cc index 32e49efe6..7e7b5eb84 100644 --- a/src/libexpr-tests/error_traces.cc +++ b/src/libexpr-tests/error_traces.cc @@ -5,1374 +5,1358 @@ namespace nix { - using namespace testing; +using namespace testing; - // Testing eval of PrimOp's - class ErrorTraceTest : public LibExprTest { }; +// Testing eval of PrimOp's +class ErrorTraceTest : public LibExprTest +{}; - TEST_F(ErrorTraceTest, TraceBuilder) { - ASSERT_THROW( - state.error("puppy").debugThrow(), - EvalError - ); +TEST_F(ErrorTraceTest, TraceBuilder) +{ + ASSERT_THROW(state.error("puppy").debugThrow(), EvalError); - ASSERT_THROW( - state.error("puppy").withTrace(noPos, "doggy").debugThrow(), - EvalError - ); + ASSERT_THROW(state.error("puppy").withTrace(noPos, "doggy").debugThrow(), EvalError); - ASSERT_THROW( + ASSERT_THROW( + try { try { - try { - state.error("puppy").withTrace(noPos, "doggy").debugThrow(); - } catch (Error & e) { - e.addTrace(state.positions[noPos], "beans"); - throw; - } - } catch (BaseError & e) { - ASSERT_EQ(PrintToString(e.info().msg), - PrintToString(HintFmt("puppy"))); - auto trace = e.info().traces.rbegin(); - ASSERT_EQ(e.info().traces.size(), 2u); - ASSERT_EQ(PrintToString(trace->hint), - PrintToString(HintFmt("doggy"))); - trace++; - ASSERT_EQ(PrintToString(trace->hint), - PrintToString(HintFmt("beans"))); + state.error("puppy").withTrace(noPos, "doggy").debugThrow(); + } catch (Error & e) { + e.addTrace(state.positions[noPos], "beans"); throw; } - , EvalError - ); - } - - TEST_F(ErrorTraceTest, NestedThrows) { - try { - state.error("puppy").withTrace(noPos, "doggy").debugThrow(); } catch (BaseError & e) { - try { - state.error("beans").debugThrow(); - } catch (Error & e2) { - e.addTrace(state.positions[noPos], 
"beans2"); - //e2.addTrace(state.positions[noPos], "Something", ""); - ASSERT_TRUE(e.info().traces.size() == 2u); - ASSERT_TRUE(e2.info().traces.size() == 0u); - ASSERT_FALSE(&e.info() == &e2.info()); - } + ASSERT_EQ(PrintToString(e.info().msg), PrintToString(HintFmt("puppy"))); + auto trace = e.info().traces.rbegin(); + ASSERT_EQ(e.info().traces.size(), 2u); + ASSERT_EQ(PrintToString(trace->hint), PrintToString(HintFmt("doggy"))); + trace++; + ASSERT_EQ(PrintToString(trace->hint), PrintToString(HintFmt("beans"))); + throw; + }, + EvalError); +} + +TEST_F(ErrorTraceTest, NestedThrows) +{ + try { + state.error("puppy").withTrace(noPos, "doggy").debugThrow(); + } catch (BaseError & e) { + try { + state.error("beans").debugThrow(); + } catch (Error & e2) { + e.addTrace(state.positions[noPos], "beans2"); + // e2.addTrace(state.positions[noPos], "Something", ""); + ASSERT_TRUE(e.info().traces.size() == 2u); + ASSERT_TRUE(e2.info().traces.size() == 0u); + ASSERT_FALSE(&e.info() == &e2.info()); } } +} -#define ASSERT_TRACE1(args, type, message) \ - ASSERT_THROW( \ - std::string expr(args); \ - std::string name = expr.substr(0, expr.find(" ")); \ - try { \ - Value v = eval("builtins." args); \ - state.forceValueDeep(v); \ - } catch (BaseError & e) { \ - ASSERT_EQ(PrintToString(e.info().msg), \ - PrintToString(message)); \ - ASSERT_EQ(e.info().traces.size(), 1u) << "while testing " args << std::endl << e.what(); \ - auto trace = e.info().traces.rbegin(); \ - ASSERT_EQ(PrintToString(trace->hint), \ - PrintToString(HintFmt("while calling the '%s' builtin", name))); \ - throw; \ - } \ - , type \ - ) +#define ASSERT_TRACE1(args, type, message) \ + ASSERT_THROW( \ + std::string expr(args); std::string name = expr.substr(0, expr.find(" ")); try { \ + Value v = eval("builtins." args); \ + state.forceValueDeep(v); \ + } catch (BaseError & e) { \ + ASSERT_EQ(PrintToString(e.info().msg), PrintToString(message)); \ + ASSERT_EQ(e.info().traces.size(), 1u) << "while testing " args << std::endl << e.what(); \ + auto trace = e.info().traces.rbegin(); \ + ASSERT_EQ(PrintToString(trace->hint), PrintToString(HintFmt("while calling the '%s' builtin", name))); \ + throw; \ + }, \ + type) -#define ASSERT_TRACE2(args, type, message, context) \ - ASSERT_THROW( \ - std::string expr(args); \ - std::string name = expr.substr(0, expr.find(" ")); \ - try { \ - Value v = eval("builtins." args); \ - state.forceValueDeep(v); \ - } catch (BaseError & e) { \ - ASSERT_EQ(PrintToString(e.info().msg), \ - PrintToString(message)); \ - ASSERT_EQ(e.info().traces.size(), 2u) << "while testing " args << std::endl << e.what(); \ - auto trace = e.info().traces.rbegin(); \ - ASSERT_EQ(PrintToString(trace->hint), \ - PrintToString(context)); \ - ++trace; \ - ASSERT_EQ(PrintToString(trace->hint), \ - PrintToString(HintFmt("while calling the '%s' builtin", name))); \ - throw; \ - } \ - , type \ - ) +#define ASSERT_TRACE2(args, type, message, context) \ + ASSERT_THROW( \ + std::string expr(args); std::string name = expr.substr(0, expr.find(" ")); try { \ + Value v = eval("builtins." 
args); \ + state.forceValueDeep(v); \ + } catch (BaseError & e) { \ + ASSERT_EQ(PrintToString(e.info().msg), PrintToString(message)); \ + ASSERT_EQ(e.info().traces.size(), 2u) << "while testing " args << std::endl << e.what(); \ + auto trace = e.info().traces.rbegin(); \ + ASSERT_EQ(PrintToString(trace->hint), PrintToString(context)); \ + ++trace; \ + ASSERT_EQ(PrintToString(trace->hint), PrintToString(HintFmt("while calling the '%s' builtin", name))); \ + throw; \ + }, \ + type) -#define ASSERT_TRACE3(args, type, message, context1, context2) \ - ASSERT_THROW( \ - std::string expr(args); \ - std::string name = expr.substr(0, expr.find(" ")); \ - try { \ - Value v = eval("builtins." args); \ - state.forceValueDeep(v); \ - } catch (BaseError & e) { \ - ASSERT_EQ(PrintToString(e.info().msg), \ - PrintToString(message)); \ - ASSERT_EQ(e.info().traces.size(), 3u) << "while testing " args << std::endl << e.what(); \ - auto trace = e.info().traces.rbegin(); \ - ASSERT_EQ(PrintToString(trace->hint), \ - PrintToString(context1)); \ - ++trace; \ - ASSERT_EQ(PrintToString(trace->hint), \ - PrintToString(context2)); \ - ++trace; \ - ASSERT_EQ(PrintToString(trace->hint), \ - PrintToString(HintFmt("while calling the '%s' builtin", name))); \ - throw; \ - } \ - , type \ - ) +#define ASSERT_TRACE3(args, type, message, context1, context2) \ + ASSERT_THROW( \ + std::string expr(args); std::string name = expr.substr(0, expr.find(" ")); try { \ + Value v = eval("builtins." args); \ + state.forceValueDeep(v); \ + } catch (BaseError & e) { \ + ASSERT_EQ(PrintToString(e.info().msg), PrintToString(message)); \ + ASSERT_EQ(e.info().traces.size(), 3u) << "while testing " args << std::endl << e.what(); \ + auto trace = e.info().traces.rbegin(); \ + ASSERT_EQ(PrintToString(trace->hint), PrintToString(context1)); \ + ++trace; \ + ASSERT_EQ(PrintToString(trace->hint), PrintToString(context2)); \ + ++trace; \ + ASSERT_EQ(PrintToString(trace->hint), PrintToString(HintFmt("while calling the '%s' builtin", name))); \ + throw; \ + }, \ + type) -#define ASSERT_TRACE4(args, type, message, context1, context2, context3) \ - ASSERT_THROW( \ - std::string expr(args); \ - std::string name = expr.substr(0, expr.find(" ")); \ - try { \ - Value v = eval("builtins." args); \ - state.forceValueDeep(v); \ - } catch (BaseError & e) { \ - ASSERT_EQ(PrintToString(e.info().msg), \ - PrintToString(message)); \ - ASSERT_EQ(e.info().traces.size(), 4u) << "while testing " args << std::endl << e.what(); \ - auto trace = e.info().traces.rbegin(); \ - ASSERT_EQ(PrintToString(trace->hint), \ - PrintToString(context1)); \ - ++trace; \ - ASSERT_EQ(PrintToString(trace->hint), \ - PrintToString(context2)); \ - ++trace; \ - ASSERT_EQ(PrintToString(trace->hint), \ - PrintToString(context3)); \ - ++trace; \ - ASSERT_EQ(PrintToString(trace->hint), \ - PrintToString(HintFmt("while calling the '%s' builtin", name))); \ - throw; \ - } \ - , type \ - ) +#define ASSERT_TRACE4(args, type, message, context1, context2, context3) \ + ASSERT_THROW( \ + std::string expr(args); std::string name = expr.substr(0, expr.find(" ")); try { \ + Value v = eval("builtins." 
args); \ + state.forceValueDeep(v); \ + } catch (BaseError & e) { \ + ASSERT_EQ(PrintToString(e.info().msg), PrintToString(message)); \ + ASSERT_EQ(e.info().traces.size(), 4u) << "while testing " args << std::endl << e.what(); \ + auto trace = e.info().traces.rbegin(); \ + ASSERT_EQ(PrintToString(trace->hint), PrintToString(context1)); \ + ++trace; \ + ASSERT_EQ(PrintToString(trace->hint), PrintToString(context2)); \ + ++trace; \ + ASSERT_EQ(PrintToString(trace->hint), PrintToString(context3)); \ + ++trace; \ + ASSERT_EQ(PrintToString(trace->hint), PrintToString(HintFmt("while calling the '%s' builtin", name))); \ + throw; \ + }, \ + type) // We assume that expr starts with "builtins.derivationStrict { name =", // otherwise the name attribute position (1, 29) would be invalid. -#define DERIVATION_TRACE_HINTFMT(name) \ - HintFmt("while evaluating derivation '%s'\n" \ - " whose name attribute is located at %s", \ - name, Pos(1, 29, Pos::String{.source = make_ref(expr)})) +#define DERIVATION_TRACE_HINTFMT(name) \ + HintFmt( \ + "while evaluating derivation '%s'\n" \ + " whose name attribute is located at %s", \ + name, \ + Pos(1, 29, Pos::String{.source = make_ref(expr)})) // To keep things simple, we also assume that derivation name is "foo". -#define ASSERT_DERIVATION_TRACE1(args, type, message) \ - ASSERT_TRACE2(args, type, message, DERIVATION_TRACE_HINTFMT("foo")) -#define ASSERT_DERIVATION_TRACE2(args, type, message, context) \ - ASSERT_TRACE3(args, type, message, context, DERIVATION_TRACE_HINTFMT("foo")) -#define ASSERT_DERIVATION_TRACE3(args, type, message, context1, context2) \ - ASSERT_TRACE4(args, type, message, context1, context2, DERIVATION_TRACE_HINTFMT("foo")) - - TEST_F(ErrorTraceTest, genericClosure) { - ASSERT_TRACE2("genericClosure 1", - TypeError, - HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.genericClosure")); - - ASSERT_TRACE2("genericClosure {}", - TypeError, - HintFmt("attribute '%s' missing", "startSet"), - HintFmt("in the attrset passed as argument to builtins.genericClosure")); - - ASSERT_TRACE2("genericClosure { startSet = 1; }", - TypeError, - HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure")); - - ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = true; }", - TypeError, - HintFmt("expected a function but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt("while evaluating the 'operator' attribute passed as argument to builtins.genericClosure")); - - ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: true; }", - TypeError, - HintFmt("expected a list but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt("while evaluating the return value of the `operator` passed to builtins.genericClosure")); - - ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ true ]; }", - TypeError, - HintFmt("expected a set but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); - - ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ {} ]; }", - TypeError, - HintFmt("attribute '%s' missing", "key"), - HintFmt("in one of the attrsets generated by (or 
initially passed to) builtins.genericClosure")); - - ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [{ key = ''a''; }]; }", - EvalError, - HintFmt("cannot compare %s with %s", "a string", "an integer"), - HintFmt("while comparing the `key` attributes of two genericClosure elements")); - - ASSERT_TRACE2("genericClosure { startSet = [ true ]; operator = item: [{ key = ''a''; }]; }", - TypeError, - HintFmt("expected a set but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); - - } - - - TEST_F(ErrorTraceTest, replaceStrings) { - ASSERT_TRACE2("replaceStrings 0 0 {}", - TypeError, - HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "0" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.replaceStrings")); - - ASSERT_TRACE2("replaceStrings [] 0 {}", - TypeError, - HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "0" ANSI_NORMAL)), - HintFmt("while evaluating the second argument passed to builtins.replaceStrings")); - - ASSERT_TRACE1("replaceStrings [ 0 ] [] {}", - EvalError, - HintFmt("'from' and 'to' arguments passed to builtins.replaceStrings have different lengths")); - - ASSERT_TRACE2("replaceStrings [ 1 ] [ \"new\" ] {}", - TypeError, - HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating one of the strings to replace passed to builtins.replaceStrings")); - - ASSERT_TRACE2("replaceStrings [ \"oo\" ] [ true ] \"foo\"", - TypeError, - HintFmt("expected a string but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt("while evaluating one of the replacement strings passed to builtins.replaceStrings")); - - ASSERT_TRACE2("replaceStrings [ \"old\" ] [ \"new\" ] {}", - TypeError, - HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), - HintFmt("while evaluating the third argument passed to builtins.replaceStrings")); - - } - - - TEST_F(ErrorTraceTest, scopedImport) { - } - - - TEST_F(ErrorTraceTest, import) { - } - - - TEST_F(ErrorTraceTest, typeOf) { - } - - - TEST_F(ErrorTraceTest, isNull) { - } - - - TEST_F(ErrorTraceTest, isFunction) { - } - - - TEST_F(ErrorTraceTest, isInt) { - } - - - TEST_F(ErrorTraceTest, isFloat) { - } - - - TEST_F(ErrorTraceTest, isString) { - } - - - TEST_F(ErrorTraceTest, isBool) { - } - - - TEST_F(ErrorTraceTest, isPath) { - } - - - TEST_F(ErrorTraceTest, break) { - } - - - TEST_F(ErrorTraceTest, abort) { - } - - - TEST_F(ErrorTraceTest, throw) { - } - - - TEST_F(ErrorTraceTest, addErrorContext) { - } - - - TEST_F(ErrorTraceTest, ceil) { - ASSERT_TRACE2("ceil \"foo\"", - TypeError, - HintFmt("expected a float but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.ceil")); - - } - - - TEST_F(ErrorTraceTest, floor) { - ASSERT_TRACE2("floor \"foo\"", - TypeError, - HintFmt("expected a float but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.floor")); - - } - - - TEST_F(ErrorTraceTest, tryEval) { - } - - - TEST_F(ErrorTraceTest, getEnv) { - ASSERT_TRACE2("getEnv [ ]", - TypeError, - HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), - HintFmt("while evaluating the first argument passed to 
builtins.getEnv")); - - } - - - TEST_F(ErrorTraceTest, seq) { - } - - - TEST_F(ErrorTraceTest, deepSeq) { - } - - - TEST_F(ErrorTraceTest, trace) { - } - - - TEST_F(ErrorTraceTest, placeholder) { - ASSERT_TRACE2("placeholder []", - TypeError, - HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), - HintFmt("while evaluating the first argument passed to builtins.placeholder")); - - } - - - TEST_F(ErrorTraceTest, toPath) { - ASSERT_TRACE2("toPath []", - TypeError, - HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")), - HintFmt("while evaluating the first argument passed to builtins.toPath")); - - ASSERT_TRACE2("toPath \"foo\"", - EvalError, - HintFmt("string '%s' doesn't represent an absolute path", "foo"), - HintFmt("while evaluating the first argument passed to builtins.toPath")); - - } - - - TEST_F(ErrorTraceTest, storePath) { - ASSERT_TRACE2("storePath true", - TypeError, - HintFmt("cannot coerce %s to a string: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to 'builtins.storePath'")); - - } - - - TEST_F(ErrorTraceTest, pathExists) { - ASSERT_TRACE2("pathExists []", - TypeError, - HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")), - HintFmt("while realising the context of a path")); - - ASSERT_TRACE2("pathExists \"zorglub\"", - EvalError, - HintFmt("string '%s' doesn't represent an absolute path", "zorglub"), - HintFmt("while realising the context of a path")); - - } - - - TEST_F(ErrorTraceTest, baseNameOf) { - ASSERT_TRACE2("baseNameOf []", - TypeError, - HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")), - HintFmt("while evaluating the first argument passed to builtins.baseNameOf")); - - } - - - TEST_F(ErrorTraceTest, dirOf) { - } - - - TEST_F(ErrorTraceTest, readFile) { - } - - - TEST_F(ErrorTraceTest, findFile) { - } - - - TEST_F(ErrorTraceTest, hashFile) { - } - - - TEST_F(ErrorTraceTest, readDir) { - } - - - TEST_F(ErrorTraceTest, toXML) { - } - - - TEST_F(ErrorTraceTest, toJSON) { - } - - - TEST_F(ErrorTraceTest, fromJSON) { - } - - - TEST_F(ErrorTraceTest, toFile) { - } - - - TEST_F(ErrorTraceTest, filterSource) { - ASSERT_TRACE2("filterSource [] []", - TypeError, - HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")), - HintFmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); - - ASSERT_TRACE2("filterSource [] \"foo\"", - EvalError, - HintFmt("string '%s' doesn't represent an absolute path", "foo"), - HintFmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); - - ASSERT_TRACE2("filterSource [] ./.", - TypeError, - HintFmt("expected a function but found %s: %s", "a list", Uncolored("[ ]")), - HintFmt("while evaluating the first argument passed to builtins.filterSource")); - - // Unsupported by store "dummy" - - // ASSERT_TRACE2("filterSource (_: 1) ./.", - // TypeError, - // HintFmt("attempt to call something which is not a function but %s", "an integer"), - // HintFmt("while adding path '/home/layus/projects/nix'")); - - // ASSERT_TRACE2("filterSource (_: _: 1) ./.", - // TypeError, - // HintFmt("expected a Boolean but found %s: %s", "an integer", "1"), - // HintFmt("while evaluating the return value of the path filter function")); - - } - - - TEST_F(ErrorTraceTest, path) { - } - - - TEST_F(ErrorTraceTest, attrNames) { - ASSERT_TRACE2("attrNames []", - TypeError, - HintFmt("expected a set but found %s: %s", "a list", 
Uncolored("[ ]")), - HintFmt("while evaluating the argument passed to builtins.attrNames")); - - } - - - TEST_F(ErrorTraceTest, attrValues) { - ASSERT_TRACE2("attrValues []", - TypeError, - HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), - HintFmt("while evaluating the argument passed to builtins.attrValues")); - - } - - - TEST_F(ErrorTraceTest, getAttr) { - ASSERT_TRACE2("getAttr [] []", - TypeError, - HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), - HintFmt("while evaluating the first argument passed to builtins.getAttr")); - - ASSERT_TRACE2("getAttr \"foo\" []", - TypeError, - HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), - HintFmt("while evaluating the second argument passed to builtins.getAttr")); - - ASSERT_TRACE2("getAttr \"foo\" {}", - TypeError, - HintFmt("attribute '%s' missing", "foo"), - HintFmt("in the attribute set under consideration")); - - } - - - TEST_F(ErrorTraceTest, unsafeGetAttrPos) { - } - - - TEST_F(ErrorTraceTest, hasAttr) { - ASSERT_TRACE2("hasAttr [] []", - TypeError, - HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), - HintFmt("while evaluating the first argument passed to builtins.hasAttr")); - - ASSERT_TRACE2("hasAttr \"foo\" []", - TypeError, - HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), - HintFmt("while evaluating the second argument passed to builtins.hasAttr")); - - } - - - TEST_F(ErrorTraceTest, isAttrs) { - } - - - TEST_F(ErrorTraceTest, removeAttrs) { - ASSERT_TRACE2("removeAttrs \"\" \"\"", - TypeError, - HintFmt("expected a set but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.removeAttrs")); - - ASSERT_TRACE2("removeAttrs \"\" [ 1 ]", - TypeError, - HintFmt("expected a set but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.removeAttrs")); - - ASSERT_TRACE2("removeAttrs \"\" [ \"1\" ]", - TypeError, - HintFmt("expected a set but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.removeAttrs")); - - } - - - TEST_F(ErrorTraceTest, listToAttrs) { - ASSERT_TRACE2("listToAttrs 1", - TypeError, - HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the argument passed to builtins.listToAttrs")); - - ASSERT_TRACE2("listToAttrs [ 1 ]", - TypeError, - HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating an element of the list passed to builtins.listToAttrs")); - - ASSERT_TRACE2("listToAttrs [ {} ]", - TypeError, - HintFmt("attribute '%s' missing", "name"), - HintFmt("in a {name=...; value=...;} pair")); - - ASSERT_TRACE2("listToAttrs [ { name = 1; } ]", - TypeError, - HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the `name` attribute of an element of the list passed to builtins.listToAttrs")); - - ASSERT_TRACE2("listToAttrs [ { name = \"foo\"; } ]", - TypeError, - HintFmt("attribute '%s' missing", "value"), - HintFmt("in a {name=...; value=...;} pair")); - - } - - - TEST_F(ErrorTraceTest, intersectAttrs) { - ASSERT_TRACE2("intersectAttrs [] []", - TypeError, - HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), - 
HintFmt("while evaluating the first argument passed to builtins.intersectAttrs")); - - ASSERT_TRACE2("intersectAttrs {} []", - TypeError, - HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), - HintFmt("while evaluating the second argument passed to builtins.intersectAttrs")); - - } - - - TEST_F(ErrorTraceTest, catAttrs) { - ASSERT_TRACE2("catAttrs [] {}", - TypeError, - HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), - HintFmt("while evaluating the first argument passed to builtins.catAttrs")); - - ASSERT_TRACE2("catAttrs \"foo\" {}", - TypeError, - HintFmt("expected a list but found %s: %s", "a set", Uncolored("{ }")), - HintFmt("while evaluating the second argument passed to builtins.catAttrs")); - - ASSERT_TRACE2("catAttrs \"foo\" [ 1 ]", - TypeError, - HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); - - ASSERT_TRACE2("catAttrs \"foo\" [ { foo = 1; } 1 { bar = 5;} ]", - TypeError, - HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); - - } - - - TEST_F(ErrorTraceTest, functionArgs) { - ASSERT_TRACE1("functionArgs {}", - TypeError, - HintFmt("'functionArgs' requires a function")); - - } - - - TEST_F(ErrorTraceTest, mapAttrs) { - ASSERT_TRACE2("mapAttrs [] []", - TypeError, - HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), - HintFmt("while evaluating the second argument passed to builtins.mapAttrs")); - - // XXX: deferred - // ASSERT_TRACE2("mapAttrs \"\" { foo.bar = 1; }", - // TypeError, - // HintFmt("attempt to call something which is not a function but %s", "a string"), - // HintFmt("while evaluating the attribute 'foo'")); - - // ASSERT_TRACE2("mapAttrs (x: x + \"1\") { foo.bar = 1; }", - // TypeError, - // HintFmt("attempt to call something which is not a function but %s", "a string"), - // HintFmt("while evaluating the attribute 'foo'")); - - // ASSERT_TRACE2("mapAttrs (x: y: x + 1) { foo.bar = 1; }", - // TypeError, - // HintFmt("cannot coerce %s to a string", "an integer"), - // HintFmt("while evaluating a path segment")); - - } - - - TEST_F(ErrorTraceTest, zipAttrsWith) { - ASSERT_TRACE2("zipAttrsWith [] [ 1 ]", - TypeError, - HintFmt("expected a function but found %s: %s", "a list", Uncolored("[ ]")), - HintFmt("while evaluating the first argument passed to builtins.zipAttrsWith")); - - ASSERT_TRACE2("zipAttrsWith (_: 1) [ 1 ]", - TypeError, - HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating a value of the list passed as second argument to builtins.zipAttrsWith")); - - // XXX: How to properly tell that the function takes two arguments ? - // The same question also applies to sort, and maybe others. - // Due to laziness, we only create a thunk, and it fails later on. 
- // ASSERT_TRACE2("zipAttrsWith (_: 1) [ { foo = 1; } ]", - // TypeError, - // HintFmt("attempt to call something which is not a function but %s", "an integer"), - // HintFmt("while evaluating the attribute 'foo'")); - - // XXX: Also deferred deeply - // ASSERT_TRACE2("zipAttrsWith (a: b: a + b) [ { foo = 1; } { foo = 2; } ]", - // TypeError, - // HintFmt("cannot coerce %s to a string", "a list"), - // HintFmt("while evaluating a path segment")); - - } - - - TEST_F(ErrorTraceTest, isList) { - } - - - TEST_F(ErrorTraceTest, elemAt) { - ASSERT_TRACE2("elemAt \"foo\" (-1)", - TypeError, - HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to 'builtins.elemAt'")); - - ASSERT_TRACE1("elemAt [] (-1)", - Error, - HintFmt("'builtins.elemAt' called with index %d on a list of size %d", -1, 0)); - - ASSERT_TRACE1("elemAt [\"foo\"] 3", - Error, - HintFmt("'builtins.elemAt' called with index %d on a list of size %d", 3, 1)); - - } - - - TEST_F(ErrorTraceTest, head) { - ASSERT_TRACE2("head 1", - TypeError, - HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to 'builtins.head'")); - - ASSERT_TRACE1("head []", - Error, - HintFmt("'builtins.head' called on an empty list")); - - } - - - TEST_F(ErrorTraceTest, tail) { - ASSERT_TRACE2("tail 1", - TypeError, - HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to 'builtins.tail'")); - - ASSERT_TRACE1("tail []", - Error, - HintFmt("'builtins.tail' called on an empty list")); - - } - - - TEST_F(ErrorTraceTest, map) { - ASSERT_TRACE2("map 1 \"foo\"", - TypeError, - HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the second argument passed to builtins.map")); - - ASSERT_TRACE2("map 1 [ 1 ]", - TypeError, - HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.map")); - - } - - - TEST_F(ErrorTraceTest, filter) { - ASSERT_TRACE2("filter 1 \"foo\"", - TypeError, - HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the second argument passed to builtins.filter")); - - ASSERT_TRACE2("filter 1 [ \"foo\" ]", - TypeError, - HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.filter")); - - ASSERT_TRACE2("filter (_: 5) [ \"foo\" ]", - TypeError, - HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "5" ANSI_NORMAL)), - HintFmt("while evaluating the return value of the filtering function passed to builtins.filter")); - - } - - - TEST_F(ErrorTraceTest, elem) { - ASSERT_TRACE2("elem 1 \"foo\"", - TypeError, - HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the second argument passed to builtins.elem")); - - } - - - TEST_F(ErrorTraceTest, concatLists) { - ASSERT_TRACE2("concatLists 1", - TypeError, - HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.concatLists")); - 
- ASSERT_TRACE2("concatLists [ 1 ]", - TypeError, - HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating a value of the list passed to builtins.concatLists")); - - ASSERT_TRACE2("concatLists [ [1] \"foo\" ]", - TypeError, - HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating a value of the list passed to builtins.concatLists")); - - } - - - TEST_F(ErrorTraceTest, length) { - ASSERT_TRACE2("length 1", - TypeError, - HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.length")); - - ASSERT_TRACE2("length \"foo\"", - TypeError, - HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.length")); - - } - - - TEST_F(ErrorTraceTest, foldlPrime) { - ASSERT_TRACE2("foldl' 1 \"foo\" true", - TypeError, - HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.foldlStrict")); - - ASSERT_TRACE2("foldl' (_: 1) \"foo\" true", - TypeError, - HintFmt("expected a list but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt("while evaluating the third argument passed to builtins.foldlStrict")); - - ASSERT_TRACE1("foldl' (_: 1) \"foo\" [ true ]", - TypeError, - HintFmt("attempt to call something which is not a function but %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL))); - - ASSERT_TRACE2("foldl' (a: b: a && b) \"foo\" [ true ]", - TypeError, - HintFmt("expected a Boolean but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("in the left operand of the AND (&&) operator")); - - } - - - TEST_F(ErrorTraceTest, any) { - ASSERT_TRACE2("any 1 \"foo\"", - TypeError, - HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.any")); - - ASSERT_TRACE2("any (_: 1) \"foo\"", - TypeError, - HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the second argument passed to builtins.any")); - - ASSERT_TRACE2("any (_: 1) [ \"foo\" ]", - TypeError, - HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the return value of the function passed to builtins.any")); - - } - - - TEST_F(ErrorTraceTest, all) { - ASSERT_TRACE2("all 1 \"foo\"", - TypeError, - HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.all")); - - ASSERT_TRACE2("all (_: 1) \"foo\"", - TypeError, - HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the second argument passed to builtins.all")); - - ASSERT_TRACE2("all (_: 1) [ \"foo\" ]", - TypeError, - HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the return value of the function passed to builtins.all")); - - } - - - TEST_F(ErrorTraceTest, genList) { - ASSERT_TRACE2("genList 1 \"foo\"", - TypeError, - HintFmt("expected an 
integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the second argument passed to builtins.genList")); - - ASSERT_TRACE2("genList 1 2", - TypeError, - HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.genList")); - - // XXX: deferred - // ASSERT_TRACE2("genList (x: x + \"foo\") 2 #TODO", - // TypeError, - // HintFmt("cannot add %s to an integer", "a string"), - // HintFmt("while evaluating anonymous lambda")); - - ASSERT_TRACE1("genList false (-3)", - EvalError, - HintFmt("cannot create list of size %d", -3)); - - } - - - TEST_F(ErrorTraceTest, sort) { - ASSERT_TRACE2("sort 1 \"foo\"", - TypeError, - HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the second argument passed to builtins.sort")); - - ASSERT_TRACE2("sort 1 [ \"foo\" ]", - TypeError, - HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.sort")); - - ASSERT_TRACE1("sort (_: 1) [ \"foo\" \"bar\" ]", - TypeError, - HintFmt("attempt to call something which is not a function but %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL))); - - ASSERT_TRACE2("sort (_: _: 1) [ \"foo\" \"bar\" ]", - TypeError, - HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the return value of the sorting function passed to builtins.sort")); - - // XXX: Trace too deep, need better asserts - // ASSERT_TRACE1("sort (a: b: a <= b) [ \"foo\" {} ] # TODO", - // TypeError, - // HintFmt("cannot compare %s with %s", "a string", "a set")); - - // ASSERT_TRACE1("sort (a: b: a <= b) [ {} {} ] # TODO", - // TypeError, - // HintFmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); - - } - - - TEST_F(ErrorTraceTest, partition) { - ASSERT_TRACE2("partition 1 \"foo\"", - TypeError, - HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.partition")); - - ASSERT_TRACE2("partition (_: 1) \"foo\"", - TypeError, - HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the second argument passed to builtins.partition")); - - ASSERT_TRACE2("partition (_: 1) [ \"foo\" ]", - TypeError, - HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the return value of the partition function passed to builtins.partition")); - - } - - - TEST_F(ErrorTraceTest, groupBy) { - ASSERT_TRACE2("groupBy 1 \"foo\"", - TypeError, - HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.groupBy")); - - ASSERT_TRACE2("groupBy (_: 1) \"foo\"", - TypeError, - HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the second argument passed to builtins.groupBy")); - - ASSERT_TRACE2("groupBy (x: x) [ \"foo\" \"bar\" 1 ]", - TypeError, - HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the 
return value of the grouping function passed to builtins.groupBy")); - - } - - - TEST_F(ErrorTraceTest, concatMap) { - ASSERT_TRACE2("concatMap 1 \"foo\"", - TypeError, - HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.concatMap")); - - ASSERT_TRACE2("concatMap (x: 1) \"foo\"", - TypeError, - HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the second argument passed to builtins.concatMap")); - - ASSERT_TRACE2("concatMap (x: 1) [ \"foo\" ] # TODO", - TypeError, - HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the return value of the function passed to builtins.concatMap")); - - ASSERT_TRACE2("concatMap (x: \"foo\") [ 1 2 ] # TODO", - TypeError, - HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the return value of the function passed to builtins.concatMap")); - - } - - - TEST_F(ErrorTraceTest, add) { - ASSERT_TRACE2("add \"foo\" 1", - TypeError, - HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the first argument of the addition")); - - ASSERT_TRACE2("add 1 \"foo\"", - TypeError, - HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the second argument of the addition")); - - } - - - TEST_F(ErrorTraceTest, sub) { - ASSERT_TRACE2("sub \"foo\" 1", - TypeError, - HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the first argument of the subtraction")); - - ASSERT_TRACE2("sub 1 \"foo\"", - TypeError, - HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the second argument of the subtraction")); - - } - - - TEST_F(ErrorTraceTest, mul) { - ASSERT_TRACE2("mul \"foo\" 1", - TypeError, - HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the first argument of the multiplication")); - - ASSERT_TRACE2("mul 1 \"foo\"", - TypeError, - HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the second argument of the multiplication")); - - } - - - TEST_F(ErrorTraceTest, div) { - ASSERT_TRACE2("div \"foo\" 1 # TODO: an integer was expected -> a number", - TypeError, - HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the first operand of the division")); - - ASSERT_TRACE2("div 1 \"foo\"", - TypeError, - HintFmt("expected a float but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the second operand of the division")); - - ASSERT_TRACE1("div \"foo\" 0", - EvalError, - HintFmt("division by zero")); - - } - - - TEST_F(ErrorTraceTest, bitAnd) { - ASSERT_TRACE2("bitAnd 1.1 2", - TypeError, - HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "1.1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.bitAnd")); - - ASSERT_TRACE2("bitAnd 1 2.2", - TypeError, - 
HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "2.2" ANSI_NORMAL)), - HintFmt("while evaluating the second argument passed to builtins.bitAnd")); - - } - - - TEST_F(ErrorTraceTest, bitOr) { - ASSERT_TRACE2("bitOr 1.1 2", - TypeError, - HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "1.1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.bitOr")); - - ASSERT_TRACE2("bitOr 1 2.2", - TypeError, - HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "2.2" ANSI_NORMAL)), - HintFmt("while evaluating the second argument passed to builtins.bitOr")); - - } - - - TEST_F(ErrorTraceTest, bitXor) { - ASSERT_TRACE2("bitXor 1.1 2", - TypeError, - HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "1.1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.bitXor")); - - ASSERT_TRACE2("bitXor 1 2.2", - TypeError, - HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "2.2" ANSI_NORMAL)), - HintFmt("while evaluating the second argument passed to builtins.bitXor")); - - } - - - TEST_F(ErrorTraceTest, lessThan) { - ASSERT_TRACE1("lessThan 1 \"foo\"", - EvalError, - HintFmt("cannot compare %s with %s", "an integer", "a string")); - - ASSERT_TRACE1("lessThan {} {}", - EvalError, - HintFmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); - - ASSERT_TRACE2("lessThan [ 1 2 ] [ \"foo\" ]", - EvalError, - HintFmt("cannot compare %s with %s", "an integer", "a string"), - HintFmt("while comparing two list elements")); - - } - - - TEST_F(ErrorTraceTest, toString) { - ASSERT_TRACE2("toString { a = 1; }", - TypeError, - HintFmt("cannot coerce %s to a string: %s", "a set", Uncolored("{ a = " ANSI_CYAN "1" ANSI_NORMAL "; }")), - HintFmt("while evaluating the first argument passed to builtins.toString")); - - } - - - TEST_F(ErrorTraceTest, substring) { - ASSERT_TRACE2("substring {} \"foo\" true", - TypeError, - HintFmt("expected an integer but found %s: %s", "a set", Uncolored("{ }")), - HintFmt("while evaluating the first argument (the start offset) passed to builtins.substring")); - - ASSERT_TRACE2("substring 3 \"foo\" true", - TypeError, - HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - HintFmt("while evaluating the second argument (the substring length) passed to builtins.substring")); - - ASSERT_TRACE2("substring 0 3 {}", - TypeError, - HintFmt("cannot coerce %s to a string: %s", "a set", Uncolored("{ }")), - HintFmt("while evaluating the third argument (the string) passed to builtins.substring")); - - ASSERT_TRACE1("substring (-3) 3 \"sometext\"", - EvalError, - HintFmt("negative start position in 'substring'")); - - } - - - TEST_F(ErrorTraceTest, stringLength) { - ASSERT_TRACE2("stringLength {} # TODO: context is missing ???", - TypeError, - HintFmt("cannot coerce %s to a string: %s", "a set", Uncolored("{ }")), - HintFmt("while evaluating the argument passed to builtins.stringLength")); - - } - - - TEST_F(ErrorTraceTest, hashString) { - ASSERT_TRACE2("hashString 1 {}", - TypeError, - HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.hashString")); - - ASSERT_TRACE1("hashString \"foo\" \"content\"", - UsageError, - HintFmt("unknown hash algorithm '%s', expect 'blake3', 'md5', 'sha1', 'sha256', or 'sha512'", 
"foo")); - - ASSERT_TRACE2("hashString \"sha256\" {}", - TypeError, - HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), - HintFmt("while evaluating the second argument passed to builtins.hashString")); - - } - - - TEST_F(ErrorTraceTest, match) { - ASSERT_TRACE2("match 1 {}", - TypeError, - HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.match")); - - ASSERT_TRACE2("match \"foo\" {}", - TypeError, - HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), - HintFmt("while evaluating the second argument passed to builtins.match")); - - ASSERT_TRACE1("match \"(.*\" \"\"", - EvalError, - HintFmt("invalid regular expression '%s'", "(.*")); - - } - - - TEST_F(ErrorTraceTest, split) { - ASSERT_TRACE2("split 1 {}", - TypeError, - HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.split")); - - ASSERT_TRACE2("split \"foo\" {}", - TypeError, - HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), - HintFmt("while evaluating the second argument passed to builtins.split")); - - ASSERT_TRACE1("split \"f(o*o\" \"1foo2\"", - EvalError, - HintFmt("invalid regular expression '%s'", "f(o*o")); - - } - - - TEST_F(ErrorTraceTest, concatStringsSep) { - ASSERT_TRACE2("concatStringsSep 1 {}", - TypeError, - HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument (the separator string) passed to builtins.concatStringsSep")); - - ASSERT_TRACE2("concatStringsSep \"foo\" {}", - TypeError, - HintFmt("expected a list but found %s: %s", "a set", Uncolored("{ }")), - HintFmt("while evaluating the second argument (the list of strings to concat) passed to builtins.concatStringsSep")); - - ASSERT_TRACE2("concatStringsSep \"foo\" [ 1 2 {} ] # TODO: coerce to string is buggy", - TypeError, - HintFmt("cannot coerce %s to a string: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep")); - - } - - - TEST_F(ErrorTraceTest, parseDrvName) { - ASSERT_TRACE2("parseDrvName 1", - TypeError, - HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.parseDrvName")); - - } - - - TEST_F(ErrorTraceTest, compareVersions) { - ASSERT_TRACE2("compareVersions 1 {}", - TypeError, - HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.compareVersions")); - - ASSERT_TRACE2("compareVersions \"abd\" {}", - TypeError, - HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), - HintFmt("while evaluating the second argument passed to builtins.compareVersions")); - - } - - - TEST_F(ErrorTraceTest, splitVersion) { - ASSERT_TRACE2("splitVersion 1", - TypeError, - HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.splitVersion")); - - } - - - TEST_F(ErrorTraceTest, traceVerbose) { - } - - - TEST_F(ErrorTraceTest, derivationStrict) { - ASSERT_TRACE2("derivationStrict \"\"", - TypeError, - HintFmt("expected a set but found 
%s: %s", "a string", "\"\""), - HintFmt("while evaluating the argument passed to builtins.derivationStrict")); - - ASSERT_TRACE2("derivationStrict {}", - TypeError, - HintFmt("attribute '%s' missing", "name"), - HintFmt("in the attrset passed as argument to builtins.derivationStrict")); - - ASSERT_TRACE3("derivationStrict { name = 1; }", - TypeError, - HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the `name` attribute passed to builtins.derivationStrict"), - HintFmt("while evaluating the derivation attribute 'name'")); - - ASSERT_DERIVATION_TRACE1("derivationStrict { name = \"foo\"; }", - EvalError, - HintFmt("required attribute 'builder' missing")); - - ASSERT_DERIVATION_TRACE2("derivationStrict { name = \"foo\"; builder = 1; __structuredAttrs = 15; }", - TypeError, - HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "15" ANSI_NORMAL)), - HintFmt("while evaluating the `__structuredAttrs` attribute passed to builtins.derivationStrict")); - - ASSERT_DERIVATION_TRACE2("derivationStrict { name = \"foo\"; builder = 1; __ignoreNulls = 15; }", - TypeError, - HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "15" ANSI_NORMAL)), - HintFmt("while evaluating the `__ignoreNulls` attribute passed to builtins.derivationStrict")); - - ASSERT_DERIVATION_TRACE2("derivationStrict { name = \"foo\"; builder = 1; outputHashMode = 15; }", - EvalError, - HintFmt("invalid value '%s' for 'outputHashMode' attribute", "15"), - HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputHashMode", "foo")); - - ASSERT_DERIVATION_TRACE2("derivationStrict { name = \"foo\"; builder = 1; outputHashMode = \"custom\"; }", - EvalError, - HintFmt("invalid value '%s' for 'outputHashMode' attribute", "custom"), - HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputHashMode", "foo")); - - ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = {}; }", - TypeError, - HintFmt("cannot coerce %s to a string: { }", "a set"), - HintFmt(""), - HintFmt("while evaluating attribute '%s' of derivation '%s'", "system", "foo")); - - ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = {}; }", - TypeError, - HintFmt("cannot coerce %s to a string: { }", "a set"), - HintFmt(""), - HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputs", "foo")); - - ASSERT_DERIVATION_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"drvPath\"; }", - EvalError, - HintFmt("invalid derivation output name 'drvPath'"), - HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputs", "foo")); - - ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; outputs = \"out\"; __structuredAttrs = true; }", - EvalError, - HintFmt("expected a list but found %s: %s", "a string", "\"out\""), - HintFmt(""), - HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputs", "foo")); - - ASSERT_DERIVATION_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = []; }", - EvalError, - HintFmt("derivation cannot have an empty set of outputs"), - HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputs", "foo")); - - ASSERT_DERIVATION_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = [ \"drvPath\" ]; }", - EvalError, - HintFmt("invalid derivation output name 'drvPath'"), - HintFmt("while evaluating attribute '%s' of 
derivation '%s'", "outputs", "foo")); - - ASSERT_DERIVATION_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = [ \"out\" \"out\" ]; }", - EvalError, - HintFmt("duplicate derivation output '%s'", "out"), - HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputs", "foo")); - - ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __contentAddressed = \"true\"; }", - TypeError, - HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), - HintFmt(""), - HintFmt("while evaluating attribute '%s' of derivation '%s'", "__contentAddressed", "foo")); - - ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; }", - TypeError, - HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), - HintFmt(""), - HintFmt("while evaluating attribute '%s' of derivation '%s'", "__impure", "foo")); - - ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; }", - TypeError, - HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), - HintFmt(""), - HintFmt("while evaluating attribute '%s' of derivation '%s'", "__impure", "foo")); - - ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = \"foo\"; }", - TypeError, - HintFmt("expected a list but found %s: %s", "a string", "\"foo\""), - HintFmt(""), - HintFmt("while evaluating attribute '%s' of derivation '%s'", "args", "foo")); - - ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ {} ]; }", - TypeError, - HintFmt("cannot coerce %s to a string: { }", "a set"), - HintFmt("while evaluating an element of the argument list"), - HintFmt("while evaluating attribute '%s' of derivation '%s'", "args", "foo")); - - ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ \"a\" {} ]; }", - TypeError, - HintFmt("cannot coerce %s to a string: { }", "a set"), - HintFmt("while evaluating an element of the argument list"), - HintFmt("while evaluating attribute '%s' of derivation '%s'", "args", "foo")); - - ASSERT_DERIVATION_TRACE3("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; FOO = {}; }", - TypeError, - HintFmt("cannot coerce %s to a string: { }", "a set"), - HintFmt(""), - HintFmt("while evaluating attribute '%s' of derivation '%s'", "FOO", "foo")); - } +#define ASSERT_DERIVATION_TRACE1(args, type, message) \ + ASSERT_TRACE2(args, type, message, DERIVATION_TRACE_HINTFMT("foo")) +#define ASSERT_DERIVATION_TRACE2(args, type, message, context) \ + ASSERT_TRACE3(args, type, message, context, DERIVATION_TRACE_HINTFMT("foo")) +#define ASSERT_DERIVATION_TRACE3(args, type, message, context1, context2) \ + ASSERT_TRACE4(args, type, message, context1, context2, DERIVATION_TRACE_HINTFMT("foo")) + +TEST_F(ErrorTraceTest, genericClosure) +{ + ASSERT_TRACE2( + "genericClosure 1", + TypeError, + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.genericClosure")); + + ASSERT_TRACE2( + "genericClosure {}", + TypeError, + HintFmt("attribute '%s' missing", "startSet"), + HintFmt("in the attrset passed as argument to builtins.genericClosure")); + + ASSERT_TRACE2( + "genericClosure { startSet = 1; }", + 
TypeError,
+        HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)),
+        HintFmt("while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure"));
+
+    ASSERT_TRACE2(
+        "genericClosure { startSet = [{ key = 1;}]; operator = true; }",
+        TypeError,
+        HintFmt("expected a function but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)),
+        HintFmt("while evaluating the 'operator' attribute passed as argument to builtins.genericClosure"));
+
+    ASSERT_TRACE2(
+        "genericClosure { startSet = [{ key = 1;}]; operator = item: true; }",
+        TypeError,
+        HintFmt("expected a list but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)),
+        HintFmt("while evaluating the return value of the `operator` passed to builtins.genericClosure"));
+
+    ASSERT_TRACE2(
+        "genericClosure { startSet = [{ key = 1;}]; operator = item: [ true ]; }",
+        TypeError,
+        HintFmt("expected a set but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)),
+        HintFmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure"));
+
+    ASSERT_TRACE2(
+        "genericClosure { startSet = [{ key = 1;}]; operator = item: [ {} ]; }",
+        TypeError,
+        HintFmt("attribute '%s' missing", "key"),
+        HintFmt("in one of the attrsets generated by (or initially passed to) builtins.genericClosure"));
+
+    ASSERT_TRACE2(
+        "genericClosure { startSet = [{ key = 1;}]; operator = item: [{ key = ''a''; }]; }",
+        EvalError,
+        HintFmt("cannot compare %s with %s", "a string", "an integer"),
+        HintFmt("while comparing the `key` attributes of two genericClosure elements"));
+
+    ASSERT_TRACE2(
+        "genericClosure { startSet = [ true ]; operator = item: [{ key = ''a''; }]; }",
+        TypeError,
+        HintFmt("expected a set but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)),
+        HintFmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure"));
+}
+
+TEST_F(ErrorTraceTest, replaceStrings)
+{
+    ASSERT_TRACE2(
+        "replaceStrings 0 0 {}",
+        TypeError,
+        HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "0" ANSI_NORMAL)),
+        HintFmt("while evaluating the first argument passed to builtins.replaceStrings"));
+
+    ASSERT_TRACE2(
+        "replaceStrings [] 0 {}",
+        TypeError,
+        HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "0" ANSI_NORMAL)),
+        HintFmt("while evaluating the second argument passed to builtins.replaceStrings"));
+
+    ASSERT_TRACE1(
+        "replaceStrings [ 0 ] [] {}",
+        EvalError,
+        HintFmt("'from' and 'to' arguments passed to builtins.replaceStrings have different lengths"));
+
+    ASSERT_TRACE2(
+        "replaceStrings [ 1 ] [ \"new\" ] {}",
+        TypeError,
+        HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)),
+        HintFmt("while evaluating one of the strings to replace passed to builtins.replaceStrings"));
+
+    ASSERT_TRACE2(
+        "replaceStrings [ \"oo\" ] [ true ] \"foo\"",
+        TypeError,
+        HintFmt("expected a string but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)),
+        HintFmt("while evaluating one of the replacement strings passed to builtins.replaceStrings"));
+
+    ASSERT_TRACE2(
+        "replaceStrings [ \"old\" ] [ \"new\" ] {}",
+        TypeError,
+        HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")),
+        HintFmt("while evaluating the third argument passed to builtins.replaceStrings"));
+}
+
+TEST_F(ErrorTraceTest, scopedImport) {}
+
+TEST_F(ErrorTraceTest, import) {}
+
+TEST_F(ErrorTraceTest, typeOf) {}
+
+TEST_F(ErrorTraceTest, isNull) {}
+
+TEST_F(ErrorTraceTest, isFunction) {}
+
+TEST_F(ErrorTraceTest, isInt) {}
+
+TEST_F(ErrorTraceTest, isFloat) {}
+
+TEST_F(ErrorTraceTest, isString) {}
+
+TEST_F(ErrorTraceTest, isBool) {}
+
+TEST_F(ErrorTraceTest, isPath) {}
+
+TEST_F(ErrorTraceTest, break) {}
+
+TEST_F(ErrorTraceTest, abort) {}
+
+TEST_F(ErrorTraceTest, throw) {}
+
+TEST_F(ErrorTraceTest, addErrorContext) {}
+
+TEST_F(ErrorTraceTest, ceil)
+{
+    ASSERT_TRACE2(
+        "ceil \"foo\"",
+        TypeError,
+        HintFmt("expected a float but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)),
+        HintFmt("while evaluating the first argument passed to builtins.ceil"));
+}
+
+TEST_F(ErrorTraceTest, floor)
+{
+    ASSERT_TRACE2(
+        "floor \"foo\"",
+        TypeError,
+        HintFmt("expected a float but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)),
+        HintFmt("while evaluating the first argument passed to builtins.floor"));
+}
+
+TEST_F(ErrorTraceTest, tryEval) {}
+
+TEST_F(ErrorTraceTest, getEnv)
+{
+    ASSERT_TRACE2(
+        "getEnv [ ]",
+        TypeError,
+        HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")),
+        HintFmt("while evaluating the first argument passed to builtins.getEnv"));
+}
+
+TEST_F(ErrorTraceTest, seq) {}
+
+TEST_F(ErrorTraceTest, deepSeq) {}
+
+TEST_F(ErrorTraceTest, trace) {}
+
+TEST_F(ErrorTraceTest, placeholder)
+{
+    ASSERT_TRACE2(
+        "placeholder []",
+        TypeError,
+        HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")),
+        HintFmt("while evaluating the first argument passed to builtins.placeholder"));
+}
+
+TEST_F(ErrorTraceTest, toPath)
+{
+    ASSERT_TRACE2(
+        "toPath []",
+        TypeError,
+        HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")),
+        HintFmt("while evaluating the first argument passed to builtins.toPath"));
+
+    ASSERT_TRACE2(
+        "toPath \"foo\"",
+        EvalError,
+        HintFmt("string '%s' doesn't represent an absolute path", "foo"),
+        HintFmt("while evaluating the first argument passed to builtins.toPath"));
+}
+
+TEST_F(ErrorTraceTest, storePath)
+{
+    ASSERT_TRACE2(
+        "storePath true",
+        TypeError,
+        HintFmt("cannot coerce %s to a string: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)),
+        HintFmt("while evaluating the first argument passed to 'builtins.storePath'"));
+}
+
+TEST_F(ErrorTraceTest, pathExists)
+{
+    ASSERT_TRACE2(
+        "pathExists []",
+        TypeError,
+        HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")),
+        HintFmt("while realising the context of a path"));
+
+    ASSERT_TRACE2(
+        "pathExists \"zorglub\"",
+        EvalError,
+        HintFmt("string '%s' doesn't represent an absolute path", "zorglub"),
+        HintFmt("while realising the context of a path"));
+}
+
+TEST_F(ErrorTraceTest, baseNameOf)
+{
+    ASSERT_TRACE2(
+        "baseNameOf []",
+        TypeError,
+        HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")),
+        HintFmt("while evaluating the first argument passed to builtins.baseNameOf"));
+}
+
+TEST_F(ErrorTraceTest, dirOf) {}
+
+TEST_F(ErrorTraceTest, readFile) {}
+
+TEST_F(ErrorTraceTest, findFile) {}
+
+TEST_F(ErrorTraceTest, hashFile) {}
+
+TEST_F(ErrorTraceTest, readDir) {}
+
+TEST_F(ErrorTraceTest, toXML) {}
+
+TEST_F(ErrorTraceTest, toJSON) {}
+
+TEST_F(ErrorTraceTest, fromJSON) {}
+
+TEST_F(ErrorTraceTest, toFile) {}
+
+TEST_F(ErrorTraceTest, filterSource)
+{
+    ASSERT_TRACE2(
+        "filterSource [] []",
+        TypeError,
+        HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")),
+        HintFmt("while evaluating the
second argument (the path to filter) passed to 'builtins.filterSource'")); + + ASSERT_TRACE2( + "filterSource [] \"foo\"", + EvalError, + HintFmt("string '%s' doesn't represent an absolute path", "foo"), + HintFmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); + + ASSERT_TRACE2( + "filterSource [] ./.", + TypeError, + HintFmt("expected a function but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.filterSource")); + + // Unsupported by store "dummy" + + // ASSERT_TRACE2("filterSource (_: 1) ./.", + // TypeError, + // HintFmt("attempt to call something which is not a function but %s", "an integer"), + // HintFmt("while adding path '/home/layus/projects/nix'")); + + // ASSERT_TRACE2("filterSource (_: _: 1) ./.", + // TypeError, + // HintFmt("expected a Boolean but found %s: %s", "an integer", "1"), + // HintFmt("while evaluating the return value of the path filter function")); +} + +TEST_F(ErrorTraceTest, path) {} + +TEST_F(ErrorTraceTest, attrNames) +{ + ASSERT_TRACE2( + "attrNames []", + TypeError, + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the argument passed to builtins.attrNames")); +} + +TEST_F(ErrorTraceTest, attrValues) +{ + ASSERT_TRACE2( + "attrValues []", + TypeError, + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the argument passed to builtins.attrValues")); +} + +TEST_F(ErrorTraceTest, getAttr) +{ + ASSERT_TRACE2( + "getAttr [] []", + TypeError, + HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.getAttr")); + + ASSERT_TRACE2( + "getAttr \"foo\" []", + TypeError, + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument passed to builtins.getAttr")); + + ASSERT_TRACE2( + "getAttr \"foo\" {}", + TypeError, + HintFmt("attribute '%s' missing", "foo"), + HintFmt("in the attribute set under consideration")); +} + +TEST_F(ErrorTraceTest, unsafeGetAttrPos) {} + +TEST_F(ErrorTraceTest, hasAttr) +{ + ASSERT_TRACE2( + "hasAttr [] []", + TypeError, + HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.hasAttr")); + + ASSERT_TRACE2( + "hasAttr \"foo\" []", + TypeError, + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument passed to builtins.hasAttr")); +} + +TEST_F(ErrorTraceTest, isAttrs) {} + +TEST_F(ErrorTraceTest, removeAttrs) +{ + ASSERT_TRACE2( + "removeAttrs \"\" \"\"", + TypeError, + HintFmt("expected a set but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.removeAttrs")); + + ASSERT_TRACE2( + "removeAttrs \"\" [ 1 ]", + TypeError, + HintFmt("expected a set but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.removeAttrs")); + + ASSERT_TRACE2( + "removeAttrs \"\" [ \"1\" ]", + TypeError, + HintFmt("expected a set but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.removeAttrs")); +} + +TEST_F(ErrorTraceTest, listToAttrs) +{ + ASSERT_TRACE2( + "listToAttrs 1", + TypeError, + 
HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the argument passed to builtins.listToAttrs")); + + ASSERT_TRACE2( + "listToAttrs [ 1 ]", + TypeError, + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating an element of the list passed to builtins.listToAttrs")); + + ASSERT_TRACE2( + "listToAttrs [ {} ]", + TypeError, + HintFmt("attribute '%s' missing", "name"), + HintFmt("in a {name=...; value=...;} pair")); + + ASSERT_TRACE2( + "listToAttrs [ { name = 1; } ]", + TypeError, + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the `name` attribute of an element of the list passed to builtins.listToAttrs")); + + ASSERT_TRACE2( + "listToAttrs [ { name = \"foo\"; } ]", + TypeError, + HintFmt("attribute '%s' missing", "value"), + HintFmt("in a {name=...; value=...;} pair")); +} + +TEST_F(ErrorTraceTest, intersectAttrs) +{ + ASSERT_TRACE2( + "intersectAttrs [] []", + TypeError, + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.intersectAttrs")); + + ASSERT_TRACE2( + "intersectAttrs {} []", + TypeError, + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument passed to builtins.intersectAttrs")); +} + +TEST_F(ErrorTraceTest, catAttrs) +{ + ASSERT_TRACE2( + "catAttrs [] {}", + TypeError, + HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.catAttrs")); + + ASSERT_TRACE2( + "catAttrs \"foo\" {}", + TypeError, + HintFmt("expected a list but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.catAttrs")); + + ASSERT_TRACE2( + "catAttrs \"foo\" [ 1 ]", + TypeError, + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); + + ASSERT_TRACE2( + "catAttrs \"foo\" [ { foo = 1; } 1 { bar = 5;} ]", + TypeError, + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); +} + +TEST_F(ErrorTraceTest, functionArgs) +{ + ASSERT_TRACE1("functionArgs {}", TypeError, HintFmt("'functionArgs' requires a function")); +} + +TEST_F(ErrorTraceTest, mapAttrs) +{ + ASSERT_TRACE2( + "mapAttrs [] []", + TypeError, + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument passed to builtins.mapAttrs")); + + // XXX: deferred + // ASSERT_TRACE2("mapAttrs \"\" { foo.bar = 1; }", + // TypeError, + // HintFmt("attempt to call something which is not a function but %s", "a string"), + // HintFmt("while evaluating the attribute 'foo'")); + + // ASSERT_TRACE2("mapAttrs (x: x + \"1\") { foo.bar = 1; }", + // TypeError, + // HintFmt("attempt to call something which is not a function but %s", "a string"), + // HintFmt("while evaluating the attribute 'foo'")); + + // ASSERT_TRACE2("mapAttrs (x: y: x + 1) { foo.bar = 1; }", + // TypeError, + // HintFmt("cannot coerce %s to a string", "an integer"), + // HintFmt("while evaluating a path segment")); +} + +TEST_F(ErrorTraceTest, 
zipAttrsWith) +{ + ASSERT_TRACE2( + "zipAttrsWith [] [ 1 ]", + TypeError, + HintFmt("expected a function but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.zipAttrsWith")); + + ASSERT_TRACE2( + "zipAttrsWith (_: 1) [ 1 ]", + TypeError, + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating a value of the list passed as second argument to builtins.zipAttrsWith")); + + // XXX: How to properly tell that the function takes two arguments ? + // The same question also applies to sort, and maybe others. + // Due to laziness, we only create a thunk, and it fails later on. + // ASSERT_TRACE2("zipAttrsWith (_: 1) [ { foo = 1; } ]", + // TypeError, + // HintFmt("attempt to call something which is not a function but %s", "an integer"), + // HintFmt("while evaluating the attribute 'foo'")); + + // XXX: Also deferred deeply + // ASSERT_TRACE2("zipAttrsWith (a: b: a + b) [ { foo = 1; } { foo = 2; } ]", + // TypeError, + // HintFmt("cannot coerce %s to a string", "a list"), + // HintFmt("while evaluating a path segment")); +} + +TEST_F(ErrorTraceTest, isList) {} + +TEST_F(ErrorTraceTest, elemAt) +{ + ASSERT_TRACE2( + "elemAt \"foo\" (-1)", + TypeError, + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to 'builtins.elemAt'")); + + ASSERT_TRACE1( + "elemAt [] (-1)", Error, HintFmt("'builtins.elemAt' called with index %d on a list of size %d", -1, 0)); + + ASSERT_TRACE1( + "elemAt [\"foo\"] 3", Error, HintFmt("'builtins.elemAt' called with index %d on a list of size %d", 3, 1)); +} + +TEST_F(ErrorTraceTest, head) +{ + ASSERT_TRACE2( + "head 1", + TypeError, + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to 'builtins.head'")); + + ASSERT_TRACE1("head []", Error, HintFmt("'builtins.head' called on an empty list")); +} + +TEST_F(ErrorTraceTest, tail) +{ + ASSERT_TRACE2( + "tail 1", + TypeError, + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to 'builtins.tail'")); + + ASSERT_TRACE1("tail []", Error, HintFmt("'builtins.tail' called on an empty list")); +} + +TEST_F(ErrorTraceTest, map) +{ + ASSERT_TRACE2( + "map 1 \"foo\"", + TypeError, + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.map")); + + ASSERT_TRACE2( + "map 1 [ 1 ]", + TypeError, + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.map")); +} + +TEST_F(ErrorTraceTest, filter) +{ + ASSERT_TRACE2( + "filter 1 \"foo\"", + TypeError, + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.filter")); + + ASSERT_TRACE2( + "filter 1 [ \"foo\" ]", + TypeError, + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.filter")); + + ASSERT_TRACE2( + "filter (_: 5) [ \"foo\" ]", + TypeError, + HintFmt("expected a Boolean but found %s: %s", "an integer", 
Uncolored(ANSI_CYAN "5" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the filtering function passed to builtins.filter")); +} + +TEST_F(ErrorTraceTest, elem) +{ + ASSERT_TRACE2( + "elem 1 \"foo\"", + TypeError, + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.elem")); +} + +TEST_F(ErrorTraceTest, concatLists) +{ + ASSERT_TRACE2( + "concatLists 1", + TypeError, + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.concatLists")); + + ASSERT_TRACE2( + "concatLists [ 1 ]", + TypeError, + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating a value of the list passed to builtins.concatLists")); + + ASSERT_TRACE2( + "concatLists [ [1] \"foo\" ]", + TypeError, + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating a value of the list passed to builtins.concatLists")); +} + +TEST_F(ErrorTraceTest, length) +{ + ASSERT_TRACE2( + "length 1", + TypeError, + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.length")); + + ASSERT_TRACE2( + "length \"foo\"", + TypeError, + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.length")); +} + +TEST_F(ErrorTraceTest, foldlPrime) +{ + ASSERT_TRACE2( + "foldl' 1 \"foo\" true", + TypeError, + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.foldlStrict")); + + ASSERT_TRACE2( + "foldl' (_: 1) \"foo\" true", + TypeError, + HintFmt("expected a list but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating the third argument passed to builtins.foldlStrict")); + + ASSERT_TRACE1( + "foldl' (_: 1) \"foo\" [ true ]", + TypeError, + HintFmt( + "attempt to call something which is not a function but %s: %s", + "an integer", + Uncolored(ANSI_CYAN "1" ANSI_NORMAL))); + + ASSERT_TRACE2( + "foldl' (a: b: a && b) \"foo\" [ true ]", + TypeError, + HintFmt("expected a Boolean but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("in the left operand of the AND (&&) operator")); +} + +TEST_F(ErrorTraceTest, any) +{ + ASSERT_TRACE2( + "any 1 \"foo\"", + TypeError, + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.any")); + + ASSERT_TRACE2( + "any (_: 1) \"foo\"", + TypeError, + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.any")); + + ASSERT_TRACE2( + "any (_: 1) [ \"foo\" ]", + TypeError, + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the function passed to builtins.any")); +} + +TEST_F(ErrorTraceTest, all) +{ + ASSERT_TRACE2( + "all 1 \"foo\"", + TypeError, + HintFmt("expected a function but found %s: %s", "an integer", 
Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.all")); + + ASSERT_TRACE2( + "all (_: 1) \"foo\"", + TypeError, + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.all")); + + ASSERT_TRACE2( + "all (_: 1) [ \"foo\" ]", + TypeError, + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the function passed to builtins.all")); +} + +TEST_F(ErrorTraceTest, genList) +{ + ASSERT_TRACE2( + "genList 1 \"foo\"", + TypeError, + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.genList")); + + ASSERT_TRACE2( + "genList 1 2", + TypeError, + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.genList")); + + // XXX: deferred + // ASSERT_TRACE2("genList (x: x + \"foo\") 2 #TODO", + // TypeError, + // HintFmt("cannot add %s to an integer", "a string"), + // HintFmt("while evaluating anonymous lambda")); + + ASSERT_TRACE1("genList false (-3)", EvalError, HintFmt("cannot create list of size %d", -3)); +} + +TEST_F(ErrorTraceTest, sort) +{ + ASSERT_TRACE2( + "sort 1 \"foo\"", + TypeError, + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.sort")); + + ASSERT_TRACE2( + "sort 1 [ \"foo\" ]", + TypeError, + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.sort")); + + ASSERT_TRACE1( + "sort (_: 1) [ \"foo\" \"bar\" ]", + TypeError, + HintFmt( + "attempt to call something which is not a function but %s: %s", + "an integer", + Uncolored(ANSI_CYAN "1" ANSI_NORMAL))); + + ASSERT_TRACE2( + "sort (_: _: 1) [ \"foo\" \"bar\" ]", + TypeError, + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the sorting function passed to builtins.sort")); + + // XXX: Trace too deep, need better asserts + // ASSERT_TRACE1("sort (a: b: a <= b) [ \"foo\" {} ] # TODO", + // TypeError, + // HintFmt("cannot compare %s with %s", "a string", "a set")); + + // ASSERT_TRACE1("sort (a: b: a <= b) [ {} {} ] # TODO", + // TypeError, + // HintFmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); +} + +TEST_F(ErrorTraceTest, partition) +{ + ASSERT_TRACE2( + "partition 1 \"foo\"", + TypeError, + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.partition")); + + ASSERT_TRACE2( + "partition (_: 1) \"foo\"", + TypeError, + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.partition")); + + ASSERT_TRACE2( + "partition (_: 1) [ \"foo\" ]", + TypeError, + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the partition function passed to 
builtins.partition")); +} + +TEST_F(ErrorTraceTest, groupBy) +{ + ASSERT_TRACE2( + "groupBy 1 \"foo\"", + TypeError, + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.groupBy")); + + ASSERT_TRACE2( + "groupBy (_: 1) \"foo\"", + TypeError, + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.groupBy")); + + ASSERT_TRACE2( + "groupBy (x: x) [ \"foo\" \"bar\" 1 ]", + TypeError, + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the grouping function passed to builtins.groupBy")); +} + +TEST_F(ErrorTraceTest, concatMap) +{ + ASSERT_TRACE2( + "concatMap 1 \"foo\"", + TypeError, + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.concatMap")); + + ASSERT_TRACE2( + "concatMap (x: 1) \"foo\"", + TypeError, + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.concatMap")); + + ASSERT_TRACE2( + "concatMap (x: 1) [ \"foo\" ] # TODO", + TypeError, + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the function passed to builtins.concatMap")); + + ASSERT_TRACE2( + "concatMap (x: \"foo\") [ 1 2 ] # TODO", + TypeError, + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the function passed to builtins.concatMap")); +} + +TEST_F(ErrorTraceTest, add) +{ + ASSERT_TRACE2( + "add \"foo\" 1", + TypeError, + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument of the addition")); + + ASSERT_TRACE2( + "add 1 \"foo\"", + TypeError, + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument of the addition")); +} + +TEST_F(ErrorTraceTest, sub) +{ + ASSERT_TRACE2( + "sub \"foo\" 1", + TypeError, + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument of the subtraction")); + + ASSERT_TRACE2( + "sub 1 \"foo\"", + TypeError, + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument of the subtraction")); +} + +TEST_F(ErrorTraceTest, mul) +{ + ASSERT_TRACE2( + "mul \"foo\" 1", + TypeError, + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument of the multiplication")); + + ASSERT_TRACE2( + "mul 1 \"foo\"", + TypeError, + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument of the multiplication")); +} + +TEST_F(ErrorTraceTest, div) +{ + ASSERT_TRACE2( + "div \"foo\" 1 # TODO: an integer was expected -> a number", + TypeError, + HintFmt("expected an integer but found 
%s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first operand of the division")); + + ASSERT_TRACE2( + "div 1 \"foo\"", + TypeError, + HintFmt("expected a float but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second operand of the division")); + + ASSERT_TRACE1("div \"foo\" 0", EvalError, HintFmt("division by zero")); +} + +TEST_F(ErrorTraceTest, bitAnd) +{ + ASSERT_TRACE2( + "bitAnd 1.1 2", + TypeError, + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "1.1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.bitAnd")); + + ASSERT_TRACE2( + "bitAnd 1 2.2", + TypeError, + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "2.2" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.bitAnd")); +} + +TEST_F(ErrorTraceTest, bitOr) +{ + ASSERT_TRACE2( + "bitOr 1.1 2", + TypeError, + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "1.1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.bitOr")); + + ASSERT_TRACE2( + "bitOr 1 2.2", + TypeError, + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "2.2" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.bitOr")); +} + +TEST_F(ErrorTraceTest, bitXor) +{ + ASSERT_TRACE2( + "bitXor 1.1 2", + TypeError, + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "1.1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.bitXor")); + + ASSERT_TRACE2( + "bitXor 1 2.2", + TypeError, + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "2.2" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.bitXor")); +} + +TEST_F(ErrorTraceTest, lessThan) +{ + ASSERT_TRACE1("lessThan 1 \"foo\"", EvalError, HintFmt("cannot compare %s with %s", "an integer", "a string")); + + ASSERT_TRACE1( + "lessThan {} {}", + EvalError, + HintFmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); + + ASSERT_TRACE2( + "lessThan [ 1 2 ] [ \"foo\" ]", + EvalError, + HintFmt("cannot compare %s with %s", "an integer", "a string"), + HintFmt("while comparing two list elements")); +} + +TEST_F(ErrorTraceTest, toString) +{ + ASSERT_TRACE2( + "toString { a = 1; }", + TypeError, + HintFmt("cannot coerce %s to a string: %s", "a set", Uncolored("{ a = " ANSI_CYAN "1" ANSI_NORMAL "; }")), + HintFmt("while evaluating the first argument passed to builtins.toString")); +} + +TEST_F(ErrorTraceTest, substring) +{ + ASSERT_TRACE2( + "substring {} \"foo\" true", + TypeError, + HintFmt("expected an integer but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the first argument (the start offset) passed to builtins.substring")); + + ASSERT_TRACE2( + "substring 3 \"foo\" true", + TypeError, + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument (the substring length) passed to builtins.substring")); + + ASSERT_TRACE2( + "substring 0 3 {}", + TypeError, + HintFmt("cannot coerce %s to a string: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the third argument (the string) passed to builtins.substring")); + + ASSERT_TRACE1("substring (-3) 3 \"sometext\"", EvalError, HintFmt("negative 
start position in 'substring'")); +} + +TEST_F(ErrorTraceTest, stringLength) +{ + ASSERT_TRACE2( + "stringLength {} # TODO: context is missing ???", + TypeError, + HintFmt("cannot coerce %s to a string: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the argument passed to builtins.stringLength")); +} + +TEST_F(ErrorTraceTest, hashString) +{ + ASSERT_TRACE2( + "hashString 1 {}", + TypeError, + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.hashString")); + + ASSERT_TRACE1( + "hashString \"foo\" \"content\"", + UsageError, + HintFmt("unknown hash algorithm '%s', expect 'blake3', 'md5', 'sha1', 'sha256', or 'sha512'", "foo")); + + ASSERT_TRACE2( + "hashString \"sha256\" {}", + TypeError, + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.hashString")); +} + +TEST_F(ErrorTraceTest, match) +{ + ASSERT_TRACE2( + "match 1 {}", + TypeError, + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.match")); + + ASSERT_TRACE2( + "match \"foo\" {}", + TypeError, + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.match")); + + ASSERT_TRACE1("match \"(.*\" \"\"", EvalError, HintFmt("invalid regular expression '%s'", "(.*")); +} + +TEST_F(ErrorTraceTest, split) +{ + ASSERT_TRACE2( + "split 1 {}", + TypeError, + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.split")); + + ASSERT_TRACE2( + "split \"foo\" {}", + TypeError, + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.split")); + + ASSERT_TRACE1("split \"f(o*o\" \"1foo2\"", EvalError, HintFmt("invalid regular expression '%s'", "f(o*o")); +} + +TEST_F(ErrorTraceTest, concatStringsSep) +{ + ASSERT_TRACE2( + "concatStringsSep 1 {}", + TypeError, + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument (the separator string) passed to builtins.concatStringsSep")); + + ASSERT_TRACE2( + "concatStringsSep \"foo\" {}", + TypeError, + HintFmt("expected a list but found %s: %s", "a set", Uncolored("{ }")), + HintFmt( + "while evaluating the second argument (the list of strings to concat) passed to builtins.concatStringsSep")); + + ASSERT_TRACE2( + "concatStringsSep \"foo\" [ 1 2 {} ] # TODO: coerce to string is buggy", + TypeError, + HintFmt("cannot coerce %s to a string: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep")); +} + +TEST_F(ErrorTraceTest, parseDrvName) +{ + ASSERT_TRACE2( + "parseDrvName 1", + TypeError, + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.parseDrvName")); +} + +TEST_F(ErrorTraceTest, compareVersions) +{ + ASSERT_TRACE2( + "compareVersions 1 {}", + TypeError, + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the 
first argument passed to builtins.compareVersions")); + + ASSERT_TRACE2( + "compareVersions \"abd\" {}", + TypeError, + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.compareVersions")); +} + +TEST_F(ErrorTraceTest, splitVersion) +{ + ASSERT_TRACE2( + "splitVersion 1", + TypeError, + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.splitVersion")); +} + +TEST_F(ErrorTraceTest, traceVerbose) {} + +TEST_F(ErrorTraceTest, derivationStrict) +{ + ASSERT_TRACE2( + "derivationStrict \"\"", + TypeError, + HintFmt("expected a set but found %s: %s", "a string", "\"\""), + HintFmt("while evaluating the argument passed to builtins.derivationStrict")); + + ASSERT_TRACE2( + "derivationStrict {}", + TypeError, + HintFmt("attribute '%s' missing", "name"), + HintFmt("in the attrset passed as argument to builtins.derivationStrict")); + + ASSERT_TRACE3( + "derivationStrict { name = 1; }", + TypeError, + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the `name` attribute passed to builtins.derivationStrict"), + HintFmt("while evaluating the derivation attribute 'name'")); + + ASSERT_DERIVATION_TRACE1( + "derivationStrict { name = \"foo\"; }", EvalError, HintFmt("required attribute 'builder' missing")); + + ASSERT_DERIVATION_TRACE2( + "derivationStrict { name = \"foo\"; builder = 1; __structuredAttrs = 15; }", + TypeError, + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "15" ANSI_NORMAL)), + HintFmt("while evaluating the `__structuredAttrs` attribute passed to builtins.derivationStrict")); + + ASSERT_DERIVATION_TRACE2( + "derivationStrict { name = \"foo\"; builder = 1; __ignoreNulls = 15; }", + TypeError, + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "15" ANSI_NORMAL)), + HintFmt("while evaluating the `__ignoreNulls` attribute passed to builtins.derivationStrict")); + + ASSERT_DERIVATION_TRACE2( + "derivationStrict { name = \"foo\"; builder = 1; outputHashMode = 15; }", + EvalError, + HintFmt("invalid value '%s' for 'outputHashMode' attribute", "15"), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputHashMode", "foo")); + + ASSERT_DERIVATION_TRACE2( + "derivationStrict { name = \"foo\"; builder = 1; outputHashMode = \"custom\"; }", + EvalError, + HintFmt("invalid value '%s' for 'outputHashMode' attribute", "custom"), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputHashMode", "foo")); + + ASSERT_DERIVATION_TRACE3( + "derivationStrict { name = \"foo\"; builder = 1; system = {}; }", + TypeError, + HintFmt("cannot coerce %s to a string: { }", "a set"), + HintFmt(""), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "system", "foo")); + + ASSERT_DERIVATION_TRACE3( + "derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = {}; }", + TypeError, + HintFmt("cannot coerce %s to a string: { }", "a set"), + HintFmt(""), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputs", "foo")); + + ASSERT_DERIVATION_TRACE2( + "derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"drvPath\"; }", + EvalError, + HintFmt("invalid derivation output name 'drvPath'"), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputs", "foo")); + + 
ASSERT_DERIVATION_TRACE3( + "derivationStrict { name = \"foo\"; outputs = \"out\"; __structuredAttrs = true; }", + EvalError, + HintFmt("expected a list but found %s: %s", "a string", "\"out\""), + HintFmt(""), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputs", "foo")); + + ASSERT_DERIVATION_TRACE2( + "derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = []; }", + EvalError, + HintFmt("derivation cannot have an empty set of outputs"), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputs", "foo")); + + ASSERT_DERIVATION_TRACE2( + "derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = [ \"drvPath\" ]; }", + EvalError, + HintFmt("invalid derivation output name 'drvPath'"), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputs", "foo")); + + ASSERT_DERIVATION_TRACE2( + "derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = [ \"out\" \"out\" ]; }", + EvalError, + HintFmt("duplicate derivation output '%s'", "out"), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "outputs", "foo")); + + ASSERT_DERIVATION_TRACE3( + "derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __contentAddressed = \"true\"; }", + TypeError, + HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), + HintFmt(""), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "__contentAddressed", "foo")); + + ASSERT_DERIVATION_TRACE3( + "derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; }", + TypeError, + HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), + HintFmt(""), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "__impure", "foo")); + + ASSERT_DERIVATION_TRACE3( + "derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; }", + TypeError, + HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), + HintFmt(""), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "__impure", "foo")); + + ASSERT_DERIVATION_TRACE3( + "derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = \"foo\"; }", + TypeError, + HintFmt("expected a list but found %s: %s", "a string", "\"foo\""), + HintFmt(""), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "args", "foo")); + + ASSERT_DERIVATION_TRACE3( + "derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ {} ]; }", + TypeError, + HintFmt("cannot coerce %s to a string: { }", "a set"), + HintFmt("while evaluating an element of the argument list"), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "args", "foo")); + + ASSERT_DERIVATION_TRACE3( + "derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ \"a\" {} ]; }", + TypeError, + HintFmt("cannot coerce %s to a string: { }", "a set"), + HintFmt("while evaluating an element of the argument list"), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "args", "foo")); + + ASSERT_DERIVATION_TRACE3( + "derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; FOO = {}; }", + TypeError, + HintFmt("cannot coerce %s to a string: { }", "a set"), + HintFmt(""), + HintFmt("while evaluating attribute '%s' of derivation '%s'", "FOO", "foo")); +} } /* namespace nix */ diff --git a/src/libexpr-tests/eval.cc b/src/libexpr-tests/eval.cc index e9664dc58..ad70ea5b8 100644 --- 
a/src/libexpr-tests/eval.cc +++ b/src/libexpr-tests/eval.cc @@ -6,7 +6,8 @@ namespace nix { -TEST(nix_isAllowedURI, http_example_com) { +TEST(nix_isAllowedURI, http_example_com) +{ Strings allowed; allowed.push_back("http://example.com"); @@ -20,7 +21,8 @@ TEST(nix_isAllowedURI, http_example_com) { ASSERT_FALSE(isAllowedURI("http://example.org/foo", allowed)); } -TEST(nix_isAllowedURI, http_example_com_foo) { +TEST(nix_isAllowedURI, http_example_com_foo) +{ Strings allowed; allowed.push_back("http://example.com/foo"); @@ -34,7 +36,8 @@ TEST(nix_isAllowedURI, http_example_com_foo) { // ASSERT_TRUE(isAllowedURI("http://example.com/foo?ok=1", allowed)); } -TEST(nix_isAllowedURI, http) { +TEST(nix_isAllowedURI, http) +{ Strings allowed; allowed.push_back("http://"); @@ -48,7 +51,8 @@ TEST(nix_isAllowedURI, http) { ASSERT_FALSE(isAllowedURI("http:foo", allowed)); } -TEST(nix_isAllowedURI, https) { +TEST(nix_isAllowedURI, https) +{ Strings allowed; allowed.push_back("https://"); @@ -58,7 +62,8 @@ TEST(nix_isAllowedURI, https) { ASSERT_FALSE(isAllowedURI("http://example.com/https:", allowed)); } -TEST(nix_isAllowedURI, absolute_path) { +TEST(nix_isAllowedURI, absolute_path) +{ Strings allowed; allowed.push_back("/var/evil"); // bad idea @@ -76,7 +81,8 @@ TEST(nix_isAllowedURI, absolute_path) { ASSERT_FALSE(isAllowedURI("http://example.com//var/evil/foo", allowed)); } -TEST(nix_isAllowedURI, file_url) { +TEST(nix_isAllowedURI, file_url) +{ Strings allowed; allowed.push_back("file:///var/evil"); // bad idea @@ -103,7 +109,8 @@ TEST(nix_isAllowedURI, file_url) { ASSERT_FALSE(isAllowedURI("file://", allowed)); } -TEST(nix_isAllowedURI, github_all) { +TEST(nix_isAllowedURI, github_all) +{ Strings allowed; allowed.push_back("github:"); ASSERT_TRUE(isAllowedURI("github:", allowed)); @@ -117,7 +124,8 @@ TEST(nix_isAllowedURI, github_all) { ASSERT_FALSE(isAllowedURI("github", allowed)); } -TEST(nix_isAllowedURI, github_org) { +TEST(nix_isAllowedURI, github_org) +{ Strings allowed; allowed.push_back("github:foo"); ASSERT_FALSE(isAllowedURI("github:", allowed)); @@ -130,7 +138,8 @@ TEST(nix_isAllowedURI, github_org) { ASSERT_FALSE(isAllowedURI("file:///github:foo/bar/archive/master.tar.gz", allowed)); } -TEST(nix_isAllowedURI, non_scheme_colon) { +TEST(nix_isAllowedURI, non_scheme_colon) +{ Strings allowed; allowed.push_back("https://foo/bar:"); ASSERT_TRUE(isAllowedURI("https://foo/bar:", allowed)); @@ -138,16 +147,19 @@ TEST(nix_isAllowedURI, non_scheme_colon) { ASSERT_FALSE(isAllowedURI("https://foo/bar:baz", allowed)); } -class EvalStateTest : public LibExprTest {}; +class EvalStateTest : public LibExprTest +{}; -TEST_F(EvalStateTest, getBuiltins_ok) { +TEST_F(EvalStateTest, getBuiltins_ok) +{ auto evaled = maybeThunk("builtins"); auto & builtins = state.getBuiltins(); ASSERT_TRUE(builtins.type() == nAttrs); ASSERT_EQ(evaled, &builtins); } -TEST_F(EvalStateTest, getBuiltin_ok) { +TEST_F(EvalStateTest, getBuiltin_ok) +{ auto & builtin = state.getBuiltin("toString"); ASSERT_TRUE(builtin.type() == nFunction); // FIXME @@ -157,7 +169,8 @@ TEST_F(EvalStateTest, getBuiltin_ok) { ASSERT_EQ(state.forceBool(builtin2, noPos, "in unit test"), true); } -TEST_F(EvalStateTest, getBuiltin_fail) { +TEST_F(EvalStateTest, getBuiltin_fail) +{ ASSERT_THROW(state.getBuiltin("nonexistent"), EvalError); } diff --git a/src/libexpr-tests/json.cc b/src/libexpr-tests/json.cc index 11f31d058..c090ac5d7 100644 --- a/src/libexpr-tests/json.cc +++ b/src/libexpr-tests/json.cc @@ -4,65 +4,75 @@ namespace nix { // Testing the conversion 
to JSON - class JSONValueTest : public LibExprTest { - protected: - std::string getJSONValue(Value& value) { - std::stringstream ss; - NixStringContext ps; - printValueAsJSON(state, true, value, noPos, ss, ps); - return ss.str(); - } - }; - - TEST_F(JSONValueTest, null) { - Value v; - v.mkNull(); - ASSERT_EQ(getJSONValue(v), "null"); +class JSONValueTest : public LibExprTest +{ +protected: + std::string getJSONValue(Value & value) + { + std::stringstream ss; + NixStringContext ps; + printValueAsJSON(state, true, value, noPos, ss, ps); + return ss.str(); } +}; - TEST_F(JSONValueTest, BoolFalse) { - Value v; - v.mkBool(false); - ASSERT_EQ(getJSONValue(v),"false"); - } +TEST_F(JSONValueTest, null) +{ + Value v; + v.mkNull(); + ASSERT_EQ(getJSONValue(v), "null"); +} - TEST_F(JSONValueTest, BoolTrue) { - Value v; - v.mkBool(true); - ASSERT_EQ(getJSONValue(v), "true"); - } +TEST_F(JSONValueTest, BoolFalse) +{ + Value v; + v.mkBool(false); + ASSERT_EQ(getJSONValue(v), "false"); +} - TEST_F(JSONValueTest, IntPositive) { - Value v; - v.mkInt(100); - ASSERT_EQ(getJSONValue(v), "100"); - } +TEST_F(JSONValueTest, BoolTrue) +{ + Value v; + v.mkBool(true); + ASSERT_EQ(getJSONValue(v), "true"); +} - TEST_F(JSONValueTest, IntNegative) { - Value v; - v.mkInt(-100); - ASSERT_EQ(getJSONValue(v), "-100"); - } +TEST_F(JSONValueTest, IntPositive) +{ + Value v; + v.mkInt(100); + ASSERT_EQ(getJSONValue(v), "100"); +} - TEST_F(JSONValueTest, String) { - Value v; - v.mkString("test"); - ASSERT_EQ(getJSONValue(v), "\"test\""); - } +TEST_F(JSONValueTest, IntNegative) +{ + Value v; + v.mkInt(-100); + ASSERT_EQ(getJSONValue(v), "-100"); +} - TEST_F(JSONValueTest, StringQuotes) { - Value v; +TEST_F(JSONValueTest, String) +{ + Value v; + v.mkString("test"); + ASSERT_EQ(getJSONValue(v), "\"test\""); +} - v.mkString("test\""); - ASSERT_EQ(getJSONValue(v), "\"test\\\"\""); - } +TEST_F(JSONValueTest, StringQuotes) +{ + Value v; - // The dummy store doesn't support writing files. Fails with this exception message: - // C++ exception with description "error: operation 'addToStoreFromDump' is - // not supported by store 'dummy'" thrown in the test body. - TEST_F(JSONValueTest, DISABLED_Path) { - Value v; - v.mkPath(state.rootPath(CanonPath("/test"))); - ASSERT_EQ(getJSONValue(v), "\"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x\""); - } + v.mkString("test\""); + ASSERT_EQ(getJSONValue(v), "\"test\\\"\""); +} + +// The dummy store doesn't support writing files. Fails with this exception message: +// C++ exception with description "error: operation 'addToStoreFromDump' is +// not supported by store 'dummy'" thrown in the test body. +TEST_F(JSONValueTest, DISABLED_Path) +{ + Value v; + v.mkPath(state.rootPath(CanonPath("/test"))); + ASSERT_EQ(getJSONValue(v), "\"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x\""); +} } /* namespace nix */ diff --git a/src/libexpr-tests/main.cc b/src/libexpr-tests/main.cc index 52cca53c4..61b40e834 100644 --- a/src/libexpr-tests/main.cc +++ b/src/libexpr-tests/main.cc @@ -5,7 +5,8 @@ using namespace nix; -int main (int argc, char **argv) { +int main(int argc, char ** argv) +{ if (argc > 1 && std::string_view(argv[1]) == "__build-remote") { printError("test-build-remote: not supported in libexpr unit tests"); return 1; @@ -14,25 +15,26 @@ int main (int argc, char **argv) { // Disable build hook. We won't be testing remote builds in these unit tests. If we do, fix the above build hook. settings.buildHook = {}; - #ifdef __linux__ // should match the conditional around sandboxBuildDir declaration. 
+#ifdef __linux__ // should match the conditional around sandboxBuildDir declaration. - // When building and testing nix within the host's Nix sandbox, our store dir will be located in the host's sandboxBuildDir, e.g.: - // Host + // When building and testing nix within the host's Nix sandbox, our store dir will be located in the host's + // sandboxBuildDir, e.g.: Host // storeDir = /nix/store // sandboxBuildDir = /build // This process // storeDir = /build/foo/bar/store // sandboxBuildDir = /build - // However, we have a rule that the store dir must not be inside the storeDir, so we need to pick a different sandboxBuildDir. + // However, we have a rule that the store dir must not be inside the storeDir, so we need to pick a different + // sandboxBuildDir. settings.sandboxBuildDir = "/test-build-dir-instead-of-usual-build-dir"; - #endif +#endif - #ifdef __APPLE__ +#ifdef __APPLE__ // Avoid this error, when already running in a sandbox: // sandbox-exec: sandbox_apply: Operation not permitted settings.sandboxMode = smDisabled; setEnv("_NIX_TEST_NO_SANDBOX", "1"); - #endif +#endif // For pipe operator tests in trivial.cc experimentalFeatureSettings.set("experimental-features", "pipe-operators"); diff --git a/src/libexpr-tests/nix_api_expr.cc b/src/libexpr-tests/nix_api_expr.cc index f3b6fed0e..529c2f584 100644 --- a/src/libexpr-tests/nix_api_expr.cc +++ b/src/libexpr-tests/nix_api_expr.cc @@ -394,6 +394,7 @@ static void primop_bad_return_thunk( { nix_init_apply(context, ret, args[0], args[1]); } + TEST_F(nix_api_expr_test, nix_expr_primop_bad_return_thunk) { PrimOp * primop = diff --git a/src/libexpr-tests/nix_api_external.cc b/src/libexpr-tests/nix_api_external.cc index c1deabad6..93da3ca39 100644 --- a/src/libexpr-tests/nix_api_external.cc +++ b/src/libexpr-tests/nix_api_external.cc @@ -27,6 +27,7 @@ public: private: int _x; + static void print_function(void * self, nix_printer * printer) {} static void show_type_function(void * self, nix_string_return * res) {} @@ -68,4 +69,4 @@ TEST_F(nix_api_expr_test, nix_expr_eval_external) nix_state_free(stateFn); } -} +} // namespace nixC diff --git a/src/libexpr-tests/nix_api_value.cc b/src/libexpr-tests/nix_api_value.cc index 1da980ab8..5d85ed68d 100644 --- a/src/libexpr-tests/nix_api_value.cc +++ b/src/libexpr-tests/nix_api_value.cc @@ -120,6 +120,7 @@ TEST_F(nix_api_expr_test, nix_value_set_get_path_invalid) ASSERT_EQ(nullptr, nix_get_path_string(ctx, value)); assert_ctx_err(); } + TEST_F(nix_api_expr_test, nix_value_set_get_path) { const char * p = "/nix/store/40s0qmrfb45vlh6610rk29ym318dswdr-myname"; @@ -399,4 +400,4 @@ TEST_F(nix_api_expr_test, nix_copy_value) nix_gc_decref(ctx, source); } -} +} // namespace nixC diff --git a/src/libexpr-tests/primops.cc b/src/libexpr-tests/primops.cc index 9b5590d8d..f3f7de8d9 100644 --- a/src/libexpr-tests/primops.cc +++ b/src/libexpr-tests/primops.cc @@ -7,887 +7,996 @@ #include "nix/expr/tests/libexpr.hh" namespace nix { - class CaptureLogger : public Logger +class CaptureLogger : public Logger +{ + std::ostringstream oss; + +public: + CaptureLogger() {} + + std::string get() const { - std::ostringstream oss; - - public: - CaptureLogger() {} - - std::string get() const { - return oss.str(); - } - - void log(Verbosity lvl, std::string_view s) override { - oss << s << std::endl; - } - - void logEI(const ErrorInfo & ei) override { - showErrorInfo(oss, ei, loggerSettings.showTrace.get()); - } - }; - - class CaptureLogging { - std::unique_ptr oldLogger; - public: - CaptureLogging() { - oldLogger = 
std::move(logger); - logger = std::make_unique(); - } - - ~CaptureLogging() { - logger = std::move(oldLogger); - } - }; - - - // Testing eval of PrimOp's - class PrimOpTest : public LibExprTest {}; - - - TEST_F(PrimOpTest, throw) { - ASSERT_THROW(eval("throw \"foo\""), ThrownError); + return oss.str(); } - TEST_F(PrimOpTest, abort) { - ASSERT_THROW(eval("abort \"abort\""), Abort); + void log(Verbosity lvl, std::string_view s) override + { + oss << s << std::endl; } - TEST_F(PrimOpTest, ceil) { - auto v = eval("builtins.ceil 1.9"); - ASSERT_THAT(v, IsIntEq(2)); - auto intMin = eval("builtins.ceil (-4611686018427387904 - 4611686018427387904)"); - ASSERT_THAT(intMin, IsIntEq(std::numeric_limits::min())); - ASSERT_THROW(eval("builtins.ceil 1.0e200"), EvalError); - ASSERT_THROW(eval("builtins.ceil -1.0e200"), EvalError); - ASSERT_THROW(eval("builtins.ceil (1.0e200 * 1.0e200)"), EvalError); // inf - ASSERT_THROW(eval("builtins.ceil (-1.0e200 * 1.0e200)"), EvalError); // -inf - ASSERT_THROW(eval("builtins.ceil (1.0e200 * 1.0e200 - 1.0e200 * 1.0e200)"), EvalError); // nan - // bugs in previous Nix versions - ASSERT_THROW(eval("builtins.ceil (4611686018427387904 + 4611686018427387903)"), EvalError); - ASSERT_THROW(eval("builtins.ceil (-4611686018427387904 - 4611686018427387903)"), EvalError); + void logEI(const ErrorInfo & ei) override + { + showErrorInfo(oss, ei, loggerSettings.showTrace.get()); + } +}; + +class CaptureLogging +{ + std::unique_ptr oldLogger; +public: + CaptureLogging() + { + oldLogger = std::move(logger); + logger = std::make_unique(); } - TEST_F(PrimOpTest, floor) { - auto v = eval("builtins.floor 1.9"); - ASSERT_THAT(v, IsIntEq(1)); - auto intMin = eval("builtins.ceil (-4611686018427387904 - 4611686018427387904)"); - ASSERT_THAT(intMin, IsIntEq(std::numeric_limits::min())); - ASSERT_THROW(eval("builtins.ceil 1.0e200"), EvalError); - ASSERT_THROW(eval("builtins.ceil -1.0e200"), EvalError); - ASSERT_THROW(eval("builtins.ceil (1.0e200 * 1.0e200)"), EvalError); // inf - ASSERT_THROW(eval("builtins.ceil (-1.0e200 * 1.0e200)"), EvalError); // -inf - ASSERT_THROW(eval("builtins.ceil (1.0e200 * 1.0e200 - 1.0e200 * 1.0e200)"), EvalError); // nan - // bugs in previous Nix versions - ASSERT_THROW(eval("builtins.ceil (4611686018427387904 + 4611686018427387903)"), EvalError); - ASSERT_THROW(eval("builtins.ceil (-4611686018427387904 - 4611686018427387903)"), EvalError); + ~CaptureLogging() + { + logger = std::move(oldLogger); } +}; - TEST_F(PrimOpTest, tryEvalFailure) { - auto v = eval("builtins.tryEval (throw \"\")"); - ASSERT_THAT(v, IsAttrsOfSize(2)); - auto s = createSymbol("success"); - auto p = v.attrs()->get(s); - ASSERT_NE(p, nullptr); - ASSERT_THAT(*p->value, IsFalse()); - } +// Testing eval of PrimOp's +class PrimOpTest : public LibExprTest +{}; - TEST_F(PrimOpTest, tryEvalSuccess) { - auto v = eval("builtins.tryEval 123"); - ASSERT_THAT(v, IsAttrs()); - auto s = createSymbol("success"); - auto p = v.attrs()->get(s); - ASSERT_NE(p, nullptr); - ASSERT_THAT(*p->value, IsTrue()); - s = createSymbol("value"); - p = v.attrs()->get(s); - ASSERT_NE(p, nullptr); - ASSERT_THAT(*p->value, IsIntEq(123)); - } +TEST_F(PrimOpTest, throw) +{ + ASSERT_THROW(eval("throw \"foo\""), ThrownError); +} - TEST_F(PrimOpTest, getEnv) { - setEnv("_NIX_UNIT_TEST_ENV_VALUE", "test value"); - auto v = eval("builtins.getEnv \"_NIX_UNIT_TEST_ENV_VALUE\""); - ASSERT_THAT(v, IsStringEq("test value")); - } +TEST_F(PrimOpTest, abort) +{ + ASSERT_THROW(eval("abort \"abort\""), Abort); +} - TEST_F(PrimOpTest, seq) { - 
ASSERT_THROW(eval("let x = throw \"test\"; in builtins.seq x { }"), ThrownError); - } +TEST_F(PrimOpTest, ceil) +{ + auto v = eval("builtins.ceil 1.9"); + ASSERT_THAT(v, IsIntEq(2)); + auto intMin = eval("builtins.ceil (-4611686018427387904 - 4611686018427387904)"); + ASSERT_THAT(intMin, IsIntEq(std::numeric_limits::min())); + ASSERT_THROW(eval("builtins.ceil 1.0e200"), EvalError); + ASSERT_THROW(eval("builtins.ceil -1.0e200"), EvalError); + ASSERT_THROW(eval("builtins.ceil (1.0e200 * 1.0e200)"), EvalError); // inf + ASSERT_THROW(eval("builtins.ceil (-1.0e200 * 1.0e200)"), EvalError); // -inf + ASSERT_THROW(eval("builtins.ceil (1.0e200 * 1.0e200 - 1.0e200 * 1.0e200)"), EvalError); // nan + // bugs in previous Nix versions + ASSERT_THROW(eval("builtins.ceil (4611686018427387904 + 4611686018427387903)"), EvalError); + ASSERT_THROW(eval("builtins.ceil (-4611686018427387904 - 4611686018427387903)"), EvalError); +} - TEST_F(PrimOpTest, seqNotDeep) { - auto v = eval("let x = { z = throw \"test\"; }; in builtins.seq x { }"); - ASSERT_THAT(v, IsAttrs()); - } +TEST_F(PrimOpTest, floor) +{ + auto v = eval("builtins.floor 1.9"); + ASSERT_THAT(v, IsIntEq(1)); + auto intMin = eval("builtins.ceil (-4611686018427387904 - 4611686018427387904)"); + ASSERT_THAT(intMin, IsIntEq(std::numeric_limits::min())); + ASSERT_THROW(eval("builtins.ceil 1.0e200"), EvalError); + ASSERT_THROW(eval("builtins.ceil -1.0e200"), EvalError); + ASSERT_THROW(eval("builtins.ceil (1.0e200 * 1.0e200)"), EvalError); // inf + ASSERT_THROW(eval("builtins.ceil (-1.0e200 * 1.0e200)"), EvalError); // -inf + ASSERT_THROW(eval("builtins.ceil (1.0e200 * 1.0e200 - 1.0e200 * 1.0e200)"), EvalError); // nan + // bugs in previous Nix versions + ASSERT_THROW(eval("builtins.ceil (4611686018427387904 + 4611686018427387903)"), EvalError); + ASSERT_THROW(eval("builtins.ceil (-4611686018427387904 - 4611686018427387903)"), EvalError); +} - TEST_F(PrimOpTest, deepSeq) { - ASSERT_THROW(eval("let x = { z = throw \"test\"; }; in builtins.deepSeq x { }"), ThrownError); - } +TEST_F(PrimOpTest, tryEvalFailure) +{ + auto v = eval("builtins.tryEval (throw \"\")"); + ASSERT_THAT(v, IsAttrsOfSize(2)); + auto s = createSymbol("success"); + auto p = v.attrs()->get(s); + ASSERT_NE(p, nullptr); + ASSERT_THAT(*p->value, IsFalse()); +} - TEST_F(PrimOpTest, trace) { - CaptureLogging l; - auto v = eval("builtins.trace \"test string 123\" 123"); - ASSERT_THAT(v, IsIntEq(123)); - auto text = (dynamic_cast(logger.get()))->get(); - ASSERT_NE(text.find("test string 123"), std::string::npos); - } +TEST_F(PrimOpTest, tryEvalSuccess) +{ + auto v = eval("builtins.tryEval 123"); + ASSERT_THAT(v, IsAttrs()); + auto s = createSymbol("success"); + auto p = v.attrs()->get(s); + ASSERT_NE(p, nullptr); + ASSERT_THAT(*p->value, IsTrue()); + s = createSymbol("value"); + p = v.attrs()->get(s); + ASSERT_NE(p, nullptr); + ASSERT_THAT(*p->value, IsIntEq(123)); +} - TEST_F(PrimOpTest, placeholder) { - auto v = eval("builtins.placeholder \"out\""); - ASSERT_THAT(v, IsStringEq("/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9")); - } +TEST_F(PrimOpTest, getEnv) +{ + setEnv("_NIX_UNIT_TEST_ENV_VALUE", "test value"); + auto v = eval("builtins.getEnv \"_NIX_UNIT_TEST_ENV_VALUE\""); + ASSERT_THAT(v, IsStringEq("test value")); +} - TEST_F(PrimOpTest, baseNameOf) { - auto v = eval("builtins.baseNameOf /some/path"); - ASSERT_THAT(v, IsStringEq("path")); - } +TEST_F(PrimOpTest, seq) +{ + ASSERT_THROW(eval("let x = throw \"test\"; in builtins.seq x { }"), ThrownError); +} - TEST_F(PrimOpTest, dirOf) 
{ - auto v = eval("builtins.dirOf /some/path"); - ASSERT_THAT(v, IsPathEq("/some")); - } +TEST_F(PrimOpTest, seqNotDeep) +{ + auto v = eval("let x = { z = throw \"test\"; }; in builtins.seq x { }"); + ASSERT_THAT(v, IsAttrs()); +} - TEST_F(PrimOpTest, attrValues) { - auto v = eval("builtins.attrValues { x = \"foo\"; a = 1; }"); - ASSERT_THAT(v, IsListOfSize(2)); - ASSERT_THAT(*v.listView()[0], IsIntEq(1)); - ASSERT_THAT(*v.listView()[1], IsStringEq("foo")); - } +TEST_F(PrimOpTest, deepSeq) +{ + ASSERT_THROW(eval("let x = { z = throw \"test\"; }; in builtins.deepSeq x { }"), ThrownError); +} - TEST_F(PrimOpTest, getAttr) { - auto v = eval("builtins.getAttr \"x\" { x = \"foo\"; }"); - ASSERT_THAT(v, IsStringEq("foo")); - } +TEST_F(PrimOpTest, trace) +{ + CaptureLogging l; + auto v = eval("builtins.trace \"test string 123\" 123"); + ASSERT_THAT(v, IsIntEq(123)); + auto text = (dynamic_cast(logger.get()))->get(); + ASSERT_NE(text.find("test string 123"), std::string::npos); +} - TEST_F(PrimOpTest, getAttrNotFound) { - // FIXME: TypeError is really bad here, also the error wording is worse - // than on Nix <=2.3 - ASSERT_THROW(eval("builtins.getAttr \"y\" { }"), TypeError); - } +TEST_F(PrimOpTest, placeholder) +{ + auto v = eval("builtins.placeholder \"out\""); + ASSERT_THAT(v, IsStringEq("/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9")); +} - TEST_F(PrimOpTest, unsafeGetAttrPos) { - state.corepkgsFS->addFile(CanonPath("foo.nix"), "\n\r\n\r{ y = \"x\"; }"); +TEST_F(PrimOpTest, baseNameOf) +{ + auto v = eval("builtins.baseNameOf /some/path"); + ASSERT_THAT(v, IsStringEq("path")); +} - auto expr = "builtins.unsafeGetAttrPos \"y\" (import )"; - auto v = eval(expr); - ASSERT_THAT(v, IsAttrsOfSize(3)); +TEST_F(PrimOpTest, dirOf) +{ + auto v = eval("builtins.dirOf /some/path"); + ASSERT_THAT(v, IsPathEq("/some")); +} - auto file = v.attrs()->find(createSymbol("file")); - ASSERT_NE(file, nullptr); - ASSERT_THAT(*file->value, IsString()); - auto s = baseNameOf(file->value->string_view()); - ASSERT_EQ(s, "foo.nix"); +TEST_F(PrimOpTest, attrValues) +{ + auto v = eval("builtins.attrValues { x = \"foo\"; a = 1; }"); + ASSERT_THAT(v, IsListOfSize(2)); + ASSERT_THAT(*v.listView()[0], IsIntEq(1)); + ASSERT_THAT(*v.listView()[1], IsStringEq("foo")); +} - auto line = v.attrs()->find(createSymbol("line")); - ASSERT_NE(line, nullptr); - state.forceValue(*line->value, noPos); - ASSERT_THAT(*line->value, IsIntEq(4)); +TEST_F(PrimOpTest, getAttr) +{ + auto v = eval("builtins.getAttr \"x\" { x = \"foo\"; }"); + ASSERT_THAT(v, IsStringEq("foo")); +} - auto column = v.attrs()->find(createSymbol("column")); - ASSERT_NE(column, nullptr); - state.forceValue(*column->value, noPos); - ASSERT_THAT(*column->value, IsIntEq(3)); - } +TEST_F(PrimOpTest, getAttrNotFound) +{ + // FIXME: TypeError is really bad here, also the error wording is worse + // than on Nix <=2.3 + ASSERT_THROW(eval("builtins.getAttr \"y\" { }"), TypeError); +} - TEST_F(PrimOpTest, hasAttr) { - auto v = eval("builtins.hasAttr \"x\" { x = 1; }"); - ASSERT_THAT(v, IsTrue()); - } +TEST_F(PrimOpTest, unsafeGetAttrPos) +{ + state.corepkgsFS->addFile(CanonPath("foo.nix"), "\n\r\n\r{ y = \"x\"; }"); - TEST_F(PrimOpTest, hasAttrNotFound) { - auto v = eval("builtins.hasAttr \"x\" { }"); - ASSERT_THAT(v, IsFalse()); - } + auto expr = "builtins.unsafeGetAttrPos \"y\" (import )"; + auto v = eval(expr); + ASSERT_THAT(v, IsAttrsOfSize(3)); - TEST_F(PrimOpTest, isAttrs) { - auto v = eval("builtins.isAttrs {}"); - ASSERT_THAT(v, IsTrue()); - } + auto file = 
v.attrs()->find(createSymbol("file")); + ASSERT_NE(file, nullptr); + ASSERT_THAT(*file->value, IsString()); + auto s = baseNameOf(file->value->string_view()); + ASSERT_EQ(s, "foo.nix"); - TEST_F(PrimOpTest, isAttrsFalse) { - auto v = eval("builtins.isAttrs null"); - ASSERT_THAT(v, IsFalse()); - } + auto line = v.attrs()->find(createSymbol("line")); + ASSERT_NE(line, nullptr); + state.forceValue(*line->value, noPos); + ASSERT_THAT(*line->value, IsIntEq(4)); - TEST_F(PrimOpTest, removeAttrs) { - auto v = eval("builtins.removeAttrs { x = 1; } [\"x\"]"); - ASSERT_THAT(v, IsAttrsOfSize(0)); - } + auto column = v.attrs()->find(createSymbol("column")); + ASSERT_NE(column, nullptr); + state.forceValue(*column->value, noPos); + ASSERT_THAT(*column->value, IsIntEq(3)); +} - TEST_F(PrimOpTest, removeAttrsRetains) { - auto v = eval("builtins.removeAttrs { x = 1; y = 2; } [\"x\"]"); - ASSERT_THAT(v, IsAttrsOfSize(1)); - ASSERT_NE(v.attrs()->find(createSymbol("y")), nullptr); - } +TEST_F(PrimOpTest, hasAttr) +{ + auto v = eval("builtins.hasAttr \"x\" { x = 1; }"); + ASSERT_THAT(v, IsTrue()); +} - TEST_F(PrimOpTest, listToAttrsEmptyList) { - auto v = eval("builtins.listToAttrs []"); - ASSERT_THAT(v, IsAttrsOfSize(0)); - ASSERT_EQ(v.type(), nAttrs); - ASSERT_EQ(v.attrs()->size(), 0u); - } +TEST_F(PrimOpTest, hasAttrNotFound) +{ + auto v = eval("builtins.hasAttr \"x\" { }"); + ASSERT_THAT(v, IsFalse()); +} - TEST_F(PrimOpTest, listToAttrsNotFieldName) { - ASSERT_THROW(eval("builtins.listToAttrs [{}]"), Error); - } +TEST_F(PrimOpTest, isAttrs) +{ + auto v = eval("builtins.isAttrs {}"); + ASSERT_THAT(v, IsTrue()); +} - TEST_F(PrimOpTest, listToAttrs) { - auto v = eval("builtins.listToAttrs [ { name = \"key\"; value = 123; } ]"); - ASSERT_THAT(v, IsAttrsOfSize(1)); - auto key = v.attrs()->find(createSymbol("key")); - ASSERT_NE(key, nullptr); - ASSERT_THAT(*key->value, IsIntEq(123)); - } +TEST_F(PrimOpTest, isAttrsFalse) +{ + auto v = eval("builtins.isAttrs null"); + ASSERT_THAT(v, IsFalse()); +} - TEST_F(PrimOpTest, intersectAttrs) { - auto v = eval("builtins.intersectAttrs { a = 1; b = 2; } { b = 3; c = 4; }"); - ASSERT_THAT(v, IsAttrsOfSize(1)); - auto b = v.attrs()->find(createSymbol("b")); - ASSERT_NE(b, nullptr); - ASSERT_THAT(*b->value, IsIntEq(3)); - } +TEST_F(PrimOpTest, removeAttrs) +{ + auto v = eval("builtins.removeAttrs { x = 1; } [\"x\"]"); + ASSERT_THAT(v, IsAttrsOfSize(0)); +} - TEST_F(PrimOpTest, catAttrs) { - auto v = eval("builtins.catAttrs \"a\" [{a = 1;} {b = 0;} {a = 2;}]"); - ASSERT_THAT(v, IsListOfSize(2)); - ASSERT_THAT(*v.listView()[0], IsIntEq(1)); - ASSERT_THAT(*v.listView()[1], IsIntEq(2)); - } +TEST_F(PrimOpTest, removeAttrsRetains) +{ + auto v = eval("builtins.removeAttrs { x = 1; y = 2; } [\"x\"]"); + ASSERT_THAT(v, IsAttrsOfSize(1)); + ASSERT_NE(v.attrs()->find(createSymbol("y")), nullptr); +} - TEST_F(PrimOpTest, functionArgs) { - auto v = eval("builtins.functionArgs ({ x, y ? 
123}: 1)"); - ASSERT_THAT(v, IsAttrsOfSize(2)); +TEST_F(PrimOpTest, listToAttrsEmptyList) +{ + auto v = eval("builtins.listToAttrs []"); + ASSERT_THAT(v, IsAttrsOfSize(0)); + ASSERT_EQ(v.type(), nAttrs); + ASSERT_EQ(v.attrs()->size(), 0u); +} - auto x = v.attrs()->find(createSymbol("x")); - ASSERT_NE(x, nullptr); - ASSERT_THAT(*x->value, IsFalse()); +TEST_F(PrimOpTest, listToAttrsNotFieldName) +{ + ASSERT_THROW(eval("builtins.listToAttrs [{}]"), Error); +} - auto y = v.attrs()->find(createSymbol("y")); - ASSERT_NE(y, nullptr); - ASSERT_THAT(*y->value, IsTrue()); - } +TEST_F(PrimOpTest, listToAttrs) +{ + auto v = eval("builtins.listToAttrs [ { name = \"key\"; value = 123; } ]"); + ASSERT_THAT(v, IsAttrsOfSize(1)); + auto key = v.attrs()->find(createSymbol("key")); + ASSERT_NE(key, nullptr); + ASSERT_THAT(*key->value, IsIntEq(123)); +} - TEST_F(PrimOpTest, mapAttrs) { - auto v = eval("builtins.mapAttrs (name: value: value * 10) { a = 1; b = 2; }"); - ASSERT_THAT(v, IsAttrsOfSize(2)); +TEST_F(PrimOpTest, intersectAttrs) +{ + auto v = eval("builtins.intersectAttrs { a = 1; b = 2; } { b = 3; c = 4; }"); + ASSERT_THAT(v, IsAttrsOfSize(1)); + auto b = v.attrs()->find(createSymbol("b")); + ASSERT_NE(b, nullptr); + ASSERT_THAT(*b->value, IsIntEq(3)); +} - auto a = v.attrs()->find(createSymbol("a")); - ASSERT_NE(a, nullptr); - ASSERT_THAT(*a->value, IsThunk()); - state.forceValue(*a->value, noPos); - ASSERT_THAT(*a->value, IsIntEq(10)); +TEST_F(PrimOpTest, catAttrs) +{ + auto v = eval("builtins.catAttrs \"a\" [{a = 1;} {b = 0;} {a = 2;}]"); + ASSERT_THAT(v, IsListOfSize(2)); + ASSERT_THAT(*v.listView()[0], IsIntEq(1)); + ASSERT_THAT(*v.listView()[1], IsIntEq(2)); +} - auto b = v.attrs()->find(createSymbol("b")); - ASSERT_NE(b, nullptr); - ASSERT_THAT(*b->value, IsThunk()); - state.forceValue(*b->value, noPos); - ASSERT_THAT(*b->value, IsIntEq(20)); - } +TEST_F(PrimOpTest, functionArgs) +{ + auto v = eval("builtins.functionArgs ({ x, y ? 
123}: 1)"); + ASSERT_THAT(v, IsAttrsOfSize(2)); - TEST_F(PrimOpTest, isList) { - auto v = eval("builtins.isList []"); - ASSERT_THAT(v, IsTrue()); - } + auto x = v.attrs()->find(createSymbol("x")); + ASSERT_NE(x, nullptr); + ASSERT_THAT(*x->value, IsFalse()); - TEST_F(PrimOpTest, isListFalse) { - auto v = eval("builtins.isList null"); - ASSERT_THAT(v, IsFalse()); - } + auto y = v.attrs()->find(createSymbol("y")); + ASSERT_NE(y, nullptr); + ASSERT_THAT(*y->value, IsTrue()); +} - TEST_F(PrimOpTest, elemtAt) { - auto v = eval("builtins.elemAt [0 1 2 3] 3"); - ASSERT_THAT(v, IsIntEq(3)); - } +TEST_F(PrimOpTest, mapAttrs) +{ + auto v = eval("builtins.mapAttrs (name: value: value * 10) { a = 1; b = 2; }"); + ASSERT_THAT(v, IsAttrsOfSize(2)); - TEST_F(PrimOpTest, elemtAtOutOfBounds) { - ASSERT_THROW(eval("builtins.elemAt [0 1 2 3] 5"), Error); - ASSERT_THROW(eval("builtins.elemAt [0] 4294967296"), Error); - } + auto a = v.attrs()->find(createSymbol("a")); + ASSERT_NE(a, nullptr); + ASSERT_THAT(*a->value, IsThunk()); + state.forceValue(*a->value, noPos); + ASSERT_THAT(*a->value, IsIntEq(10)); - TEST_F(PrimOpTest, head) { - auto v = eval("builtins.head [ 3 2 1 0 ]"); - ASSERT_THAT(v, IsIntEq(3)); - } + auto b = v.attrs()->find(createSymbol("b")); + ASSERT_NE(b, nullptr); + ASSERT_THAT(*b->value, IsThunk()); + state.forceValue(*b->value, noPos); + ASSERT_THAT(*b->value, IsIntEq(20)); +} - TEST_F(PrimOpTest, headEmpty) { - ASSERT_THROW(eval("builtins.head [ ]"), Error); - } +TEST_F(PrimOpTest, isList) +{ + auto v = eval("builtins.isList []"); + ASSERT_THAT(v, IsTrue()); +} - TEST_F(PrimOpTest, headWrongType) { - ASSERT_THROW(eval("builtins.head { }"), Error); - } +TEST_F(PrimOpTest, isListFalse) +{ + auto v = eval("builtins.isList null"); + ASSERT_THAT(v, IsFalse()); +} - TEST_F(PrimOpTest, tail) { - auto v = eval("builtins.tail [ 3 2 1 0 ]"); - ASSERT_THAT(v, IsListOfSize(3)); - auto listView = v.listView(); - for (const auto [n, elem] : enumerate(listView)) - ASSERT_THAT(*elem, IsIntEq(2 - static_cast(n))); - } +TEST_F(PrimOpTest, elemtAt) +{ + auto v = eval("builtins.elemAt [0 1 2 3] 3"); + ASSERT_THAT(v, IsIntEq(3)); +} - TEST_F(PrimOpTest, tailEmpty) { - ASSERT_THROW(eval("builtins.tail []"), Error); - } +TEST_F(PrimOpTest, elemtAtOutOfBounds) +{ + ASSERT_THROW(eval("builtins.elemAt [0 1 2 3] 5"), Error); + ASSERT_THROW(eval("builtins.elemAt [0] 4294967296"), Error); +} - TEST_F(PrimOpTest, map) { - auto v = eval("map (x: \"foo\" + x) [ \"bar\" \"bla\" \"abc\" ]"); - ASSERT_THAT(v, IsListOfSize(3)); - auto elem = v.listView()[0]; +TEST_F(PrimOpTest, head) +{ + auto v = eval("builtins.head [ 3 2 1 0 ]"); + ASSERT_THAT(v, IsIntEq(3)); +} + +TEST_F(PrimOpTest, headEmpty) +{ + ASSERT_THROW(eval("builtins.head [ ]"), Error); +} + +TEST_F(PrimOpTest, headWrongType) +{ + ASSERT_THROW(eval("builtins.head { }"), Error); +} + +TEST_F(PrimOpTest, tail) +{ + auto v = eval("builtins.tail [ 3 2 1 0 ]"); + ASSERT_THAT(v, IsListOfSize(3)); + auto listView = v.listView(); + for (const auto [n, elem] : enumerate(listView)) + ASSERT_THAT(*elem, IsIntEq(2 - static_cast(n))); +} + +TEST_F(PrimOpTest, tailEmpty) +{ + ASSERT_THROW(eval("builtins.tail []"), Error); +} + +TEST_F(PrimOpTest, map) +{ + auto v = eval("map (x: \"foo\" + x) [ \"bar\" \"bla\" \"abc\" ]"); + ASSERT_THAT(v, IsListOfSize(3)); + auto elem = v.listView()[0]; + ASSERT_THAT(*elem, IsThunk()); + state.forceValue(*elem, noPos); + ASSERT_THAT(*elem, IsStringEq("foobar")); + + elem = v.listView()[1]; + ASSERT_THAT(*elem, IsThunk()); + 
state.forceValue(*elem, noPos); + ASSERT_THAT(*elem, IsStringEq("foobla")); + + elem = v.listView()[2]; + ASSERT_THAT(*elem, IsThunk()); + state.forceValue(*elem, noPos); + ASSERT_THAT(*elem, IsStringEq("fooabc")); +} + +TEST_F(PrimOpTest, filter) +{ + auto v = eval("builtins.filter (x: x == 2) [ 3 2 3 2 3 2 ]"); + ASSERT_THAT(v, IsListOfSize(3)); + for (const auto elem : v.listView()) + ASSERT_THAT(*elem, IsIntEq(2)); +} + +TEST_F(PrimOpTest, elemTrue) +{ + auto v = eval("builtins.elem 3 [ 1 2 3 4 5 ]"); + ASSERT_THAT(v, IsTrue()); +} + +TEST_F(PrimOpTest, elemFalse) +{ + auto v = eval("builtins.elem 6 [ 1 2 3 4 5 ]"); + ASSERT_THAT(v, IsFalse()); +} + +TEST_F(PrimOpTest, concatLists) +{ + auto v = eval("builtins.concatLists [[1 2] [3 4]]"); + ASSERT_THAT(v, IsListOfSize(4)); + auto listView = v.listView(); + for (const auto [i, elem] : enumerate(listView)) + ASSERT_THAT(*elem, IsIntEq(static_cast(i) + 1)); +} + +TEST_F(PrimOpTest, length) +{ + auto v = eval("builtins.length [ 1 2 3 ]"); + ASSERT_THAT(v, IsIntEq(3)); +} + +TEST_F(PrimOpTest, foldStrict) +{ + auto v = eval("builtins.foldl' (a: b: a + b) 0 [1 2 3]"); + ASSERT_THAT(v, IsIntEq(6)); +} + +TEST_F(PrimOpTest, anyTrue) +{ + auto v = eval("builtins.any (x: x == 2) [ 1 2 3 ]"); + ASSERT_THAT(v, IsTrue()); +} + +TEST_F(PrimOpTest, anyFalse) +{ + auto v = eval("builtins.any (x: x == 5) [ 1 2 3 ]"); + ASSERT_THAT(v, IsFalse()); +} + +TEST_F(PrimOpTest, allTrue) +{ + auto v = eval("builtins.all (x: x > 0) [ 1 2 3 ]"); + ASSERT_THAT(v, IsTrue()); +} + +TEST_F(PrimOpTest, allFalse) +{ + auto v = eval("builtins.all (x: x <= 0) [ 1 2 3 ]"); + ASSERT_THAT(v, IsFalse()); +} + +TEST_F(PrimOpTest, genList) +{ + auto v = eval("builtins.genList (x: x + 1) 3"); + ASSERT_EQ(v.type(), nList); + ASSERT_EQ(v.listSize(), 3u); + auto listView = v.listView(); + for (const auto [i, elem] : enumerate(listView)) { ASSERT_THAT(*elem, IsThunk()); state.forceValue(*elem, noPos); - ASSERT_THAT(*elem, IsStringEq("foobar")); - - elem = v.listView()[1]; - ASSERT_THAT(*elem, IsThunk()); - state.forceValue(*elem, noPos); - ASSERT_THAT(*elem, IsStringEq("foobla")); - - elem = v.listView()[2]; - ASSERT_THAT(*elem, IsThunk()); - state.forceValue(*elem, noPos); - ASSERT_THAT(*elem, IsStringEq("fooabc")); + ASSERT_THAT(*elem, IsIntEq(static_cast(i) + 1)); } +} - TEST_F(PrimOpTest, filter) { - auto v = eval("builtins.filter (x: x == 2) [ 3 2 3 2 3 2 ]"); - ASSERT_THAT(v, IsListOfSize(3)); - for (const auto elem : v.listView()) - ASSERT_THAT(*elem, IsIntEq(2)); - } +TEST_F(PrimOpTest, sortLessThan) +{ + auto v = eval("builtins.sort builtins.lessThan [ 483 249 526 147 42 77 ]"); + ASSERT_EQ(v.type(), nList); + ASSERT_EQ(v.listSize(), 6u); - TEST_F(PrimOpTest, elemTrue) { - auto v = eval("builtins.elem 3 [ 1 2 3 4 5 ]"); - ASSERT_THAT(v, IsTrue()); - } + const std::vector numbers = {42, 77, 147, 249, 483, 526}; + auto listView = v.listView(); + for (const auto [n, elem] : enumerate(listView)) + ASSERT_THAT(*elem, IsIntEq(numbers[n])); +} - TEST_F(PrimOpTest, elemFalse) { - auto v = eval("builtins.elem 6 [ 1 2 3 4 5 ]"); - ASSERT_THAT(v, IsFalse()); - } +TEST_F(PrimOpTest, partition) +{ + auto v = eval("builtins.partition (x: x > 10) [1 23 9 3 42]"); + ASSERT_THAT(v, IsAttrsOfSize(2)); - TEST_F(PrimOpTest, concatLists) { - auto v = eval("builtins.concatLists [[1 2] [3 4]]"); - ASSERT_THAT(v, IsListOfSize(4)); - auto listView = v.listView(); - for (const auto [i, elem] : enumerate(listView)) - ASSERT_THAT(*elem, IsIntEq(static_cast(i)+1)); - } + auto right = 
v.attrs()->get(createSymbol("right")); + ASSERT_NE(right, nullptr); + ASSERT_THAT(*right->value, IsListOfSize(2)); + ASSERT_THAT(*right->value->listView()[0], IsIntEq(23)); + ASSERT_THAT(*right->value->listView()[1], IsIntEq(42)); - TEST_F(PrimOpTest, length) { - auto v = eval("builtins.length [ 1 2 3 ]"); - ASSERT_THAT(v, IsIntEq(3)); - } + auto wrong = v.attrs()->get(createSymbol("wrong")); + ASSERT_NE(wrong, nullptr); + ASSERT_EQ(wrong->value->type(), nList); + ASSERT_EQ(wrong->value->listSize(), 3u); + ASSERT_THAT(*wrong->value, IsListOfSize(3)); + ASSERT_THAT(*wrong->value->listView()[0], IsIntEq(1)); + ASSERT_THAT(*wrong->value->listView()[1], IsIntEq(9)); + ASSERT_THAT(*wrong->value->listView()[2], IsIntEq(3)); +} - TEST_F(PrimOpTest, foldStrict) { - auto v = eval("builtins.foldl' (a: b: a + b) 0 [1 2 3]"); - ASSERT_THAT(v, IsIntEq(6)); - } +TEST_F(PrimOpTest, concatMap) +{ + auto v = eval("builtins.concatMap (x: x ++ [0]) [ [1 2] [3 4] ]"); + ASSERT_EQ(v.type(), nList); + ASSERT_EQ(v.listSize(), 6u); - TEST_F(PrimOpTest, anyTrue) { - auto v = eval("builtins.any (x: x == 2) [ 1 2 3 ]"); - ASSERT_THAT(v, IsTrue()); - } + const std::vector numbers = {1, 2, 0, 3, 4, 0}; + auto listView = v.listView(); + for (const auto [n, elem] : enumerate(listView)) + ASSERT_THAT(*elem, IsIntEq(numbers[n])); +} - TEST_F(PrimOpTest, anyFalse) { - auto v = eval("builtins.any (x: x == 5) [ 1 2 3 ]"); - ASSERT_THAT(v, IsFalse()); - } +TEST_F(PrimOpTest, addInt) +{ + auto v = eval("builtins.add 3 5"); + ASSERT_THAT(v, IsIntEq(8)); +} - TEST_F(PrimOpTest, allTrue) { - auto v = eval("builtins.all (x: x > 0) [ 1 2 3 ]"); - ASSERT_THAT(v, IsTrue()); - } +TEST_F(PrimOpTest, addFloat) +{ + auto v = eval("builtins.add 3.0 5.0"); + ASSERT_THAT(v, IsFloatEq(8.0)); +} - TEST_F(PrimOpTest, allFalse) { - auto v = eval("builtins.all (x: x <= 0) [ 1 2 3 ]"); - ASSERT_THAT(v, IsFalse()); - } +TEST_F(PrimOpTest, addFloatToInt) +{ + auto v = eval("builtins.add 3.0 5"); + ASSERT_THAT(v, IsFloatEq(8.0)); - TEST_F(PrimOpTest, genList) { - auto v = eval("builtins.genList (x: x + 1) 3"); - ASSERT_EQ(v.type(), nList); - ASSERT_EQ(v.listSize(), 3u); - auto listView = v.listView(); - for (const auto [i, elem] : enumerate(listView)) { - ASSERT_THAT(*elem, IsThunk()); - state.forceValue(*elem, noPos); - ASSERT_THAT(*elem, IsIntEq(static_cast(i)+1)); - } - } + v = eval("builtins.add 3 5.0"); + ASSERT_THAT(v, IsFloatEq(8.0)); +} - TEST_F(PrimOpTest, sortLessThan) { - auto v = eval("builtins.sort builtins.lessThan [ 483 249 526 147 42 77 ]"); - ASSERT_EQ(v.type(), nList); - ASSERT_EQ(v.listSize(), 6u); +TEST_F(PrimOpTest, subInt) +{ + auto v = eval("builtins.sub 5 2"); + ASSERT_THAT(v, IsIntEq(3)); +} - const std::vector numbers = { 42, 77, 147, 249, 483, 526 }; - auto listView = v.listView(); - for (const auto [n, elem] : enumerate(listView)) - ASSERT_THAT(*elem, IsIntEq(numbers[n])); - } +TEST_F(PrimOpTest, subFloat) +{ + auto v = eval("builtins.sub 5.0 2.0"); + ASSERT_THAT(v, IsFloatEq(3.0)); +} - TEST_F(PrimOpTest, partition) { - auto v = eval("builtins.partition (x: x > 10) [1 23 9 3 42]"); - ASSERT_THAT(v, IsAttrsOfSize(2)); +TEST_F(PrimOpTest, subFloatFromInt) +{ + auto v = eval("builtins.sub 5.0 2"); + ASSERT_THAT(v, IsFloatEq(3.0)); - auto right = v.attrs()->get(createSymbol("right")); - ASSERT_NE(right, nullptr); - ASSERT_THAT(*right->value, IsListOfSize(2)); - ASSERT_THAT(*right->value->listView()[0], IsIntEq(23)); - ASSERT_THAT(*right->value->listView()[1], IsIntEq(42)); + v = eval("builtins.sub 4 2.0"); + ASSERT_THAT(v, 
IsFloatEq(2.0)); +} - auto wrong = v.attrs()->get(createSymbol("wrong")); - ASSERT_NE(wrong, nullptr); - ASSERT_EQ(wrong->value->type(), nList); - ASSERT_EQ(wrong->value->listSize(), 3u); - ASSERT_THAT(*wrong->value, IsListOfSize(3)); - ASSERT_THAT(*wrong->value->listView()[0], IsIntEq(1)); - ASSERT_THAT(*wrong->value->listView()[1], IsIntEq(9)); - ASSERT_THAT(*wrong->value->listView()[2], IsIntEq(3)); - } +TEST_F(PrimOpTest, mulInt) +{ + auto v = eval("builtins.mul 3 5"); + ASSERT_THAT(v, IsIntEq(15)); +} - TEST_F(PrimOpTest, concatMap) { - auto v = eval("builtins.concatMap (x: x ++ [0]) [ [1 2] [3 4] ]"); - ASSERT_EQ(v.type(), nList); - ASSERT_EQ(v.listSize(), 6u); +TEST_F(PrimOpTest, mulFloat) +{ + auto v = eval("builtins.mul 3.0 5.0"); + ASSERT_THAT(v, IsFloatEq(15.0)); +} - const std::vector numbers = { 1, 2, 0, 3, 4, 0 }; - auto listView = v.listView(); - for (const auto [n, elem] : enumerate(listView)) - ASSERT_THAT(*elem, IsIntEq(numbers[n])); - } +TEST_F(PrimOpTest, mulFloatMixed) +{ + auto v = eval("builtins.mul 3 5.0"); + ASSERT_THAT(v, IsFloatEq(15.0)); - TEST_F(PrimOpTest, addInt) { - auto v = eval("builtins.add 3 5"); - ASSERT_THAT(v, IsIntEq(8)); - } + v = eval("builtins.mul 2.0 5"); + ASSERT_THAT(v, IsFloatEq(10.0)); +} - TEST_F(PrimOpTest, addFloat) { - auto v = eval("builtins.add 3.0 5.0"); - ASSERT_THAT(v, IsFloatEq(8.0)); - } +TEST_F(PrimOpTest, divInt) +{ + auto v = eval("builtins.div 5 (-1)"); + ASSERT_THAT(v, IsIntEq(-5)); +} - TEST_F(PrimOpTest, addFloatToInt) { - auto v = eval("builtins.add 3.0 5"); - ASSERT_THAT(v, IsFloatEq(8.0)); +TEST_F(PrimOpTest, divIntZero) +{ + ASSERT_THROW(eval("builtins.div 5 0"), EvalError); +} - v = eval("builtins.add 3 5.0"); - ASSERT_THAT(v, IsFloatEq(8.0)); - } +TEST_F(PrimOpTest, divFloat) +{ + auto v = eval("builtins.div 5.0 (-1)"); + ASSERT_THAT(v, IsFloatEq(-5.0)); +} - TEST_F(PrimOpTest, subInt) { - auto v = eval("builtins.sub 5 2"); - ASSERT_THAT(v, IsIntEq(3)); - } +TEST_F(PrimOpTest, divFloatZero) +{ + ASSERT_THROW(eval("builtins.div 5.0 0.0"), EvalError); +} - TEST_F(PrimOpTest, subFloat) { - auto v = eval("builtins.sub 5.0 2.0"); - ASSERT_THAT(v, IsFloatEq(3.0)); - } +TEST_F(PrimOpTest, bitOr) +{ + auto v = eval("builtins.bitOr 1 2"); + ASSERT_THAT(v, IsIntEq(3)); +} - TEST_F(PrimOpTest, subFloatFromInt) { - auto v = eval("builtins.sub 5.0 2"); - ASSERT_THAT(v, IsFloatEq(3.0)); +TEST_F(PrimOpTest, bitXor) +{ + auto v = eval("builtins.bitXor 3 2"); + ASSERT_THAT(v, IsIntEq(1)); +} - v = eval("builtins.sub 4 2.0"); - ASSERT_THAT(v, IsFloatEq(2.0)); - } +TEST_F(PrimOpTest, lessThanFalse) +{ + auto v = eval("builtins.lessThan 3 1"); + ASSERT_THAT(v, IsFalse()); +} - TEST_F(PrimOpTest, mulInt) { - auto v = eval("builtins.mul 3 5"); - ASSERT_THAT(v, IsIntEq(15)); - } +TEST_F(PrimOpTest, lessThanTrue) +{ + auto v = eval("builtins.lessThan 1 3"); + ASSERT_THAT(v, IsTrue()); +} - TEST_F(PrimOpTest, mulFloat) { - auto v = eval("builtins.mul 3.0 5.0"); - ASSERT_THAT(v, IsFloatEq(15.0)); - } +TEST_F(PrimOpTest, toStringAttrsThrows) +{ + ASSERT_THROW(eval("builtins.toString {}"), EvalError); +} - TEST_F(PrimOpTest, mulFloatMixed) { - auto v = eval("builtins.mul 3 5.0"); - ASSERT_THAT(v, IsFloatEq(15.0)); +TEST_F(PrimOpTest, toStringLambdaThrows) +{ + ASSERT_THROW(eval("builtins.toString (x: x)"), EvalError); +} - v = eval("builtins.mul 2.0 5"); - ASSERT_THAT(v, IsFloatEq(10.0)); - } +class ToStringPrimOpTest : public PrimOpTest, + public testing::WithParamInterface> +{}; - TEST_F(PrimOpTest, divInt) { - auto v = eval("builtins.div 5 
(-1)"); - ASSERT_THAT(v, IsIntEq(-5)); - } - - TEST_F(PrimOpTest, divIntZero) { - ASSERT_THROW(eval("builtins.div 5 0"), EvalError); - } - - TEST_F(PrimOpTest, divFloat) { - auto v = eval("builtins.div 5.0 (-1)"); - ASSERT_THAT(v, IsFloatEq(-5.0)); - } - - TEST_F(PrimOpTest, divFloatZero) { - ASSERT_THROW(eval("builtins.div 5.0 0.0"), EvalError); - } - - TEST_F(PrimOpTest, bitOr) { - auto v = eval("builtins.bitOr 1 2"); - ASSERT_THAT(v, IsIntEq(3)); - } - - TEST_F(PrimOpTest, bitXor) { - auto v = eval("builtins.bitXor 3 2"); - ASSERT_THAT(v, IsIntEq(1)); - } - - TEST_F(PrimOpTest, lessThanFalse) { - auto v = eval("builtins.lessThan 3 1"); - ASSERT_THAT(v, IsFalse()); - } - - TEST_F(PrimOpTest, lessThanTrue) { - auto v = eval("builtins.lessThan 1 3"); - ASSERT_THAT(v, IsTrue()); - } - - TEST_F(PrimOpTest, toStringAttrsThrows) { - ASSERT_THROW(eval("builtins.toString {}"), EvalError); - } - - TEST_F(PrimOpTest, toStringLambdaThrows) { - ASSERT_THROW(eval("builtins.toString (x: x)"), EvalError); - } - - class ToStringPrimOpTest : - public PrimOpTest, - public testing::WithParamInterface> - {}; - - TEST_P(ToStringPrimOpTest, toString) { - const auto [input, output] = GetParam(); - auto v = eval(input); - ASSERT_THAT(v, IsStringEq(output)); - } +TEST_P(ToStringPrimOpTest, toString) +{ + const auto [input, output] = GetParam(); + auto v = eval(input); + ASSERT_THAT(v, IsStringEq(output)); +} #define CASE(input, output) (std::make_tuple(std::string_view("builtins.toString " input), std::string_view(output))) - INSTANTIATE_TEST_SUITE_P( - toString, - ToStringPrimOpTest, - testing::Values( - CASE(R"("foo")", "foo"), - CASE(R"(1)", "1"), - CASE(R"([1 2 3])", "1 2 3"), - CASE(R"(.123)", "0.123000"), - CASE(R"(true)", "1"), - CASE(R"(false)", ""), - CASE(R"(null)", ""), - CASE(R"({ v = "bar"; __toString = self: self.v; })", "bar"), - CASE(R"({ v = "bar"; __toString = self: self.v; outPath = "foo"; })", "bar"), - CASE(R"({ outPath = "foo"; })", "foo"), - CASE(R"(./test)", "/test") - ) - ); +INSTANTIATE_TEST_SUITE_P( + toString, + ToStringPrimOpTest, + testing::Values( + CASE(R"("foo")", "foo"), + CASE(R"(1)", "1"), + CASE(R"([1 2 3])", "1 2 3"), + CASE(R"(.123)", "0.123000"), + CASE(R"(true)", "1"), + CASE(R"(false)", ""), + CASE(R"(null)", ""), + CASE(R"({ v = "bar"; __toString = self: self.v; })", "bar"), + CASE(R"({ v = "bar"; __toString = self: self.v; outPath = "foo"; })", "bar"), + CASE(R"({ outPath = "foo"; })", "foo"), + CASE(R"(./test)", "/test"))); #undef CASE - TEST_F(PrimOpTest, substring){ - auto v = eval("builtins.substring 0 3 \"nixos\""); - ASSERT_THAT(v, IsStringEq("nix")); - } +TEST_F(PrimOpTest, substring) +{ + auto v = eval("builtins.substring 0 3 \"nixos\""); + ASSERT_THAT(v, IsStringEq("nix")); +} - TEST_F(PrimOpTest, substringSmallerString){ - auto v = eval("builtins.substring 0 3 \"n\""); - ASSERT_THAT(v, IsStringEq("n")); - } +TEST_F(PrimOpTest, substringSmallerString) +{ + auto v = eval("builtins.substring 0 3 \"n\""); + ASSERT_THAT(v, IsStringEq("n")); +} - TEST_F(PrimOpTest, substringHugeStart){ - auto v = eval("builtins.substring 4294967296 5 \"nixos\""); - ASSERT_THAT(v, IsStringEq("")); - } +TEST_F(PrimOpTest, substringHugeStart) +{ + auto v = eval("builtins.substring 4294967296 5 \"nixos\""); + ASSERT_THAT(v, IsStringEq("")); +} - TEST_F(PrimOpTest, substringHugeLength){ - auto v = eval("builtins.substring 0 4294967296 \"nixos\""); - ASSERT_THAT(v, IsStringEq("nixos")); - } +TEST_F(PrimOpTest, substringHugeLength) +{ + auto v = eval("builtins.substring 0 4294967296 
\"nixos\""); + ASSERT_THAT(v, IsStringEq("nixos")); +} - TEST_F(PrimOpTest, substringEmptyString){ - auto v = eval("builtins.substring 1 3 \"\""); - ASSERT_THAT(v, IsStringEq("")); - } +TEST_F(PrimOpTest, substringEmptyString) +{ + auto v = eval("builtins.substring 1 3 \"\""); + ASSERT_THAT(v, IsStringEq("")); +} - TEST_F(PrimOpTest, stringLength) { - auto v = eval("builtins.stringLength \"123\""); - ASSERT_THAT(v, IsIntEq(3)); - } - TEST_F(PrimOpTest, hashStringMd5) { - auto v = eval("builtins.hashString \"md5\" \"asdf\""); - ASSERT_THAT(v, IsStringEq("912ec803b2ce49e4a541068d495ab570")); - } +TEST_F(PrimOpTest, stringLength) +{ + auto v = eval("builtins.stringLength \"123\""); + ASSERT_THAT(v, IsIntEq(3)); +} - TEST_F(PrimOpTest, hashStringSha1) { - auto v = eval("builtins.hashString \"sha1\" \"asdf\""); - ASSERT_THAT(v, IsStringEq("3da541559918a808c2402bba5012f6c60b27661c")); - } +TEST_F(PrimOpTest, hashStringMd5) +{ + auto v = eval("builtins.hashString \"md5\" \"asdf\""); + ASSERT_THAT(v, IsStringEq("912ec803b2ce49e4a541068d495ab570")); +} - TEST_F(PrimOpTest, hashStringSha256) { - auto v = eval("builtins.hashString \"sha256\" \"asdf\""); - ASSERT_THAT(v, IsStringEq("f0e4c2f76c58916ec258f246851bea091d14d4247a2fc3e18694461b1816e13b")); - } +TEST_F(PrimOpTest, hashStringSha1) +{ + auto v = eval("builtins.hashString \"sha1\" \"asdf\""); + ASSERT_THAT(v, IsStringEq("3da541559918a808c2402bba5012f6c60b27661c")); +} - TEST_F(PrimOpTest, hashStringSha512) { - auto v = eval("builtins.hashString \"sha512\" \"asdf\""); - ASSERT_THAT(v, IsStringEq("401b09eab3c013d4ca54922bb802bec8fd5318192b0a75f201d8b3727429080fb337591abd3e44453b954555b7a0812e1081c39b740293f765eae731f5a65ed1")); - } +TEST_F(PrimOpTest, hashStringSha256) +{ + auto v = eval("builtins.hashString \"sha256\" \"asdf\""); + ASSERT_THAT(v, IsStringEq("f0e4c2f76c58916ec258f246851bea091d14d4247a2fc3e18694461b1816e13b")); +} - TEST_F(PrimOpTest, hashStringInvalidHashAlgorithm) { - ASSERT_THROW(eval("builtins.hashString \"foobar\" \"asdf\""), Error); - } +TEST_F(PrimOpTest, hashStringSha512) +{ + auto v = eval("builtins.hashString \"sha512\" \"asdf\""); + ASSERT_THAT( + v, + IsStringEq( + "401b09eab3c013d4ca54922bb802bec8fd5318192b0a75f201d8b3727429080fb337591abd3e44453b954555b7a0812e1081c39b740293f765eae731f5a65ed1")); +} - TEST_F(PrimOpTest, nixPath) { - auto v = eval("builtins.nixPath"); - ASSERT_EQ(v.type(), nList); - // We can't test much more as currently the EvalSettings are a global - // that we can't easily swap / replace - } +TEST_F(PrimOpTest, hashStringInvalidHashAlgorithm) +{ + ASSERT_THROW(eval("builtins.hashString \"foobar\" \"asdf\""), Error); +} - TEST_F(PrimOpTest, langVersion) { - auto v = eval("builtins.langVersion"); - ASSERT_EQ(v.type(), nInt); - } +TEST_F(PrimOpTest, nixPath) +{ + auto v = eval("builtins.nixPath"); + ASSERT_EQ(v.type(), nList); + // We can't test much more as currently the EvalSettings are a global + // that we can't easily swap / replace +} - TEST_F(PrimOpTest, storeDir) { - auto v = eval("builtins.storeDir"); - ASSERT_THAT(v, IsStringEq(settings.nixStore)); - } +TEST_F(PrimOpTest, langVersion) +{ + auto v = eval("builtins.langVersion"); + ASSERT_EQ(v.type(), nInt); +} - TEST_F(PrimOpTest, nixVersion) { - auto v = eval("builtins.nixVersion"); - ASSERT_THAT(v, IsStringEq(nixVersion)); - } +TEST_F(PrimOpTest, storeDir) +{ + auto v = eval("builtins.storeDir"); + ASSERT_THAT(v, IsStringEq(settings.nixStore)); +} - TEST_F(PrimOpTest, currentSystem) { - auto v = eval("builtins.currentSystem"); - 
ASSERT_THAT(v, IsStringEq(evalSettings.getCurrentSystem())); - } +TEST_F(PrimOpTest, nixVersion) +{ + auto v = eval("builtins.nixVersion"); + ASSERT_THAT(v, IsStringEq(nixVersion)); +} - TEST_F(PrimOpTest, derivation) { - auto v = eval("derivation"); - ASSERT_EQ(v.type(), nFunction); - ASSERT_TRUE(v.isLambda()); - ASSERT_NE(v.lambda().fun, nullptr); - ASSERT_TRUE(v.lambda().fun->hasFormals()); - } +TEST_F(PrimOpTest, currentSystem) +{ + auto v = eval("builtins.currentSystem"); + ASSERT_THAT(v, IsStringEq(evalSettings.getCurrentSystem())); +} - TEST_F(PrimOpTest, currentTime) { - auto v = eval("builtins.currentTime"); - ASSERT_EQ(v.type(), nInt); - ASSERT_TRUE(v.integer() > 0); - } +TEST_F(PrimOpTest, derivation) +{ + auto v = eval("derivation"); + ASSERT_EQ(v.type(), nFunction); + ASSERT_TRUE(v.isLambda()); + ASSERT_NE(v.lambda().fun, nullptr); + ASSERT_TRUE(v.lambda().fun->hasFormals()); +} - TEST_F(PrimOpTest, splitVersion) { - auto v = eval("builtins.splitVersion \"1.2.3git\""); - ASSERT_THAT(v, IsListOfSize(4)); +TEST_F(PrimOpTest, currentTime) +{ + auto v = eval("builtins.currentTime"); + ASSERT_EQ(v.type(), nInt); + ASSERT_TRUE(v.integer() > 0); +} - const std::vector strings = { "1", "2", "3", "git" }; - auto listView = v.listView(); - for (const auto [n, p] : enumerate(listView)) - ASSERT_THAT(*p, IsStringEq(strings[n])); - } +TEST_F(PrimOpTest, splitVersion) +{ + auto v = eval("builtins.splitVersion \"1.2.3git\""); + ASSERT_THAT(v, IsListOfSize(4)); - class CompareVersionsPrimOpTest : - public PrimOpTest, - public testing::WithParamInterface> - {}; + const std::vector strings = {"1", "2", "3", "git"}; + auto listView = v.listView(); + for (const auto [n, p] : enumerate(listView)) + ASSERT_THAT(*p, IsStringEq(strings[n])); +} - TEST_P(CompareVersionsPrimOpTest, compareVersions) { - auto [expression, expectation] = GetParam(); - auto v = eval(expression); - ASSERT_THAT(v, IsIntEq(expectation)); - } +class CompareVersionsPrimOpTest : public PrimOpTest, + public testing::WithParamInterface> +{}; + +TEST_P(CompareVersionsPrimOpTest, compareVersions) +{ + auto [expression, expectation] = GetParam(); + auto v = eval(expression); + ASSERT_THAT(v, IsIntEq(expectation)); +} #define CASE(a, b, expected) (std::make_tuple("builtins.compareVersions \"" #a "\" \"" #b "\"", expected)) - INSTANTIATE_TEST_SUITE_P( - compareVersions, - CompareVersionsPrimOpTest, - testing::Values( - // The first two are weird cases. Intuition tells they should - // be the same but they aren't. - CASE(1.0, 1.0.0, -1), - CASE(1.0.0, 1.0, 1), - // the following are from the nix-env manual: - CASE(1.0, 2.3, -1), - CASE(2.1, 2.3, -1), - CASE(2.3, 2.3, 0), - CASE(2.5, 2.3, 1), - CASE(3.1, 2.3, 1), - CASE(2.3.1, 2.3, 1), - CASE(2.3.1, 2.3a, 1), - CASE(2.3pre1, 2.3, -1), - CASE(2.3pre3, 2.3pre12, -1), - CASE(2.3a, 2.3c, -1), - CASE(2.3pre1, 2.3c, -1), - CASE(2.3pre1, 2.3q, -1) - ) - ); +INSTANTIATE_TEST_SUITE_P( + compareVersions, + CompareVersionsPrimOpTest, + testing::Values( + // The first two are weird cases. Intuition tells they should + // be the same but they aren't. 
+ CASE(1.0, 1.0.0, -1), + CASE(1.0.0, 1.0, 1), + // the following are from the nix-env manual: + CASE(1.0, 2.3, -1), + CASE(2.1, 2.3, -1), + CASE(2.3, 2.3, 0), + CASE(2.5, 2.3, 1), + CASE(3.1, 2.3, 1), + CASE(2.3.1, 2.3, 1), + CASE(2.3.1, 2.3a, 1), + CASE(2.3pre1, 2.3, -1), + CASE(2.3pre3, 2.3pre12, -1), + CASE(2.3a, 2.3c, -1), + CASE(2.3pre1, 2.3c, -1), + CASE(2.3pre1, 2.3q, -1))); #undef CASE - - class ParseDrvNamePrimOpTest : - public PrimOpTest, +class ParseDrvNamePrimOpTest + : public PrimOpTest, public testing::WithParamInterface> - {}; +{}; - TEST_P(ParseDrvNamePrimOpTest, parseDrvName) { - auto [input, expectedName, expectedVersion] = GetParam(); - const auto expr = fmt("builtins.parseDrvName \"%1%\"", input); - auto v = eval(expr); - ASSERT_THAT(v, IsAttrsOfSize(2)); +TEST_P(ParseDrvNamePrimOpTest, parseDrvName) +{ + auto [input, expectedName, expectedVersion] = GetParam(); + const auto expr = fmt("builtins.parseDrvName \"%1%\"", input); + auto v = eval(expr); + ASSERT_THAT(v, IsAttrsOfSize(2)); - auto name = v.attrs()->find(createSymbol("name")); - ASSERT_TRUE(name); - ASSERT_THAT(*name->value, IsStringEq(expectedName)); + auto name = v.attrs()->find(createSymbol("name")); + ASSERT_TRUE(name); + ASSERT_THAT(*name->value, IsStringEq(expectedName)); - auto version = v.attrs()->find(createSymbol("version")); - ASSERT_TRUE(version); - ASSERT_THAT(*version->value, IsStringEq(expectedVersion)); - } + auto version = v.attrs()->find(createSymbol("version")); + ASSERT_TRUE(version); + ASSERT_THAT(*version->value, IsStringEq(expectedVersion)); +} - INSTANTIATE_TEST_SUITE_P( - parseDrvName, - ParseDrvNamePrimOpTest, - testing::Values( - std::make_tuple("nix-0.12pre12876", "nix", "0.12pre12876"), - std::make_tuple("a-b-c-1234pre5+git", "a-b-c", "1234pre5+git") - ) - ); +INSTANTIATE_TEST_SUITE_P( + parseDrvName, + ParseDrvNamePrimOpTest, + testing::Values( + std::make_tuple("nix-0.12pre12876", "nix", "0.12pre12876"), + std::make_tuple("a-b-c-1234pre5+git", "a-b-c", "1234pre5+git"))); - TEST_F(PrimOpTest, replaceStrings) { - // FIXME: add a test that verifies the string context is as expected - auto v = eval("builtins.replaceStrings [\"oo\" \"a\"] [\"a\" \"i\"] \"foobar\""); - ASSERT_EQ(v.type(), nString); - ASSERT_EQ(v.string_view(), "fabir"); - } +TEST_F(PrimOpTest, replaceStrings) +{ + // FIXME: add a test that verifies the string context is as expected + auto v = eval("builtins.replaceStrings [\"oo\" \"a\"] [\"a\" \"i\"] \"foobar\""); + ASSERT_EQ(v.type(), nString); + ASSERT_EQ(v.string_view(), "fabir"); +} - TEST_F(PrimOpTest, concatStringsSep) { - // FIXME: add a test that verifies the string context is as expected - auto v = eval("builtins.concatStringsSep \"%\" [\"foo\" \"bar\" \"baz\"]"); - ASSERT_EQ(v.type(), nString); - ASSERT_EQ(v.string_view(), "foo%bar%baz"); - } +TEST_F(PrimOpTest, concatStringsSep) +{ + // FIXME: add a test that verifies the string context is as expected + auto v = eval("builtins.concatStringsSep \"%\" [\"foo\" \"bar\" \"baz\"]"); + ASSERT_EQ(v.type(), nString); + ASSERT_EQ(v.string_view(), "foo%bar%baz"); +} - TEST_F(PrimOpTest, split1) { - // v = [ "" [ "a" ] "c" ] - auto v = eval("builtins.split \"(a)b\" \"abc\""); - ASSERT_THAT(v, IsListOfSize(3)); +TEST_F(PrimOpTest, split1) +{ + // v = [ "" [ "a" ] "c" ] + auto v = eval("builtins.split \"(a)b\" \"abc\""); + ASSERT_THAT(v, IsListOfSize(3)); - ASSERT_THAT(*v.listView()[0], IsStringEq("")); + ASSERT_THAT(*v.listView()[0], IsStringEq("")); - ASSERT_THAT(*v.listView()[1], IsListOfSize(1)); - 
ASSERT_THAT(*v.listView()[1]->listView()[0], IsStringEq("a")); + ASSERT_THAT(*v.listView()[1], IsListOfSize(1)); + ASSERT_THAT(*v.listView()[1]->listView()[0], IsStringEq("a")); - ASSERT_THAT(*v.listView()[2], IsStringEq("c")); - } + ASSERT_THAT(*v.listView()[2], IsStringEq("c")); +} - TEST_F(PrimOpTest, split2) { - // v is expected to be a list [ "" [ "a" ] "b" [ "c"] "" ] - auto v = eval("builtins.split \"([ac])\" \"abc\""); - ASSERT_THAT(v, IsListOfSize(5)); +TEST_F(PrimOpTest, split2) +{ + // v is expected to be a list [ "" [ "a" ] "b" [ "c"] "" ] + auto v = eval("builtins.split \"([ac])\" \"abc\""); + ASSERT_THAT(v, IsListOfSize(5)); - ASSERT_THAT(*v.listView()[0], IsStringEq("")); + ASSERT_THAT(*v.listView()[0], IsStringEq("")); - ASSERT_THAT(*v.listView()[1], IsListOfSize(1)); - ASSERT_THAT(*v.listView()[1]->listView()[0], IsStringEq("a")); + ASSERT_THAT(*v.listView()[1], IsListOfSize(1)); + ASSERT_THAT(*v.listView()[1]->listView()[0], IsStringEq("a")); - ASSERT_THAT(*v.listView()[2], IsStringEq("b")); + ASSERT_THAT(*v.listView()[2], IsStringEq("b")); - ASSERT_THAT(*v.listView()[3], IsListOfSize(1)); - ASSERT_THAT(*v.listView()[3]->listView()[0], IsStringEq("c")); + ASSERT_THAT(*v.listView()[3], IsListOfSize(1)); + ASSERT_THAT(*v.listView()[3]->listView()[0], IsStringEq("c")); - ASSERT_THAT(*v.listView()[4], IsStringEq("")); - } + ASSERT_THAT(*v.listView()[4], IsStringEq("")); +} - TEST_F(PrimOpTest, split3) { - auto v = eval("builtins.split \"(a)|(c)\" \"abc\""); - ASSERT_THAT(v, IsListOfSize(5)); +TEST_F(PrimOpTest, split3) +{ + auto v = eval("builtins.split \"(a)|(c)\" \"abc\""); + ASSERT_THAT(v, IsListOfSize(5)); - // First list element - ASSERT_THAT(*v.listView()[0], IsStringEq("")); + // First list element + ASSERT_THAT(*v.listView()[0], IsStringEq("")); - // 2nd list element is a list [ "" null ] - ASSERT_THAT(*v.listView()[1], IsListOfSize(2)); - ASSERT_THAT(*v.listView()[1]->listView()[0], IsStringEq("a")); - ASSERT_THAT(*v.listView()[1]->listView()[1], IsNull()); + // 2nd list element is a list [ "" null ] + ASSERT_THAT(*v.listView()[1], IsListOfSize(2)); + ASSERT_THAT(*v.listView()[1]->listView()[0], IsStringEq("a")); + ASSERT_THAT(*v.listView()[1]->listView()[1], IsNull()); - // 3rd element - ASSERT_THAT(*v.listView()[2], IsStringEq("b")); + // 3rd element + ASSERT_THAT(*v.listView()[2], IsStringEq("b")); - // 4th element is a list: [ null "c" ] - ASSERT_THAT(*v.listView()[3], IsListOfSize(2)); - ASSERT_THAT(*v.listView()[3]->listView()[0], IsNull()); - ASSERT_THAT(*v.listView()[3]->listView()[1], IsStringEq("c")); + // 4th element is a list: [ null "c" ] + ASSERT_THAT(*v.listView()[3], IsListOfSize(2)); + ASSERT_THAT(*v.listView()[3]->listView()[0], IsNull()); + ASSERT_THAT(*v.listView()[3]->listView()[1], IsStringEq("c")); - // 5th element is the empty string - ASSERT_THAT(*v.listView()[4], IsStringEq("")); - } + // 5th element is the empty string + ASSERT_THAT(*v.listView()[4], IsStringEq("")); +} - TEST_F(PrimOpTest, split4) { - auto v = eval("builtins.split \"([[:upper:]]+)\" \" FOO \""); - ASSERT_THAT(v, IsListOfSize(3)); - auto first = v.listView()[0]; - auto second = v.listView()[1]; - auto third = v.listView()[2]; +TEST_F(PrimOpTest, split4) +{ + auto v = eval("builtins.split \"([[:upper:]]+)\" \" FOO \""); + ASSERT_THAT(v, IsListOfSize(3)); + auto first = v.listView()[0]; + auto second = v.listView()[1]; + auto third = v.listView()[2]; - ASSERT_THAT(*first, IsStringEq(" ")); + ASSERT_THAT(*first, IsStringEq(" ")); - ASSERT_THAT(*second, IsListOfSize(1)); - 
ASSERT_THAT(*second->listView()[0], IsStringEq("FOO")); + ASSERT_THAT(*second, IsListOfSize(1)); + ASSERT_THAT(*second->listView()[0], IsStringEq("FOO")); - ASSERT_THAT(*third, IsStringEq(" ")); - } + ASSERT_THAT(*third, IsStringEq(" ")); +} - TEST_F(PrimOpTest, match1) { - auto v = eval("builtins.match \"ab\" \"abc\""); - ASSERT_THAT(v, IsNull()); - } +TEST_F(PrimOpTest, match1) +{ + auto v = eval("builtins.match \"ab\" \"abc\""); + ASSERT_THAT(v, IsNull()); +} - TEST_F(PrimOpTest, match2) { - auto v = eval("builtins.match \"abc\" \"abc\""); - ASSERT_THAT(v, IsListOfSize(0)); - } +TEST_F(PrimOpTest, match2) +{ + auto v = eval("builtins.match \"abc\" \"abc\""); + ASSERT_THAT(v, IsListOfSize(0)); +} - TEST_F(PrimOpTest, match3) { - auto v = eval("builtins.match \"a(b)(c)\" \"abc\""); - ASSERT_THAT(v, IsListOfSize(2)); - ASSERT_THAT(*v.listView()[0], IsStringEq("b")); - ASSERT_THAT(*v.listView()[1], IsStringEq("c")); - } +TEST_F(PrimOpTest, match3) +{ + auto v = eval("builtins.match \"a(b)(c)\" \"abc\""); + ASSERT_THAT(v, IsListOfSize(2)); + ASSERT_THAT(*v.listView()[0], IsStringEq("b")); + ASSERT_THAT(*v.listView()[1], IsStringEq("c")); +} - TEST_F(PrimOpTest, match4) { - auto v = eval("builtins.match \"[[:space:]]+([[:upper:]]+)[[:space:]]+\" \" FOO \""); - ASSERT_THAT(v, IsListOfSize(1)); - ASSERT_THAT(*v.listView()[0], IsStringEq("FOO")); - } +TEST_F(PrimOpTest, match4) +{ + auto v = eval("builtins.match \"[[:space:]]+([[:upper:]]+)[[:space:]]+\" \" FOO \""); + ASSERT_THAT(v, IsListOfSize(1)); + ASSERT_THAT(*v.listView()[0], IsStringEq("FOO")); +} - TEST_F(PrimOpTest, match5) { - // The regex "\\{}" is valid and matches the string "{}". - // Caused a regression before when trying to switch from std::regex to boost::regex. - // See https://github.com/NixOS/nix/pull/7762#issuecomment-1834303659 - auto v = eval("builtins.match \"\\\\{}\" \"{}\""); - ASSERT_THAT(v, IsListOfSize(0)); - } +TEST_F(PrimOpTest, match5) +{ + // The regex "\\{}" is valid and matches the string "{}". + // Caused a regression before when trying to switch from std::regex to boost::regex. 
+ // See https://github.com/NixOS/nix/pull/7762#issuecomment-1834303659 + auto v = eval("builtins.match \"\\\\{}\" \"{}\""); + ASSERT_THAT(v, IsListOfSize(0)); +} - TEST_F(PrimOpTest, attrNames) { - auto v = eval("builtins.attrNames { x = 1; y = 2; z = 3; a = 2; }"); - ASSERT_THAT(v, IsListOfSize(4)); +TEST_F(PrimOpTest, attrNames) +{ + auto v = eval("builtins.attrNames { x = 1; y = 2; z = 3; a = 2; }"); + ASSERT_THAT(v, IsListOfSize(4)); - // ensure that the list is sorted - const std::vector expected { "a", "x", "y", "z" }; - auto listView = v.listView(); - for (const auto [n, elem] : enumerate(listView)) - ASSERT_THAT(*elem, IsStringEq(expected[n])); - } + // ensure that the list is sorted + const std::vector expected{"a", "x", "y", "z"}; + auto listView = v.listView(); + for (const auto [n, elem] : enumerate(listView)) + ASSERT_THAT(*elem, IsStringEq(expected[n])); +} - TEST_F(PrimOpTest, genericClosure_not_strict) { - // Operator should not be used when startSet is empty - auto v = eval("builtins.genericClosure { startSet = []; }"); - ASSERT_THAT(v, IsListOfSize(0)); - } +TEST_F(PrimOpTest, genericClosure_not_strict) +{ + // Operator should not be used when startSet is empty + auto v = eval("builtins.genericClosure { startSet = []; }"); + ASSERT_THAT(v, IsListOfSize(0)); +} } /* namespace nix */ diff --git a/src/libexpr-tests/search-path.cc b/src/libexpr-tests/search-path.cc index 792bb0812..b48dcdaff 100644 --- a/src/libexpr-tests/search-path.cc +++ b/src/libexpr-tests/search-path.cc @@ -5,86 +5,98 @@ namespace nix { -TEST(LookupPathElem, parse_justPath) { +TEST(LookupPathElem, parse_justPath) +{ ASSERT_EQ( LookupPath::Elem::parse("foo"), - (LookupPath::Elem { - .prefix = LookupPath::Prefix { .s = "" }, - .path = LookupPath::Path { .s = "foo" }, + (LookupPath::Elem{ + .prefix = LookupPath::Prefix{.s = ""}, + .path = LookupPath::Path{.s = "foo"}, })); } -TEST(LookupPathElem, parse_emptyPrefix) { +TEST(LookupPathElem, parse_emptyPrefix) +{ ASSERT_EQ( LookupPath::Elem::parse("=foo"), - (LookupPath::Elem { - .prefix = LookupPath::Prefix { .s = "" }, - .path = LookupPath::Path { .s = "foo" }, + (LookupPath::Elem{ + .prefix = LookupPath::Prefix{.s = ""}, + .path = LookupPath::Path{.s = "foo"}, })); } -TEST(LookupPathElem, parse_oneEq) { +TEST(LookupPathElem, parse_oneEq) +{ ASSERT_EQ( LookupPath::Elem::parse("foo=bar"), - (LookupPath::Elem { - .prefix = LookupPath::Prefix { .s = "foo" }, - .path = LookupPath::Path { .s = "bar" }, + (LookupPath::Elem{ + .prefix = LookupPath::Prefix{.s = "foo"}, + .path = LookupPath::Path{.s = "bar"}, })); } -TEST(LookupPathElem, parse_twoEqs) { +TEST(LookupPathElem, parse_twoEqs) +{ ASSERT_EQ( LookupPath::Elem::parse("foo=bar=baz"), - (LookupPath::Elem { - .prefix = LookupPath::Prefix { .s = "foo" }, - .path = LookupPath::Path { .s = "bar=baz" }, + (LookupPath::Elem{ + .prefix = LookupPath::Prefix{.s = "foo"}, + .path = LookupPath::Path{.s = "bar=baz"}, })); } - -TEST(LookupPathElem, suffixIfPotentialMatch_justPath) { - LookupPath::Prefix prefix { .s = "" }; - ASSERT_EQ(prefix.suffixIfPotentialMatch("any/thing"), std::optional { "any/thing" }); +TEST(LookupPathElem, suffixIfPotentialMatch_justPath) +{ + LookupPath::Prefix prefix{.s = ""}; + ASSERT_EQ(prefix.suffixIfPotentialMatch("any/thing"), std::optional{"any/thing"}); } -TEST(LookupPathElem, suffixIfPotentialMatch_misleadingPrefix1) { - LookupPath::Prefix prefix { .s = "foo" }; +TEST(LookupPathElem, suffixIfPotentialMatch_misleadingPrefix1) +{ + LookupPath::Prefix prefix{.s = "foo"}; 
ASSERT_EQ(prefix.suffixIfPotentialMatch("fooX"), std::nullopt); } -TEST(LookupPathElem, suffixIfPotentialMatch_misleadingPrefix2) { - LookupPath::Prefix prefix { .s = "foo" }; +TEST(LookupPathElem, suffixIfPotentialMatch_misleadingPrefix2) +{ + LookupPath::Prefix prefix{.s = "foo"}; ASSERT_EQ(prefix.suffixIfPotentialMatch("fooX/bar"), std::nullopt); } -TEST(LookupPathElem, suffixIfPotentialMatch_partialPrefix) { - LookupPath::Prefix prefix { .s = "fooX" }; +TEST(LookupPathElem, suffixIfPotentialMatch_partialPrefix) +{ + LookupPath::Prefix prefix{.s = "fooX"}; ASSERT_EQ(prefix.suffixIfPotentialMatch("foo"), std::nullopt); } -TEST(LookupPathElem, suffixIfPotentialMatch_exactPrefix) { - LookupPath::Prefix prefix { .s = "foo" }; - ASSERT_EQ(prefix.suffixIfPotentialMatch("foo"), std::optional { "" }); +TEST(LookupPathElem, suffixIfPotentialMatch_exactPrefix) +{ + LookupPath::Prefix prefix{.s = "foo"}; + ASSERT_EQ(prefix.suffixIfPotentialMatch("foo"), std::optional{""}); } -TEST(LookupPathElem, suffixIfPotentialMatch_multiKey) { - LookupPath::Prefix prefix { .s = "foo/bar" }; - ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional { "baz" }); +TEST(LookupPathElem, suffixIfPotentialMatch_multiKey) +{ + LookupPath::Prefix prefix{.s = "foo/bar"}; + ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional{"baz"}); } -TEST(LookupPathElem, suffixIfPotentialMatch_trailingSlash) { - LookupPath::Prefix prefix { .s = "foo" }; - ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/"), std::optional { "" }); +TEST(LookupPathElem, suffixIfPotentialMatch_trailingSlash) +{ + LookupPath::Prefix prefix{.s = "foo"}; + ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/"), std::optional{""}); } -TEST(LookupPathElem, suffixIfPotentialMatch_trailingDoubleSlash) { - LookupPath::Prefix prefix { .s = "foo" }; - ASSERT_EQ(prefix.suffixIfPotentialMatch("foo//"), std::optional { "/" }); +TEST(LookupPathElem, suffixIfPotentialMatch_trailingDoubleSlash) +{ + LookupPath::Prefix prefix{.s = "foo"}; + ASSERT_EQ(prefix.suffixIfPotentialMatch("foo//"), std::optional{"/"}); } -TEST(LookupPathElem, suffixIfPotentialMatch_trailingPath) { - LookupPath::Prefix prefix { .s = "foo" }; - ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional { "bar/baz" }); +TEST(LookupPathElem, suffixIfPotentialMatch_trailingPath) +{ + LookupPath::Prefix prefix{.s = "foo"}; + ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional{"bar/baz"}); } -} +} // namespace nix diff --git a/src/libexpr-tests/trivial.cc b/src/libexpr-tests/trivial.cc index 6eabad6d7..02433234e 100644 --- a/src/libexpr-tests/trivial.cc +++ b/src/libexpr-tests/trivial.cc @@ -1,181 +1,202 @@ #include "nix/expr/tests/libexpr.hh" namespace nix { - // Testing of trivial expressions - class TrivialExpressionTest : public LibExprTest {}; +// Testing of trivial expressions +class TrivialExpressionTest : public LibExprTest +{}; - TEST_F(TrivialExpressionTest, true) { - auto v = eval("true"); - ASSERT_THAT(v, IsTrue()); - } +TEST_F(TrivialExpressionTest, true) +{ + auto v = eval("true"); + ASSERT_THAT(v, IsTrue()); +} - TEST_F(TrivialExpressionTest, false) { - auto v = eval("false"); - ASSERT_THAT(v, IsFalse()); - } +TEST_F(TrivialExpressionTest, false) +{ + auto v = eval("false"); + ASSERT_THAT(v, IsFalse()); +} - TEST_F(TrivialExpressionTest, null) { - auto v = eval("null"); - ASSERT_THAT(v, IsNull()); - } +TEST_F(TrivialExpressionTest, null) +{ + auto v = eval("null"); + ASSERT_THAT(v, IsNull()); +} - TEST_F(TrivialExpressionTest, 1) { - auto v 
= eval("1"); - ASSERT_THAT(v, IsIntEq(1)); - } +TEST_F(TrivialExpressionTest, 1) +{ + auto v = eval("1"); + ASSERT_THAT(v, IsIntEq(1)); +} - TEST_F(TrivialExpressionTest, 1plus1) { - auto v = eval("1+1"); - ASSERT_THAT(v, IsIntEq(2)); - } +TEST_F(TrivialExpressionTest, 1plus1) +{ + auto v = eval("1+1"); + ASSERT_THAT(v, IsIntEq(2)); +} - TEST_F(TrivialExpressionTest, minus1) { - auto v = eval("-1"); - ASSERT_THAT(v, IsIntEq(-1)); - } +TEST_F(TrivialExpressionTest, minus1) +{ + auto v = eval("-1"); + ASSERT_THAT(v, IsIntEq(-1)); +} - TEST_F(TrivialExpressionTest, 1minus1) { - auto v = eval("1-1"); - ASSERT_THAT(v, IsIntEq(0)); - } +TEST_F(TrivialExpressionTest, 1minus1) +{ + auto v = eval("1-1"); + ASSERT_THAT(v, IsIntEq(0)); +} - TEST_F(TrivialExpressionTest, lambdaAdd) { - auto v = eval("let add = a: b: a + b; in add 1 2"); - ASSERT_THAT(v, IsIntEq(3)); - } +TEST_F(TrivialExpressionTest, lambdaAdd) +{ + auto v = eval("let add = a: b: a + b; in add 1 2"); + ASSERT_THAT(v, IsIntEq(3)); +} - TEST_F(TrivialExpressionTest, list) { - auto v = eval("[]"); - ASSERT_THAT(v, IsListOfSize(0)); - } +TEST_F(TrivialExpressionTest, list) +{ + auto v = eval("[]"); + ASSERT_THAT(v, IsListOfSize(0)); +} - TEST_F(TrivialExpressionTest, attrs) { - auto v = eval("{}"); - ASSERT_THAT(v, IsAttrsOfSize(0)); - } +TEST_F(TrivialExpressionTest, attrs) +{ + auto v = eval("{}"); + ASSERT_THAT(v, IsAttrsOfSize(0)); +} - TEST_F(TrivialExpressionTest, float) { - auto v = eval("1.234"); - ASSERT_THAT(v, IsFloatEq(1.234)); - } +TEST_F(TrivialExpressionTest, float) +{ + auto v = eval("1.234"); + ASSERT_THAT(v, IsFloatEq(1.234)); +} - TEST_F(TrivialExpressionTest, updateAttrs) { - auto v = eval("{ a = 1; } // { b = 2; a = 3; }"); - ASSERT_THAT(v, IsAttrsOfSize(2)); - auto a = v.attrs()->find(createSymbol("a")); - ASSERT_NE(a, nullptr); - ASSERT_THAT(*a->value, IsIntEq(3)); +TEST_F(TrivialExpressionTest, updateAttrs) +{ + auto v = eval("{ a = 1; } // { b = 2; a = 3; }"); + ASSERT_THAT(v, IsAttrsOfSize(2)); + auto a = v.attrs()->find(createSymbol("a")); + ASSERT_NE(a, nullptr); + ASSERT_THAT(*a->value, IsIntEq(3)); - auto b = v.attrs()->find(createSymbol("b")); - ASSERT_NE(b, nullptr); - ASSERT_THAT(*b->value, IsIntEq(2)); - } + auto b = v.attrs()->find(createSymbol("b")); + ASSERT_NE(b, nullptr); + ASSERT_THAT(*b->value, IsIntEq(2)); +} - TEST_F(TrivialExpressionTest, hasAttrOpFalse) { - auto v = eval("{} ? a"); - ASSERT_THAT(v, IsFalse()); - } +TEST_F(TrivialExpressionTest, hasAttrOpFalse) +{ + auto v = eval("{} ? a"); + ASSERT_THAT(v, IsFalse()); +} - TEST_F(TrivialExpressionTest, hasAttrOpTrue) { - auto v = eval("{ a = 123; } ? a"); - ASSERT_THAT(v, IsTrue()); - } +TEST_F(TrivialExpressionTest, hasAttrOpTrue) +{ + auto v = eval("{ a = 123; } ? 
a"); + ASSERT_THAT(v, IsTrue()); +} - TEST_F(TrivialExpressionTest, withFound) { - auto v = eval("with { a = 23; }; a"); - ASSERT_THAT(v, IsIntEq(23)); - } +TEST_F(TrivialExpressionTest, withFound) +{ + auto v = eval("with { a = 23; }; a"); + ASSERT_THAT(v, IsIntEq(23)); +} - TEST_F(TrivialExpressionTest, withNotFound) { - ASSERT_THROW(eval("with {}; a"), Error); - } +TEST_F(TrivialExpressionTest, withNotFound) +{ + ASSERT_THROW(eval("with {}; a"), Error); +} - TEST_F(TrivialExpressionTest, withOverride) { - auto v = eval("with { a = 23; }; with { a = 42; }; a"); - ASSERT_THAT(v, IsIntEq(42)); - } +TEST_F(TrivialExpressionTest, withOverride) +{ + auto v = eval("with { a = 23; }; with { a = 42; }; a"); + ASSERT_THAT(v, IsIntEq(42)); +} - TEST_F(TrivialExpressionTest, letOverWith) { - auto v = eval("let a = 23; in with { a = 1; }; a"); - ASSERT_THAT(v, IsIntEq(23)); - } +TEST_F(TrivialExpressionTest, letOverWith) +{ + auto v = eval("let a = 23; in with { a = 1; }; a"); + ASSERT_THAT(v, IsIntEq(23)); +} - TEST_F(TrivialExpressionTest, multipleLet) { - auto v = eval("let a = 23; in let a = 42; in a"); - ASSERT_THAT(v, IsIntEq(42)); - } +TEST_F(TrivialExpressionTest, multipleLet) +{ + auto v = eval("let a = 23; in let a = 42; in a"); + ASSERT_THAT(v, IsIntEq(42)); +} - TEST_F(TrivialExpressionTest, defaultFunctionArgs) { - auto v = eval("({ a ? 123 }: a) {}"); - ASSERT_THAT(v, IsIntEq(123)); - } +TEST_F(TrivialExpressionTest, defaultFunctionArgs) +{ + auto v = eval("({ a ? 123 }: a) {}"); + ASSERT_THAT(v, IsIntEq(123)); +} - TEST_F(TrivialExpressionTest, defaultFunctionArgsOverride) { - auto v = eval("({ a ? 123 }: a) { a = 5; }"); - ASSERT_THAT(v, IsIntEq(5)); - } +TEST_F(TrivialExpressionTest, defaultFunctionArgsOverride) +{ + auto v = eval("({ a ? 123 }: a) { a = 5; }"); + ASSERT_THAT(v, IsIntEq(5)); +} - TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureBack) { - auto v = eval("({ a ? 123 }@args: args) {}"); - ASSERT_THAT(v, IsAttrsOfSize(0)); - } +TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureBack) +{ + auto v = eval("({ a ? 123 }@args: args) {}"); + ASSERT_THAT(v, IsAttrsOfSize(0)); +} - TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureFront) { - auto v = eval("(args@{ a ? 123 }: args) {}"); - ASSERT_THAT(v, IsAttrsOfSize(0)); - } +TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureFront) +{ + auto v = eval("(args@{ a ? 123 }: args) {}"); + ASSERT_THAT(v, IsAttrsOfSize(0)); +} - TEST_F(TrivialExpressionTest, assertThrows) { - ASSERT_THROW(eval("let x = arg: assert arg == 1; 123; in x 2"), Error); - } +TEST_F(TrivialExpressionTest, assertThrows) +{ + ASSERT_THROW(eval("let x = arg: assert arg == 1; 123; in x 2"), Error); +} - TEST_F(TrivialExpressionTest, assertPassed) { - auto v = eval("let x = arg: assert arg == 1; 123; in x 1"); - ASSERT_THAT(v, IsIntEq(123)); - } +TEST_F(TrivialExpressionTest, assertPassed) +{ + auto v = eval("let x = arg: assert arg == 1; 123; in x 1"); + ASSERT_THAT(v, IsIntEq(123)); +} - class AttrSetMergeTrvialExpressionTest : - public TrivialExpressionTest, - public testing::WithParamInterface - {}; +class AttrSetMergeTrvialExpressionTest : public TrivialExpressionTest, public testing::WithParamInterface +{}; - TEST_P(AttrSetMergeTrvialExpressionTest, attrsetMergeLazy) { - // Usually Nix rejects duplicate keys in an attrset but it does allow - // so if it is an attribute set that contains disjoint sets of keys. - // The below is equivalent to `{a.b = 1; a.c = 2; }`. 
- // The attribute set `a` will be a Thunk at first as the attributes - // have to be merged (or otherwise computed) and that is done in a lazy - // manner. +TEST_P(AttrSetMergeTrvialExpressionTest, attrsetMergeLazy) +{ + // Usually Nix rejects duplicate keys in an attrset but it does allow + // so if it is an attribute set that contains disjoint sets of keys. + // The below is equivalent to `{a.b = 1; a.c = 2; }`. + // The attribute set `a` will be a Thunk at first as the attributes + // have to be merged (or otherwise computed) and that is done in a lazy + // manner. - auto expr = GetParam(); - auto v = eval(expr); - ASSERT_THAT(v, IsAttrsOfSize(1)); + auto expr = GetParam(); + auto v = eval(expr); + ASSERT_THAT(v, IsAttrsOfSize(1)); - auto a = v.attrs()->find(createSymbol("a")); - ASSERT_NE(a, nullptr); + auto a = v.attrs()->find(createSymbol("a")); + ASSERT_NE(a, nullptr); - ASSERT_THAT(*a->value, IsThunk()); - state.forceValue(*a->value, noPos); + ASSERT_THAT(*a->value, IsThunk()); + state.forceValue(*a->value, noPos); - ASSERT_THAT(*a->value, IsAttrsOfSize(2)); + ASSERT_THAT(*a->value, IsAttrsOfSize(2)); - auto b = a->value->attrs()->find(createSymbol("b")); - ASSERT_NE(b, nullptr); - ASSERT_THAT(*b->value, IsIntEq(1)); + auto b = a->value->attrs()->find(createSymbol("b")); + ASSERT_NE(b, nullptr); + ASSERT_THAT(*b->value, IsIntEq(1)); - auto c = a->value->attrs()->find(createSymbol("c")); - ASSERT_NE(c, nullptr); - ASSERT_THAT(*c->value, IsIntEq(2)); - } + auto c = a->value->attrs()->find(createSymbol("c")); + ASSERT_NE(c, nullptr); + ASSERT_THAT(*c->value, IsIntEq(2)); +} - INSTANTIATE_TEST_SUITE_P( - attrsetMergeLazy, - AttrSetMergeTrvialExpressionTest, - testing::Values( - "{ a.b = 1; a.c = 2; }", - "{ a = { b = 1; }; a = { c = 2; }; }" - ) - ); +INSTANTIATE_TEST_SUITE_P( + attrsetMergeLazy, + AttrSetMergeTrvialExpressionTest, + testing::Values("{ a.b = 1; a.c = 2; }", "{ a = { b = 1; }; a = { c = 2; }; }")); // The following macros ultimately define 48 tests (16 variations on three // templates). Each template tests an expression that can be written in 2^4 @@ -199,28 +220,34 @@ namespace nix { // expanded. #define X_EXPAND_IF0(k, v) k "." 
v #define X_EXPAND_IF1(k, v) k " = { " v " };" -#define X4(w, x, y, z) \ - TEST_F(TrivialExpressionTest, nestedAttrsetMerge##w##x##y##z) { \ - auto v = eval("{ a.b = { c = 1; d = 2; }; } == { " \ - X_EXPAND_IF##w("a", X_EXPAND_IF##x("b", "c = 1;")) " " \ - X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " }"); \ - ASSERT_THAT(v, IsTrue()); \ - }; \ - TEST_F(TrivialExpressionTest, nestedAttrsetMergeDup##w##x##y##z) { \ - ASSERT_THROW(eval("{ " \ - X_EXPAND_IF##w("a", X_EXPAND_IF##x("b", "c = 1;")) " " \ - X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "c = 2;")) " }"), Error); \ - }; \ - TEST_F(TrivialExpressionTest, nestedAttrsetMergeLet##w##x##y##z) { \ - auto v = eval("{ b = { c = 1; d = 2; }; } == (let " \ - X_EXPAND_IF##w("a", X_EXPAND_IF##x("b", "c = 1;")) " " \ - X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " in a)"); \ - ASSERT_THAT(v, IsTrue()); \ +#define X4(w, x, y, z) \ + TEST_F(TrivialExpressionTest, nestedAttrsetMerge##w##x##y##z) \ + { \ + auto v = eval( \ + "{ a.b = { c = 1; d = 2; }; } == { " X_EXPAND_IF##w( \ + "a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " }"); \ + ASSERT_THAT(v, IsTrue()); \ + }; \ + TEST_F(TrivialExpressionTest, nestedAttrsetMergeDup##w##x##y##z) \ + { \ + ASSERT_THROW( \ + eval( \ + "{ " X_EXPAND_IF##w("a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y( \ + "a", X_EXPAND_IF##z("b", "c = 2;")) " }"), \ + Error); \ + }; \ + TEST_F(TrivialExpressionTest, nestedAttrsetMergeLet##w##x##y##z) \ + { \ + auto v = eval( \ + "{ b = { c = 1; d = 2; }; } == (let " X_EXPAND_IF##w( \ + "a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " in a)"); \ + ASSERT_THAT(v, IsTrue()); \ }; #define X3(...) X4(__VA_ARGS__, 0) X4(__VA_ARGS__, 1) #define X2(...) X3(__VA_ARGS__, 0) X3(__VA_ARGS__, 1) #define X1(...) 
X2(__VA_ARGS__, 0) X2(__VA_ARGS__, 1) - X1(0) X1(1) +X1(0) +X1(1) #undef X_EXPAND_IF0 #undef X_EXPAND_IF1 #undef X1 @@ -228,74 +255,88 @@ namespace nix { #undef X3 #undef X4 - TEST_F(TrivialExpressionTest, functor) { - auto v = eval("{ __functor = self: arg: self.v + arg; v = 10; } 5"); - ASSERT_THAT(v, IsIntEq(15)); - } +TEST_F(TrivialExpressionTest, functor) +{ + auto v = eval("{ __functor = self: arg: self.v + arg; v = 10; } 5"); + ASSERT_THAT(v, IsIntEq(15)); +} - TEST_F(TrivialExpressionTest, forwardPipe) { - auto v = eval("1 |> builtins.add 2 |> builtins.mul 3"); - ASSERT_THAT(v, IsIntEq(9)); - } +TEST_F(TrivialExpressionTest, forwardPipe) +{ + auto v = eval("1 |> builtins.add 2 |> builtins.mul 3"); + ASSERT_THAT(v, IsIntEq(9)); +} - TEST_F(TrivialExpressionTest, backwardPipe) { - auto v = eval("builtins.add 1 <| builtins.mul 2 <| 3"); - ASSERT_THAT(v, IsIntEq(7)); - } +TEST_F(TrivialExpressionTest, backwardPipe) +{ + auto v = eval("builtins.add 1 <| builtins.mul 2 <| 3"); + ASSERT_THAT(v, IsIntEq(7)); +} - TEST_F(TrivialExpressionTest, forwardPipeEvaluationOrder) { - auto v = eval("1 |> null |> (x: 2)"); - ASSERT_THAT(v, IsIntEq(2)); - } +TEST_F(TrivialExpressionTest, forwardPipeEvaluationOrder) +{ + auto v = eval("1 |> null |> (x: 2)"); + ASSERT_THAT(v, IsIntEq(2)); +} - TEST_F(TrivialExpressionTest, backwardPipeEvaluationOrder) { - auto v = eval("(x: 1) <| null <| 2"); - ASSERT_THAT(v, IsIntEq(1)); - } +TEST_F(TrivialExpressionTest, backwardPipeEvaluationOrder) +{ + auto v = eval("(x: 1) <| null <| 2"); + ASSERT_THAT(v, IsIntEq(1)); +} - TEST_F(TrivialExpressionTest, differentPipeOperatorsDoNotAssociate) { - ASSERT_THROW(eval("(x: 1) <| 2 |> (x: 3)"), ParseError); - } +TEST_F(TrivialExpressionTest, differentPipeOperatorsDoNotAssociate) +{ + ASSERT_THROW(eval("(x: 1) <| 2 |> (x: 3)"), ParseError); +} - TEST_F(TrivialExpressionTest, differentPipeOperatorsParensLeft) { - auto v = eval("((x: 1) <| 2) |> (x: 3)"); - ASSERT_THAT(v, IsIntEq(3)); - } +TEST_F(TrivialExpressionTest, differentPipeOperatorsParensLeft) +{ + auto v = eval("((x: 1) <| 2) |> (x: 3)"); + ASSERT_THAT(v, IsIntEq(3)); +} - TEST_F(TrivialExpressionTest, differentPipeOperatorsParensRight) { - auto v = eval("(x: 1) <| (2 |> (x: 3))"); - ASSERT_THAT(v, IsIntEq(1)); - } +TEST_F(TrivialExpressionTest, differentPipeOperatorsParensRight) +{ + auto v = eval("(x: 1) <| (2 |> (x: 3))"); + ASSERT_THAT(v, IsIntEq(1)); +} - TEST_F(TrivialExpressionTest, forwardPipeLowestPrecedence) { - auto v = eval("false -> true |> (x: !x)"); - ASSERT_THAT(v, IsFalse()); - } +TEST_F(TrivialExpressionTest, forwardPipeLowestPrecedence) +{ + auto v = eval("false -> true |> (x: !x)"); + ASSERT_THAT(v, IsFalse()); +} - TEST_F(TrivialExpressionTest, backwardPipeLowestPrecedence) { - auto v = eval("(x: !x) <| false -> true"); - ASSERT_THAT(v, IsFalse()); - } +TEST_F(TrivialExpressionTest, backwardPipeLowestPrecedence) +{ + auto v = eval("(x: !x) <| false -> true"); + ASSERT_THAT(v, IsFalse()); +} - TEST_F(TrivialExpressionTest, forwardPipeStrongerThanElse) { - auto v = eval("if true then 1 else 2 |> 3"); - ASSERT_THAT(v, IsIntEq(1)); - } +TEST_F(TrivialExpressionTest, forwardPipeStrongerThanElse) +{ + auto v = eval("if true then 1 else 2 |> 3"); + ASSERT_THAT(v, IsIntEq(1)); +} - TEST_F(TrivialExpressionTest, backwardPipeStrongerThanElse) { - auto v = eval("if true then 1 else 2 <| 3"); - ASSERT_THAT(v, IsIntEq(1)); - } +TEST_F(TrivialExpressionTest, backwardPipeStrongerThanElse) +{ + auto v = eval("if true then 1 else 2 <| 3"); + ASSERT_THAT(v, 
IsIntEq(1)); +} - TEST_F(TrivialExpressionTest, bindOr) { - auto v = eval("{ or = 1; }"); - ASSERT_THAT(v, IsAttrsOfSize(1)); - auto b = v.attrs()->find(createSymbol("or")); - ASSERT_NE(b, nullptr); - ASSERT_THAT(*b->value, IsIntEq(1)); - } +TEST_F(TrivialExpressionTest, bindOr) +{ + auto v = eval("{ or = 1; }"); + ASSERT_THAT(v, IsAttrsOfSize(1)); + auto b = v.attrs()->find(createSymbol("or")); + ASSERT_NE(b, nullptr); + ASSERT_THAT(*b->value, IsIntEq(1)); +} - TEST_F(TrivialExpressionTest, orCantBeUsed) { - ASSERT_THROW(eval("let or = 1; in or"), Error); - } +TEST_F(TrivialExpressionTest, orCantBeUsed) +{ + ASSERT_THROW(eval("let or = 1; in or"), Error); +} } /* namespace nix */ diff --git a/src/libexpr-tests/value/context.cc b/src/libexpr-tests/value/context.cc index 97cd50f75..fe3072b64 100644 --- a/src/libexpr-tests/value/context.cc +++ b/src/libexpr-tests/value/context.cc @@ -10,46 +10,42 @@ namespace nix { // Test a few cases of invalid string context elements. -TEST(NixStringContextElemTest, empty_invalid) { - EXPECT_THROW( - NixStringContextElem::parse(""), - BadNixStringContextElem); +TEST(NixStringContextElemTest, empty_invalid) +{ + EXPECT_THROW(NixStringContextElem::parse(""), BadNixStringContextElem); } -TEST(NixStringContextElemTest, single_bang_invalid) { - EXPECT_THROW( - NixStringContextElem::parse("!"), - BadNixStringContextElem); +TEST(NixStringContextElemTest, single_bang_invalid) +{ + EXPECT_THROW(NixStringContextElem::parse("!"), BadNixStringContextElem); } -TEST(NixStringContextElemTest, double_bang_invalid) { - EXPECT_THROW( - NixStringContextElem::parse("!!/"), - BadStorePath); +TEST(NixStringContextElemTest, double_bang_invalid) +{ + EXPECT_THROW(NixStringContextElem::parse("!!/"), BadStorePath); } -TEST(NixStringContextElemTest, eq_slash_invalid) { - EXPECT_THROW( - NixStringContextElem::parse("=/"), - BadStorePath); +TEST(NixStringContextElemTest, eq_slash_invalid) +{ + EXPECT_THROW(NixStringContextElem::parse("=/"), BadStorePath); } -TEST(NixStringContextElemTest, slash_invalid) { - EXPECT_THROW( - NixStringContextElem::parse("/"), - BadStorePath); +TEST(NixStringContextElemTest, slash_invalid) +{ + EXPECT_THROW(NixStringContextElem::parse("/"), BadStorePath); } /** * Round trip (string <-> data structure) test for * `NixStringContextElem::Opaque`. */ -TEST(NixStringContextElemTest, opaque) { +TEST(NixStringContextElemTest, opaque) +{ std::string_view opaque = "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x"; auto elem = NixStringContextElem::parse(opaque); auto * p = std::get_if(&elem.raw); ASSERT_TRUE(p); - ASSERT_EQ(p->path, StorePath { opaque }); + ASSERT_EQ(p->path, StorePath{opaque}); ASSERT_EQ(elem.to_string(), opaque); } @@ -57,12 +53,13 @@ TEST(NixStringContextElemTest, opaque) { * Round trip (string <-> data structure) test for * `NixStringContextElem::DrvDeep`. */ -TEST(NixStringContextElemTest, drvDeep) { +TEST(NixStringContextElemTest, drvDeep) +{ std::string_view drvDeep = "=g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"; auto elem = NixStringContextElem::parse(drvDeep); auto * p = std::get_if(&elem.raw); ASSERT_TRUE(p); - ASSERT_EQ(p->drvPath, StorePath { drvDeep.substr(1) }); + ASSERT_EQ(p->drvPath, StorePath{drvDeep.substr(1)}); ASSERT_EQ(elem.to_string(), drvDeep); } @@ -70,15 +67,18 @@ TEST(NixStringContextElemTest, drvDeep) { * Round trip (string <-> data structure) test for a simpler * `NixStringContextElem::Built`. 
*/ -TEST(NixStringContextElemTest, built_opaque) { +TEST(NixStringContextElemTest, built_opaque) +{ std::string_view built = "!foo!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"; auto elem = NixStringContextElem::parse(built); auto * p = std::get_if(&elem.raw); ASSERT_TRUE(p); ASSERT_EQ(p->output, "foo"); - ASSERT_EQ(*p->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque { - .path = StorePath { built.substr(5) }, - })); + ASSERT_EQ( + *p->drvPath, + ((SingleDerivedPath) SingleDerivedPath::Opaque{ + .path = StorePath{built.substr(5)}, + })); ASSERT_EQ(elem.to_string(), built); } @@ -86,7 +86,8 @@ TEST(NixStringContextElemTest, built_opaque) { * Round trip (string <-> data structure) test for a more complex, * inductive `NixStringContextElem::Built`. */ -TEST(NixStringContextElemTest, built_built) { +TEST(NixStringContextElemTest, built_built) +{ /** * We set these in tests rather than the regular globals so we don't have * to worry about race conditions if the tests run concurrently. @@ -102,9 +103,11 @@ TEST(NixStringContextElemTest, built_built) { auto * drvPath = std::get_if(&*p->drvPath); ASSERT_TRUE(drvPath); ASSERT_EQ(drvPath->output, "bar"); - ASSERT_EQ(*drvPath->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque { - .path = StorePath { built.substr(9) }, - })); + ASSERT_EQ( + *drvPath->drvPath, + ((SingleDerivedPath) SingleDerivedPath::Opaque{ + .path = StorePath{built.substr(9)}, + })); ASSERT_EQ(elem.to_string(), built); } @@ -112,17 +115,15 @@ TEST(NixStringContextElemTest, built_built) { * Without the right experimental features enabled, we cannot parse a * complex inductive string context element. */ -TEST(NixStringContextElemTest, built_built_xp) { +TEST(NixStringContextElemTest, built_built_xp) +{ ASSERT_THROW( - NixStringContextElem::parse("!foo!bar!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"), MissingExperimentalFeature); + NixStringContextElem::parse("!foo!bar!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"), MissingExperimentalFeature); } #ifndef COVERAGE -RC_GTEST_PROP( - NixStringContextElemTest, - prop_round_rip, - (const NixStringContextElem & o)) +RC_GTEST_PROP(NixStringContextElemTest, prop_round_rip, (const NixStringContextElem & o)) { ExperimentalFeatureSettings xpSettings; xpSettings.set("experimental-features", "dynamic-derivations"); @@ -131,4 +132,4 @@ RC_GTEST_PROP( #endif -} +} // namespace nix diff --git a/src/libexpr-tests/value/print.cc b/src/libexpr-tests/value/print.cc index d337a29a3..7647cd334 100644 --- a/src/libexpr-tests/value/print.cc +++ b/src/libexpr-tests/value/print.cc @@ -106,14 +106,11 @@ TEST_F(ValuePrintingTests, vApp) TEST_F(ValuePrintingTests, vLambda) { - Env env { - .up = nullptr, - .values = { } - }; + Env env{.up = nullptr, .values = {}}; PosTable::Origin origin = state.positions.addOrigin(std::monostate(), 1); auto posIdx = state.positions.add(origin, 0); auto body = ExprInt(0); - auto formals = Formals {}; + auto formals = Formals{}; ExprLambda eLambda(posIdx, createSymbol("a"), &formals, &body); @@ -130,9 +127,7 @@ TEST_F(ValuePrintingTests, vLambda) TEST_F(ValuePrintingTests, vPrimOp) { Value vPrimOp; - PrimOp primOp{ - .name = "puppy" - }; + PrimOp primOp{.name = "puppy"}; vPrimOp.mkPrimOp(&primOp); test(vPrimOp, "«primop puppy»"); @@ -140,9 +135,7 @@ TEST_F(ValuePrintingTests, vPrimOp) TEST_F(ValuePrintingTests, vPrimOpApp) { - PrimOp primOp{ - .name = "puppy" - }; + PrimOp primOp{.name = "puppy"}; Value vPrimOp; vPrimOp.mkPrimOp(&primOp); @@ -161,16 +154,19 @@ TEST_F(ValuePrintingTests, vExternal) { return ""; } + std::string typeOf() 
const override { return ""; } + virtual std::ostream & print(std::ostream & str) const override { str << "testing-external!"; return str; } } myExternal; + Value vExternal; vExternal.mkExternal(&myExternal); @@ -220,10 +216,13 @@ TEST_F(ValuePrintingTests, depthAttrs) Value vNested; vNested.mkAttrs(builder2.finish()); - test(vNested, "{ nested = { ... }; one = 1; two = 2; }", PrintOptions { .maxDepth = 1 }); - test(vNested, "{ nested = { nested = { ... }; one = 1; two = 2; }; one = 1; two = 2; }", PrintOptions { .maxDepth = 2 }); - test(vNested, "{ nested = { nested = { }; one = 1; two = 2; }; one = 1; two = 2; }", PrintOptions { .maxDepth = 3 }); - test(vNested, "{ nested = { nested = { }; one = 1; two = 2; }; one = 1; two = 2; }", PrintOptions { .maxDepth = 4 }); + test(vNested, "{ nested = { ... }; one = 1; two = 2; }", PrintOptions{.maxDepth = 1}); + test( + vNested, + "{ nested = { nested = { ... }; one = 1; two = 2; }; one = 1; two = 2; }", + PrintOptions{.maxDepth = 2}); + test(vNested, "{ nested = { nested = { }; one = 1; two = 2; }; one = 1; two = 2; }", PrintOptions{.maxDepth = 3}); + test(vNested, "{ nested = { nested = { }; one = 1; two = 2; }; one = 1; two = 2; }", PrintOptions{.maxDepth = 4}); } TEST_F(ValuePrintingTests, depthList) @@ -256,11 +255,11 @@ TEST_F(ValuePrintingTests, depthList) Value vList; vList.mkList(list); - test(vList, "[ 1 2 { ... } ]", PrintOptions { .maxDepth = 1 }); - test(vList, "[ 1 2 { nested = { ... }; one = 1; two = 2; } ]", PrintOptions { .maxDepth = 2 }); - test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions { .maxDepth = 3 }); - test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions { .maxDepth = 4 }); - test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions { .maxDepth = 5 }); + test(vList, "[ 1 2 { ... } ]", PrintOptions{.maxDepth = 1}); + test(vList, "[ 1 2 { nested = { ... 
}; one = 1; two = 2; } ]", PrintOptions{.maxDepth = 2}); + test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions{.maxDepth = 3}); + test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions{.maxDepth = 4}); + test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions{.maxDepth = 5}); } struct StringPrintingTests : LibExprTest @@ -272,9 +271,7 @@ struct StringPrintingTests : LibExprTest v.mkString(literal); std::stringstream out; - printValue(state, out, v, PrintOptions { - .maxStringLength = maxLength - }); + printValue(state, out, v, PrintOptions{.maxStringLength = maxLength}); ASSERT_EQ(out.str(), expected); } }; @@ -305,15 +302,9 @@ TEST_F(ValuePrintingTests, attrsTypeFirst) Value vAttrs; vAttrs.mkAttrs(builder.finish()); - test(vAttrs, - "{ type = \"puppy\"; apple = \"apple\"; }", - PrintOptions { - .maxAttrs = 100 - }); + test(vAttrs, "{ type = \"puppy\"; apple = \"apple\"; }", PrintOptions{.maxAttrs = 100}); - test(vAttrs, - "{ apple = \"apple\"; type = \"puppy\"; }", - PrintOptions { }); + test(vAttrs, "{ apple = \"apple\"; type = \"puppy\"; }", PrintOptions{}); } TEST_F(ValuePrintingTests, ansiColorsInt) @@ -321,11 +312,7 @@ TEST_F(ValuePrintingTests, ansiColorsInt) Value v; v.mkInt(10); - test(v, - ANSI_CYAN "10" ANSI_NORMAL, - PrintOptions { - .ansiColors = true - }); + test(v, ANSI_CYAN "10" ANSI_NORMAL, PrintOptions{.ansiColors = true}); } TEST_F(ValuePrintingTests, ansiColorsFloat) @@ -333,11 +320,7 @@ TEST_F(ValuePrintingTests, ansiColorsFloat) Value v; v.mkFloat(1.6); - test(v, - ANSI_CYAN "1.6" ANSI_NORMAL, - PrintOptions { - .ansiColors = true - }); + test(v, ANSI_CYAN "1.6" ANSI_NORMAL, PrintOptions{.ansiColors = true}); } TEST_F(ValuePrintingTests, ansiColorsBool) @@ -345,11 +328,7 @@ TEST_F(ValuePrintingTests, ansiColorsBool) Value v; v.mkBool(true); - test(v, - ANSI_CYAN "true" ANSI_NORMAL, - PrintOptions { - .ansiColors = true - }); + test(v, ANSI_CYAN "true" ANSI_NORMAL, PrintOptions{.ansiColors = true}); } TEST_F(ValuePrintingTests, ansiColorsString) @@ -357,11 +336,7 @@ TEST_F(ValuePrintingTests, ansiColorsString) Value v; v.mkString("puppy"); - test(v, - ANSI_MAGENTA "\"puppy\"" ANSI_NORMAL, - PrintOptions { - .ansiColors = true - }); + test(v, ANSI_MAGENTA "\"puppy\"" ANSI_NORMAL, PrintOptions{.ansiColors = true}); } TEST_F(ValuePrintingTests, ansiColorsStringElided) @@ -369,12 +344,10 @@ TEST_F(ValuePrintingTests, ansiColorsStringElided) Value v; v.mkString("puppy"); - test(v, - ANSI_MAGENTA "\"pup\" " ANSI_FAINT "«2 bytes elided»" ANSI_NORMAL, - PrintOptions { - .ansiColors = true, - .maxStringLength = 3 - }); + test( + v, + ANSI_MAGENTA "\"pup\" " ANSI_FAINT "«2 bytes elided»" ANSI_NORMAL, + PrintOptions{.ansiColors = true, .maxStringLength = 3}); } TEST_F(ValuePrintingTests, ansiColorsPath) @@ -382,11 +355,7 @@ TEST_F(ValuePrintingTests, ansiColorsPath) Value v; v.mkPath(state.rootPath(CanonPath("puppy"))); - test(v, - ANSI_GREEN "/puppy" ANSI_NORMAL, - PrintOptions { - .ansiColors = true - }); + test(v, ANSI_GREEN "/puppy" ANSI_NORMAL, PrintOptions{.ansiColors = true}); } TEST_F(ValuePrintingTests, ansiColorsNull) @@ -394,11 +363,7 @@ TEST_F(ValuePrintingTests, ansiColorsNull) Value v; v.mkNull(); - test(v, - ANSI_CYAN "null" ANSI_NORMAL, - PrintOptions { - .ansiColors = true - }); + test(v, ANSI_CYAN "null" ANSI_NORMAL, PrintOptions{.ansiColors = true}); } TEST_F(ValuePrintingTests, ansiColorsAttrs) @@ -416,11 +381,10 @@ TEST_F(ValuePrintingTests, 
ansiColorsAttrs) Value vAttrs; vAttrs.mkAttrs(builder.finish()); - test(vAttrs, - "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; two = " ANSI_CYAN "2" ANSI_NORMAL "; }", - PrintOptions { - .ansiColors = true - }); + test( + vAttrs, + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; two = " ANSI_CYAN "2" ANSI_NORMAL "; }", + PrintOptions{.ansiColors = true}); } TEST_F(ValuePrintingTests, ansiColorsDerivation) @@ -434,20 +398,15 @@ TEST_F(ValuePrintingTests, ansiColorsDerivation) Value vAttrs; vAttrs.mkAttrs(builder.finish()); - test(vAttrs, - ANSI_GREEN "«derivation»" ANSI_NORMAL, - PrintOptions { - .ansiColors = true, - .force = true, - .derivationPaths = true - }); + test( + vAttrs, + ANSI_GREEN "«derivation»" ANSI_NORMAL, + PrintOptions{.ansiColors = true, .force = true, .derivationPaths = true}); - test(vAttrs, - "{ type = " ANSI_MAGENTA "\"derivation\"" ANSI_NORMAL "; }", - PrintOptions { - .ansiColors = true, - .force = true - }); + test( + vAttrs, + "{ type = " ANSI_MAGENTA "\"derivation\"" ANSI_NORMAL "; }", + PrintOptions{.ansiColors = true, .force = true}); } TEST_F(ValuePrintingTests, ansiColorsError) @@ -458,14 +417,13 @@ TEST_F(ValuePrintingTests, ansiColorsError) Value vError; vError.mkApp(&throw_, &message); - test(vError, - ANSI_RED - "«error: uh oh!»" - ANSI_NORMAL, - PrintOptions { - .ansiColors = true, - .force = true, - }); + test( + vError, + ANSI_RED "«error: uh oh!»" ANSI_NORMAL, + PrintOptions{ + .ansiColors = true, + .force = true, + }); } TEST_F(ValuePrintingTests, ansiColorsDerivationError) @@ -486,30 +444,20 @@ TEST_F(ValuePrintingTests, ansiColorsDerivationError) Value vAttrs; vAttrs.mkAttrs(builder.finish()); - test(vAttrs, - "{ drvPath = " - ANSI_RED - "«error: uh oh!»" - ANSI_NORMAL - "; type = " - ANSI_MAGENTA - "\"derivation\"" - ANSI_NORMAL - "; }", - PrintOptions { - .ansiColors = true, - .force = true - }); + test( + vAttrs, + "{ drvPath = " ANSI_RED "«error: uh oh!»" ANSI_NORMAL "; type = " ANSI_MAGENTA "\"derivation\"" ANSI_NORMAL + "; }", + PrintOptions{.ansiColors = true, .force = true}); - test(vAttrs, - ANSI_RED - "«error: uh oh!»" - ANSI_NORMAL, - PrintOptions { - .ansiColors = true, - .force = true, - .derivationPaths = true, - }); + test( + vAttrs, + ANSI_RED "«error: uh oh!»" ANSI_NORMAL, + PrintOptions{ + .ansiColors = true, + .force = true, + .derivationPaths = true, + }); } TEST_F(ValuePrintingTests, ansiColorsAssert) @@ -523,12 +471,7 @@ TEST_F(ValuePrintingTests, ansiColorsAssert) Value v; state.mkThunk_(v, &expr); - test(v, - ANSI_RED "«error: assertion 'false' failed»" ANSI_NORMAL, - PrintOptions { - .ansiColors = true, - .force = true - }); + test(v, ANSI_RED "«error: assertion 'false' failed»" ANSI_NORMAL, PrintOptions{.ansiColors = true, .force = true}); } TEST_F(ValuePrintingTests, ansiColorsList) @@ -545,77 +488,51 @@ TEST_F(ValuePrintingTests, ansiColorsList) Value vList; vList.mkList(list); - test(vList, - "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_CYAN "2" ANSI_NORMAL " " ANSI_MAGENTA "«nullptr»" ANSI_NORMAL " ]", - PrintOptions { - .ansiColors = true - }); + test( + vList, + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_CYAN "2" ANSI_NORMAL " " ANSI_MAGENTA "«nullptr»" ANSI_NORMAL " ]", + PrintOptions{.ansiColors = true}); } TEST_F(ValuePrintingTests, ansiColorsLambda) { - Env env { - .up = nullptr, - .values = { } - }; + Env env{.up = nullptr, .values = {}}; PosTable::Origin origin = state.positions.addOrigin(std::monostate(), 1); auto posIdx = state.positions.add(origin, 0); auto body = ExprInt(0); - auto formals = Formals {}; + auto formals = 
Formals{}; ExprLambda eLambda(posIdx, createSymbol("a"), &formals, &body); Value vLambda; vLambda.mkLambda(&env, &eLambda); - test(vLambda, - ANSI_BLUE "«lambda @ «none»:1:1»" ANSI_NORMAL, - PrintOptions { - .ansiColors = true, - .force = true - }); + test(vLambda, ANSI_BLUE "«lambda @ «none»:1:1»" ANSI_NORMAL, PrintOptions{.ansiColors = true, .force = true}); eLambda.setName(createSymbol("puppy")); - test(vLambda, - ANSI_BLUE "«lambda puppy @ «none»:1:1»" ANSI_NORMAL, - PrintOptions { - .ansiColors = true, - .force = true - }); + test(vLambda, ANSI_BLUE "«lambda puppy @ «none»:1:1»" ANSI_NORMAL, PrintOptions{.ansiColors = true, .force = true}); } TEST_F(ValuePrintingTests, ansiColorsPrimOp) { - PrimOp primOp{ - .name = "puppy" - }; + PrimOp primOp{.name = "puppy"}; Value v; v.mkPrimOp(&primOp); - test(v, - ANSI_BLUE "«primop puppy»" ANSI_NORMAL, - PrintOptions { - .ansiColors = true - }); + test(v, ANSI_BLUE "«primop puppy»" ANSI_NORMAL, PrintOptions{.ansiColors = true}); } TEST_F(ValuePrintingTests, ansiColorsPrimOpApp) { - PrimOp primOp{ - .name = "puppy" - }; + PrimOp primOp{.name = "puppy"}; Value vPrimOp; vPrimOp.mkPrimOp(&primOp); Value v; v.mkPrimOpApp(&vPrimOp, nullptr); - test(v, - ANSI_BLUE "«partially applied primop puppy»" ANSI_NORMAL, - PrintOptions { - .ansiColors = true - }); + test(v, ANSI_BLUE "«partially applied primop puppy»" ANSI_NORMAL, PrintOptions{.ansiColors = true}); } TEST_F(ValuePrintingTests, ansiColorsThunk) @@ -623,11 +540,7 @@ TEST_F(ValuePrintingTests, ansiColorsThunk) Value v; v.mkThunk(nullptr, nullptr); - test(v, - ANSI_MAGENTA "«thunk»" ANSI_NORMAL, - PrintOptions { - .ansiColors = true - }); + test(v, ANSI_MAGENTA "«thunk»" ANSI_NORMAL, PrintOptions{.ansiColors = true}); } TEST_F(ValuePrintingTests, ansiColorsBlackhole) @@ -635,11 +548,7 @@ TEST_F(ValuePrintingTests, ansiColorsBlackhole) Value v; v.mkBlackhole(); - test(v, - ANSI_RED "«potential infinite recursion»" ANSI_NORMAL, - PrintOptions { - .ansiColors = true - }); + test(v, ANSI_RED "«potential infinite recursion»" ANSI_NORMAL, PrintOptions{.ansiColors = true}); } TEST_F(ValuePrintingTests, ansiColorsAttrsRepeated) @@ -656,11 +565,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsRepeated) Value vAttrs; vAttrs.mkAttrs(builder.finish()); - test(vAttrs, - "{ a = { }; b = " ANSI_MAGENTA "«repeated»" ANSI_NORMAL "; }", - PrintOptions { - .ansiColors = true - }); + test(vAttrs, "{ a = { }; b = " ANSI_MAGENTA "«repeated»" ANSI_NORMAL "; }", PrintOptions{.ansiColors = true}); } TEST_F(ValuePrintingTests, ansiColorsListRepeated) @@ -676,11 +581,7 @@ TEST_F(ValuePrintingTests, ansiColorsListRepeated) Value vList; vList.mkList(list); - test(vList, - "[ { } " ANSI_MAGENTA "«repeated»" ANSI_NORMAL " ]", - PrintOptions { - .ansiColors = true - }); + test(vList, "[ { } " ANSI_MAGENTA "«repeated»" ANSI_NORMAL " ]", PrintOptions{.ansiColors = true}); } TEST_F(ValuePrintingTests, listRepeated) @@ -696,12 +597,8 @@ TEST_F(ValuePrintingTests, listRepeated) Value vList; vList.mkList(list); - test(vList, "[ { } «repeated» ]", PrintOptions { }); - test(vList, - "[ { } { } ]", - PrintOptions { - .trackRepeated = false - }); + test(vList, "[ { } «repeated» ]", PrintOptions{}); + test(vList, "[ { } { } ]", PrintOptions{.trackRepeated = false}); } TEST_F(ValuePrintingTests, ansiColorsAttrsElided) @@ -719,12 +616,10 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided) Value vAttrs; vAttrs.mkAttrs(builder.finish()); - test(vAttrs, - "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«1 attribute elided»" ANSI_NORMAL " }", 
- PrintOptions { - .ansiColors = true, - .maxAttrs = 1 - }); + test( + vAttrs, + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«1 attribute elided»" ANSI_NORMAL " }", + PrintOptions{.ansiColors = true, .maxAttrs = 1}); Value vThree; vThree.mkInt(3); @@ -732,12 +627,10 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided) builder.insert(state.symbols.create("three"), &vThree); vAttrs.mkAttrs(builder.finish()); - test(vAttrs, - "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«2 attributes elided»" ANSI_NORMAL " }", - PrintOptions { - .ansiColors = true, - .maxAttrs = 1 - }); + test( + vAttrs, + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«2 attributes elided»" ANSI_NORMAL " }", + PrintOptions{.ansiColors = true, .maxAttrs = 1}); } TEST_F(ValuePrintingTests, ansiColorsListElided) @@ -751,37 +644,33 @@ TEST_F(ValuePrintingTests, ansiColorsListElided) vTwo.mkInt(2); { - auto list = state.buildList(2); - list.elems[0] = &vOne; - list.elems[1] = &vTwo; - Value vList; - vList.mkList(list); + auto list = state.buildList(2); + list.elems[0] = &vOne; + list.elems[1] = &vTwo; + Value vList; + vList.mkList(list); - test(vList, - "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«1 item elided»" ANSI_NORMAL " ]", - PrintOptions { - .ansiColors = true, - .maxListItems = 1 - }); + test( + vList, + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«1 item elided»" ANSI_NORMAL " ]", + PrintOptions{.ansiColors = true, .maxListItems = 1}); } Value vThree; vThree.mkInt(3); { - auto list = state.buildList(3); - list.elems[0] = &vOne; - list.elems[1] = &vTwo; - list.elems[2] = &vThree; - Value vList; - vList.mkList(list); + auto list = state.buildList(3); + list.elems[0] = &vOne; + list.elems[1] = &vTwo; + list.elems[2] = &vThree; + Value vList; + vList.mkList(list); - test(vList, - "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«2 items elided»" ANSI_NORMAL " ]", - PrintOptions { - .ansiColors = true, - .maxListItems = 1 - }); + test( + vList, + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«2 items elided»" ANSI_NORMAL " ]", + PrintOptions{.ansiColors = true, .maxListItems = 1}); } } diff --git a/src/libexpr/attr-path.cc b/src/libexpr/attr-path.cc index 111d04cf2..b02b08db4 100644 --- a/src/libexpr/attr-path.cc +++ b/src/libexpr/attr-path.cc @@ -1,10 +1,8 @@ #include "nix/expr/attr-path.hh" #include "nix/expr/eval-inline.hh" - namespace nix { - static Strings parseAttrPath(std::string_view s) { Strings res; @@ -19,18 +17,19 @@ static Strings parseAttrPath(std::string_view s) while (1) { if (i == s.end()) throw ParseError("missing closing quote in selection path '%1%'", s); - if (*i == '"') break; + if (*i == '"') + break; cur.push_back(*i++); } } else cur.push_back(*i); ++i; } - if (!cur.empty()) res.push_back(cur); + if (!cur.empty()) + res.push_back(cur); return res; } - std::vector parseAttrPath(EvalState & state, std::string_view s) { std::vector res; @@ -39,9 +38,8 @@ std::vector parseAttrPath(EvalState & state, std::string_view s) return res; } - -std::pair findAlongAttrPath(EvalState & state, const std::string & attrPath, - Bindings & autoArgs, Value & vIn) +std::pair +findAlongAttrPath(EvalState & state, const std::string & attrPath, Bindings & autoArgs, Value & vIn) { Strings tokens = parseAttrPath(attrPath); @@ -65,10 +63,12 @@ std::pair findAlongAttrPath(EvalState & state, const std::strin if (!attrIndex) { if (v->type() != nAttrs) - state.error( - "the expression selected by the selection path '%1%' should be a set but is %2%", - attrPath, - showType(*v)).debugThrow(); + state + 
.error( + "the expression selected by the selection path '%1%' should be a set but is %2%", + attrPath, + showType(*v)) + .debugThrow(); if (attr.empty()) throw Error("empty attribute name in selection path '%1%'", attrPath); @@ -79,7 +79,8 @@ std::pair findAlongAttrPath(EvalState & state, const std::strin attrNames.insert(std::string(state.symbols[attr.name])); auto suggestions = Suggestions::bestMatches(attrNames, attr); - throw AttrPathNotFound(suggestions, "attribute '%1%' in selection path '%2%' not found", attr, attrPath); + throw AttrPathNotFound( + suggestions, "attribute '%1%' in selection path '%2%' not found", attr, attrPath); } v = &*a->value; pos = a->pos; @@ -88,23 +89,23 @@ std::pair findAlongAttrPath(EvalState & state, const std::strin else { if (!v->isList()) - state.error( - "the expression selected by the selection path '%1%' should be a list but is %2%", - attrPath, - showType(*v)).debugThrow(); + state + .error( + "the expression selected by the selection path '%1%' should be a list but is %2%", + attrPath, + showType(*v)) + .debugThrow(); if (*attrIndex >= v->listSize()) throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", *attrIndex, attrPath); v = v->listView()[*attrIndex]; pos = noPos; } - } return {v, pos}; } - std::pair findPackageFilename(EvalState & state, Value & v, std::string what) { Value * v2; @@ -118,17 +119,17 @@ std::pair findPackageFilename(EvalState & state, Value & v // FIXME: is it possible to extract the Pos object instead of doing this // toString + parsing? NixStringContext context; - auto path = state.coerceToPath(noPos, *v2, context, "while evaluating the 'meta.position' attribute of a derivation"); + auto path = + state.coerceToPath(noPos, *v2, context, "while evaluating the 'meta.position' attribute of a derivation"); auto fn = path.path.abs(); - auto fail = [fn]() { - throw ParseError("cannot parse 'meta.position' attribute '%s'", fn); - }; + auto fail = [fn]() { throw ParseError("cannot parse 'meta.position' attribute '%s'", fn); }; try { auto colon = fn.rfind(':'); - if (colon == std::string::npos) fail(); + if (colon == std::string::npos) + fail(); auto lineno = std::stoi(std::string(fn, colon + 1, std::string::npos)); return {SourcePath{path.accessor, CanonPath(fn.substr(0, colon))}, lineno}; } catch (std::invalid_argument & e) { @@ -137,5 +138,4 @@ std::pair findPackageFilename(EvalState & state, Value & v } } - -} +} // namespace nix diff --git a/src/libexpr/attr-set.cc b/src/libexpr/attr-set.cc index 06e245aea..3a06441e9 100644 --- a/src/libexpr/attr-set.cc +++ b/src/libexpr/attr-set.cc @@ -3,11 +3,8 @@ #include - namespace nix { - - /* Allocate a new array of attributes for an attribute set with a specific capacity. The space is implicitly reserved after the Bindings structure. 
*/ @@ -22,7 +19,6 @@ Bindings * EvalState::allocBindings(size_t capacity) return new (allocBytes(sizeof(Bindings) + sizeof(Attr) * capacity)) Bindings((Bindings::size_t) capacity); } - Value & BindingsBuilder::alloc(Symbol name, PosIdx pos) { auto value = state.allocValue(); @@ -30,24 +26,21 @@ Value & BindingsBuilder::alloc(Symbol name, PosIdx pos) return *value; } - Value & BindingsBuilder::alloc(std::string_view name, PosIdx pos) { return alloc(state.symbols.create(name), pos); } - void Bindings::sort() { - if (size_) std::sort(begin(), end()); + if (size_) + std::sort(begin(), end()); } - Value & Value::mkAttrs(BindingsBuilder & bindings) { mkAttrs(bindings.finish()); return *this; } - -} +} // namespace nix diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index 27d60d6ef..dfb1b1a7e 100644 --- a/src/libexpr/eval-cache.cc +++ b/src/libexpr/eval-cache.cc @@ -11,8 +11,10 @@ namespace nix::eval_cache { CachedEvalError::CachedEvalError(ref cursor, Symbol attr) : EvalError(cursor->root->state, "cached failure of attribute '%s'", cursor->getAttrPathStr(attr)) - , cursor(cursor), attr(attr) -{ } + , cursor(cursor) + , attr(attr) +{ +} void CachedEvalError::force() { @@ -25,7 +27,8 @@ void CachedEvalError::force() } // Shouldn't happen. - throw EvalError(state, "evaluation of cached failed attribute '%s' unexpectedly succeeded", cursor->getAttrPathStr(attr)); + throw EvalError( + state, "evaluation of cached failed attribute '%s' unexpectedly succeeded", cursor->getAttrPathStr(attr)); } static const char * schema = R"sql( @@ -59,10 +62,7 @@ struct AttrDb SymbolTable & symbols; - AttrDb( - const StoreDirConfig & cfg, - const Hash & fingerprint, - SymbolTable & symbols) + AttrDb(const StoreDirConfig & cfg, const Hash & fingerprint, SymbolTable & symbols) : cfg(cfg) , _state(std::make_unique>()) , symbols(symbols) @@ -78,17 +78,16 @@ struct AttrDb state->db.isCache(); state->db.exec(schema); - state->insertAttribute.create(state->db, - "insert or replace into Attributes(parent, name, type, value) values (?, ?, ?, ?)"); + state->insertAttribute.create( + state->db, "insert or replace into Attributes(parent, name, type, value) values (?, ?, ?, ?)"); - state->insertAttributeWithContext.create(state->db, - "insert or replace into Attributes(parent, name, type, value, context) values (?, ?, ?, ?, ?)"); + state->insertAttributeWithContext.create( + state->db, "insert or replace into Attributes(parent, name, type, value, context) values (?, ?, ?, ?, ?)"); - state->queryAttribute.create(state->db, - "select rowid, type, value, context from Attributes where parent = ? and name = ?"); + state->queryAttribute.create( + state->db, "select rowid, type, value, context from Attributes where parent = ? 
and name = ?"); - state->queryAttributes.create(state->db, - "select name from Attributes where parent = ?"); + state->queryAttributes.create(state->db, "select name from Attributes where parent = ?"); state->txn = std::make_unique(state->db); } @@ -108,7 +107,8 @@ struct AttrDb template AttrId doSQLite(F && fun) { - if (failed) return 0; + if (failed) + return 0; try { return fun(); } catch (SQLiteError &) { @@ -118,116 +118,76 @@ struct AttrDb } } - AttrId setAttrs( - AttrKey key, - const std::vector & attrs) + AttrId setAttrs(AttrKey key, const std::vector & attrs) { - return doSQLite([&]() - { + return doSQLite([&]() { auto state(_state->lock()); - state->insertAttribute.use() - (key.first) - (symbols[key.second]) - (AttrType::FullAttrs) - (0, false).exec(); + state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::FullAttrs) (0, false).exec(); AttrId rowId = state->db.getLastInsertedRowId(); assert(rowId); for (auto & attr : attrs) - state->insertAttribute.use() - (rowId) - (symbols[attr]) - (AttrType::Placeholder) - (0, false).exec(); + state->insertAttribute.use()(rowId)(symbols[attr])(AttrType::Placeholder) (0, false).exec(); return rowId; }); } - AttrId setString( - AttrKey key, - std::string_view s, - const char * * context = nullptr) + AttrId setString(AttrKey key, std::string_view s, const char ** context = nullptr) { - return doSQLite([&]() - { + return doSQLite([&]() { auto state(_state->lock()); if (context) { std::string ctx; - for (const char * * p = context; *p; ++p) { - if (p != context) ctx.push_back(' '); + for (const char ** p = context; *p; ++p) { + if (p != context) + ctx.push_back(' '); ctx.append(*p); } - state->insertAttributeWithContext.use() - (key.first) - (symbols[key.second]) - (AttrType::String) - (s) - (ctx).exec(); + state->insertAttributeWithContext.use()(key.first)(symbols[key.second])(AttrType::String) (s) (ctx) + .exec(); } else { - state->insertAttribute.use() - (key.first) - (symbols[key.second]) - (AttrType::String) - (s).exec(); + state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::String) (s).exec(); } return state->db.getLastInsertedRowId(); }); } - AttrId setBool( - AttrKey key, - bool b) + AttrId setBool(AttrKey key, bool b) { - return doSQLite([&]() - { + return doSQLite([&]() { auto state(_state->lock()); - state->insertAttribute.use() - (key.first) - (symbols[key.second]) - (AttrType::Bool) - (b ? 1 : 0).exec(); + state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Bool) (b ? 
1 : 0).exec(); return state->db.getLastInsertedRowId(); }); } - AttrId setInt( - AttrKey key, - int n) + AttrId setInt(AttrKey key, int n) { - return doSQLite([&]() - { + return doSQLite([&]() { auto state(_state->lock()); - state->insertAttribute.use() - (key.first) - (symbols[key.second]) - (AttrType::Int) - (n).exec(); + state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Int) (n).exec(); return state->db.getLastInsertedRowId(); }); } - AttrId setListOfStrings( - AttrKey key, - const std::vector & l) + AttrId setListOfStrings(AttrKey key, const std::vector & l) { - return doSQLite([&]() - { + return doSQLite([&]() { auto state(_state->lock()); - state->insertAttribute.use() - (key.first) - (symbols[key.second]) - (AttrType::ListOfStrings) - (dropEmptyInitThenConcatStringsSep("\t", l)).exec(); + state->insertAttribute + .use()(key.first)(symbols[key.second])( + AttrType::ListOfStrings) (dropEmptyInitThenConcatStringsSep("\t", l)) + .exec(); return state->db.getLastInsertedRowId(); }); @@ -235,15 +195,10 @@ struct AttrDb AttrId setPlaceholder(AttrKey key) { - return doSQLite([&]() - { + return doSQLite([&]() { auto state(_state->lock()); - state->insertAttribute.use() - (key.first) - (symbols[key.second]) - (AttrType::Placeholder) - (0, false).exec(); + state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Placeholder) (0, false).exec(); return state->db.getLastInsertedRowId(); }); @@ -251,15 +206,10 @@ struct AttrDb AttrId setMissing(AttrKey key) { - return doSQLite([&]() - { + return doSQLite([&]() { auto state(_state->lock()); - state->insertAttribute.use() - (key.first) - (symbols[key.second]) - (AttrType::Missing) - (0, false).exec(); + state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Missing) (0, false).exec(); return state->db.getLastInsertedRowId(); }); @@ -267,15 +217,10 @@ struct AttrDb AttrId setMisc(AttrKey key) { - return doSQLite([&]() - { + return doSQLite([&]() { auto state(_state->lock()); - state->insertAttribute.use() - (key.first) - (symbols[key.second]) - (AttrType::Misc) - (0, false).exec(); + state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Misc) (0, false).exec(); return state->db.getLastInsertedRowId(); }); @@ -283,15 +228,10 @@ struct AttrDb AttrId setFailed(AttrKey key) { - return doSQLite([&]() - { + return doSQLite([&]() { auto state(_state->lock()); - state->insertAttribute.use() - (key.first) - (symbols[key.second]) - (AttrType::Failed) - (0, false).exec(); + state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Failed) (0, false).exec(); return state->db.getLastInsertedRowId(); }); @@ -302,51 +242,49 @@ struct AttrDb auto state(_state->lock()); auto queryAttribute(state->queryAttribute.use()(key.first)(symbols[key.second])); - if (!queryAttribute.next()) return {}; + if (!queryAttribute.next()) + return {}; auto rowId = (AttrId) queryAttribute.getInt(0); auto type = (AttrType) queryAttribute.getInt(1); switch (type) { - case AttrType::Placeholder: - return {{rowId, placeholder_t()}}; - case AttrType::FullAttrs: { - // FIXME: expensive, should separate this out. 
- std::vector attrs; - auto queryAttributes(state->queryAttributes.use()(rowId)); - while (queryAttributes.next()) - attrs.emplace_back(symbols.create(queryAttributes.getStr(0))); - return {{rowId, attrs}}; - } - case AttrType::String: { - NixStringContext context; - if (!queryAttribute.isNull(3)) - for (auto & s : tokenizeString>(queryAttribute.getStr(3), ";")) - context.insert(NixStringContextElem::parse(s)); - return {{rowId, string_t{queryAttribute.getStr(2), context}}}; - } - case AttrType::Bool: - return {{rowId, queryAttribute.getInt(2) != 0}}; - case AttrType::Int: - return {{rowId, int_t{NixInt{queryAttribute.getInt(2)}}}}; - case AttrType::ListOfStrings: - return {{rowId, tokenizeString>(queryAttribute.getStr(2), "\t")}}; - case AttrType::Missing: - return {{rowId, missing_t()}}; - case AttrType::Misc: - return {{rowId, misc_t()}}; - case AttrType::Failed: - return {{rowId, failed_t()}}; - default: - throw Error("unexpected type in evaluation cache"); + case AttrType::Placeholder: + return {{rowId, placeholder_t()}}; + case AttrType::FullAttrs: { + // FIXME: expensive, should separate this out. + std::vector attrs; + auto queryAttributes(state->queryAttributes.use()(rowId)); + while (queryAttributes.next()) + attrs.emplace_back(symbols.create(queryAttributes.getStr(0))); + return {{rowId, attrs}}; + } + case AttrType::String: { + NixStringContext context; + if (!queryAttribute.isNull(3)) + for (auto & s : tokenizeString>(queryAttribute.getStr(3), ";")) + context.insert(NixStringContextElem::parse(s)); + return {{rowId, string_t{queryAttribute.getStr(2), context}}}; + } + case AttrType::Bool: + return {{rowId, queryAttribute.getInt(2) != 0}}; + case AttrType::Int: + return {{rowId, int_t{NixInt{queryAttribute.getInt(2)}}}}; + case AttrType::ListOfStrings: + return {{rowId, tokenizeString>(queryAttribute.getStr(2), "\t")}}; + case AttrType::Missing: + return {{rowId, missing_t()}}; + case AttrType::Misc: + return {{rowId, misc_t()}}; + case AttrType::Failed: + return {{rowId, failed_t()}}; + default: + throw Error("unexpected type in evaluation cache"); } } }; -static std::shared_ptr makeAttrDb( - const StoreDirConfig & cfg, - const Hash & fingerprint, - SymbolTable & symbols) +static std::shared_ptr makeAttrDb(const StoreDirConfig & cfg, const Hash & fingerprint, SymbolTable & symbols) { try { return std::make_shared(cfg, fingerprint, symbols); @@ -357,9 +295,7 @@ static std::shared_ptr makeAttrDb( } EvalCache::EvalCache( - std::optional> useCache, - EvalState & state, - RootLoader rootLoader) + std::optional> useCache, EvalState & state, RootLoader rootLoader) : db(useCache ? 
makeAttrDb(*state.store, *useCache, state.symbols) : nullptr) , state(state) , rootLoader(rootLoader) @@ -381,11 +317,10 @@ ref EvalCache::getRoot() } AttrCursor::AttrCursor( - ref root, - Parent parent, - Value * value, - std::optional> && cachedValue) - : root(root), parent(parent), cachedValue(std::move(cachedValue)) + ref root, Parent parent, Value * value, std::optional> && cachedValue) + : root(root) + , parent(parent) + , cachedValue(std::move(cachedValue)) { if (value) _value = allocRootValue(value); @@ -470,13 +405,11 @@ Value & AttrCursor::forceValue() if (root->db && (!cachedValue || std::get_if(&cachedValue->second))) { if (v.type() == nString) - cachedValue = {root->db->setString(getKey(), v.c_str(), v.context()), - string_t{v.c_str(), {}}}; + cachedValue = {root->db->setString(getKey(), v.c_str(), v.context()), string_t{v.c_str(), {}}}; else if (v.type() == nPath) { auto path = v.path().path; cachedValue = {root->db->setString(getKey(), path.abs()), string_t{path.abs(), {}}}; - } - else if (v.type() == nBool) + } else if (v.type() == nBool) cachedValue = {root->db->setBool(getKey(), v.boolean()), v.boolean()}; else if (v.type() == nInt) cachedValue = {root->db->setInt(getKey(), v.integer().value), int_t{v.integer()}}; @@ -518,14 +451,14 @@ std::shared_ptr AttrCursor::maybeGetAttr(Symbol name) else if (std::get_if(&attr->second)) throw CachedEvalError(ref(shared_from_this()), name); else - return std::make_shared(root, - std::make_pair(ref(shared_from_this()), name), nullptr, std::move(attr)); + return std::make_shared( + root, std::make_pair(ref(shared_from_this()), name), nullptr, std::move(attr)); } // Incomplete attrset, so need to fall thru and // evaluate to see whether 'name' exists } else return nullptr; - //error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); + // error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); } } @@ -533,7 +466,7 @@ std::shared_ptr AttrCursor::maybeGetAttr(Symbol name) if (v.type() != nAttrs) return nullptr; - //error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); + // error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); auto attr = v.attrs()->get(name); @@ -618,17 +551,15 @@ string_t AttrCursor::getStringWithContext() if (auto s = std::get_if(&cachedValue->second)) { bool valid = true; for (auto & c : s->second) { - const StorePath & path = std::visit(overloaded { - [&](const NixStringContextElem::DrvDeep & d) -> const StorePath & { - return d.drvPath; + const StorePath & path = std::visit( + overloaded{ + [&](const NixStringContextElem::DrvDeep & d) -> const StorePath & { return d.drvPath; }, + [&](const NixStringContextElem::Built & b) -> const StorePath & { + return b.drvPath->getBaseStorePath(); + }, + [&](const NixStringContextElem::Opaque & o) -> const StorePath & { return o.path; }, }, - [&](const NixStringContextElem::Built & b) -> const StorePath & { - return b.drvPath->getBaseStorePath(); - }, - [&](const NixStringContextElem::Opaque & o) -> const StorePath & { - return o.path; - }, - }, c.raw); + c.raw); if (!root->state.store->isValidPath(path)) { valid = false; break; @@ -649,8 +580,7 @@ string_t AttrCursor::getStringWithContext() NixStringContext context; copyContext(v, context); return {v.c_str(), std::move(context)}; - } - else if (v.type() == nPath) + } else if (v.type() == nPath) return {v.path().to_string(), {}}; else root->state.error("'%s' is not a string but %s", getAttrPathStr(), showType(v)).debugThrow(); @@ -722,7 +652,8 @@ std::vector 
AttrCursor::getListOfStrings() std::vector res; for (auto elem : v.listView()) - res.push_back(std::string(root->state.forceStringNoCtx(*elem, noPos, "while evaluating an attribute for caching"))); + res.push_back( + std::string(root->state.forceStringNoCtx(*elem, noPos, "while evaluating an attribute for caching"))); if (root->db) cachedValue = {root->db->setListOfStrings(getKey(), res), res}; @@ -778,10 +709,10 @@ StorePath AttrCursor::forceDerivation() been garbage-collected. So force it to be regenerated. */ aDrvPath->forceValue(); if (!root->state.store->isValidPath(drvPath)) - throw Error("don't know how to recreate store derivation '%s'!", - root->state.store->printStorePath(drvPath)); + throw Error( + "don't know how to recreate store derivation '%s'!", root->state.store->printStorePath(drvPath)); } return drvPath; } -} +} // namespace nix::eval_cache diff --git a/src/libexpr/eval-error.cc b/src/libexpr/eval-error.cc index eac135008..7f0174715 100644 --- a/src/libexpr/eval-error.cc +++ b/src/libexpr/eval-error.cc @@ -44,12 +44,13 @@ EvalErrorBuilder & EvalErrorBuilder::withFrame(const Env & env, const Expr // NOTE: This is abusing side-effects. // TODO: check compatibility with nested debugger calls. // TODO: What side-effects?? - error.state.debugTraces.push_front(DebugTrace{ - .pos = expr.getPos(), - .expr = expr, - .env = env, - .hint = HintFmt("Fake frame for debugging purposes"), - .isError = true}); + error.state.debugTraces.push_front( + DebugTrace{ + .pos = expr.getPos(), + .expr = expr, + .env = env, + .hint = HintFmt("Fake frame for debugging purposes"), + .isError = true}); return *this; } @@ -96,7 +97,8 @@ template void EvalErrorBuilder::panic() { logError(error.info()); - printError("This is a bug! An unexpected condition occurred, causing the Nix evaluator to have to stop. If you could share a reproducible example or a core dump, please open an issue at https://github.com/NixOS/nix/issues"); + printError( + "This is a bug! An unexpected condition occurred, causing the Nix evaluator to have to stop. 
If you could share a reproducible example or a core dump, please open an issue at https://github.com/NixOS/nix/issues"); abort(); } @@ -112,4 +114,4 @@ template class EvalErrorBuilder; template class EvalErrorBuilder; template class EvalErrorBuilder; -} +} // namespace nix diff --git a/src/libexpr/eval-profiler-settings.cc b/src/libexpr/eval-profiler-settings.cc index 1a35d4a2d..1ee5e9231 100644 --- a/src/libexpr/eval-profiler-settings.cc +++ b/src/libexpr/eval-profiler-settings.cc @@ -46,4 +46,4 @@ NLOHMANN_JSON_SERIALIZE_ENUM( /* Explicit instantiation of templates */ template class BaseSetting; -} +} // namespace nix diff --git a/src/libexpr/eval-profiler.cc b/src/libexpr/eval-profiler.cc index b65bc3a4d..7769d47d5 100644 --- a/src/libexpr/eval-profiler.cc +++ b/src/libexpr/eval-profiler.cc @@ -352,4 +352,4 @@ ref makeSampleStackProfiler(EvalState & state, std::filesystem::pa return make_ref(state, profileFile, period); } -} +} // namespace nix diff --git a/src/libexpr/eval-settings.cc b/src/libexpr/eval-settings.cc index 659c01a9e..93db5aebb 100644 --- a/src/libexpr/eval-settings.cc +++ b/src/libexpr/eval-settings.cc @@ -19,12 +19,14 @@ Strings EvalSettings::parseNixPath(const std::string & s) auto start2 = p; while (p != s.end() && *p != ':') { - if (*p == '=') start2 = p + 1; + if (*p == '=') + start2 = p + 1; ++p; } if (p == s.end()) { - if (p != start) res.push_back(std::string(start, p)); + if (p != start) + res.push_back(std::string(start, p)); break; } @@ -32,10 +34,12 @@ Strings EvalSettings::parseNixPath(const std::string & s) auto prefix = std::string(start2, s.end()); if (EvalSettings::isPseudoUrl(prefix) || hasPrefix(prefix, "flake:")) { ++p; - while (p != s.end() && *p != ':') ++p; + while (p != s.end() && *p != ':') + ++p; } res.push_back(std::string(start, p)); - if (p == s.end()) break; + if (p == s.end()) + break; } ++p; @@ -75,11 +79,14 @@ Strings EvalSettings::getDefaultNixPath() bool EvalSettings::isPseudoUrl(std::string_view s) { - if (s.compare(0, 8, "channel:") == 0) return true; + if (s.compare(0, 8, "channel:") == 0) + return true; size_t pos = s.find("://"); - if (pos == std::string::npos) return false; + if (pos == std::string::npos) + return false; std::string scheme(s, 0, pos); - return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git" || scheme == "s3" || scheme == "ssh"; + return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git" + || scheme == "s3" || scheme == "ssh"; } std::string EvalSettings::resolvePseudoUrl(std::string_view url) @@ -98,9 +105,7 @@ const std::string & EvalSettings::getCurrentSystem() const Path getNixDefExpr() { - return settings.useXDGBaseDirectories - ? getStateDir() + "/defexpr" - : getHome() + "/.nix-defexpr"; + return settings.useXDGBaseDirectories ? getStateDir() + "/defexpr" : getHome() + "/.nix-defexpr"; } } // namespace nix \ No newline at end of file diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 47cc35daa..f0d64d44a 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -51,11 +51,11 @@ static char * allocString(size_t size) { char * t; t = (char *) GC_MALLOC_ATOMIC(size); - if (!t) throw std::bad_alloc(); + if (!t) + throw std::bad_alloc(); return t; } - // When there's no need to write to the string, we can optimize away empty // string allocations. 
// This function handles makeImmutableString(std::string_view()) by returning @@ -71,14 +71,14 @@ static const char * makeImmutableString(std::string_view s) return t; } - RootValue allocRootValue(Value * v) { return std::allocate_shared(traceable_allocator(), v); } // Pretty print types for assertion errors -std::ostream & operator << (std::ostream & os, const ValueType t) { +std::ostream & operator<<(std::ostream & os, const ValueType t) +{ os << showType(t); return os; } @@ -102,70 +102,84 @@ void Value::print(EvalState & state, std::ostream & str, PrintOptions options) std::string_view showType(ValueType type, bool withArticle) { - #define WA(a, w) withArticle ? a " " w : w +#define WA(a, w) withArticle ? a " " w : w switch (type) { - case nInt: return WA("an", "integer"); - case nBool: return WA("a", "Boolean"); - case nString: return WA("a", "string"); - case nPath: return WA("a", "path"); - case nNull: return "null"; - case nAttrs: return WA("a", "set"); - case nList: return WA("a", "list"); - case nFunction: return WA("a", "function"); - case nExternal: return WA("an", "external value"); - case nFloat: return WA("a", "float"); - case nThunk: return WA("a", "thunk"); + case nInt: + return WA("an", "integer"); + case nBool: + return WA("a", "Boolean"); + case nString: + return WA("a", "string"); + case nPath: + return WA("a", "path"); + case nNull: + return "null"; + case nAttrs: + return WA("a", "set"); + case nList: + return WA("a", "list"); + case nFunction: + return WA("a", "function"); + case nExternal: + return WA("an", "external value"); + case nFloat: + return WA("a", "float"); + case nThunk: + return WA("a", "thunk"); } unreachable(); } - std::string showType(const Value & v) { - // Allow selecting a subset of enum values - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wswitch-enum" +// Allow selecting a subset of enum values +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wswitch-enum" switch (v.getInternalType()) { - case tString: return v.context() ? "a string with context" : "a string"; - case tPrimOp: - return fmt("the built-in function '%s'", std::string(v.primOp()->name)); - case tPrimOpApp: - return fmt("the partially applied built-in function '%s'", v.primOpAppPrimOp()->name); - case tExternal: return v.external()->showType(); - case tThunk: return v.isBlackhole() ? "a black hole" : "a thunk"; - case tApp: return "a function application"; + case tString: + return v.context() ? "a string with context" : "a string"; + case tPrimOp: + return fmt("the built-in function '%s'", std::string(v.primOp()->name)); + case tPrimOpApp: + return fmt("the partially applied built-in function '%s'", v.primOpAppPrimOp()->name); + case tExternal: + return v.external()->showType(); + case tThunk: + return v.isBlackhole() ? 
"a black hole" : "a thunk"; + case tApp: + return "a function application"; default: return std::string(showType(v.type())); } - #pragma GCC diagnostic pop +#pragma GCC diagnostic pop } PosIdx Value::determinePos(const PosIdx pos) const { - // Allow selecting a subset of enum values - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wswitch-enum" +// Allow selecting a subset of enum values +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wswitch-enum" switch (getInternalType()) { - case tAttrs: return attrs()->pos; - case tLambda: return lambda().fun->pos; - case tApp: return app().left->determinePos(pos); - default: return pos; + case tAttrs: + return attrs()->pos; + case tLambda: + return lambda().fun->pos; + case tApp: + return app().left->determinePos(pos); + default: + return pos; } - #pragma GCC diagnostic pop +#pragma GCC diagnostic pop } bool Value::isTrivial() const { - return - !isa() - && (!isa() - || (dynamic_cast(thunk().expr) - && ((ExprAttrs *) thunk().expr)->dynamicAttrs.empty()) - || dynamic_cast(thunk().expr) - || dynamic_cast(thunk().expr)); + return !isa() + && (!isa() + || (dynamic_cast(thunk().expr) && ((ExprAttrs *) thunk().expr)->dynamicAttrs.empty()) + || dynamic_cast(thunk().expr) || dynamic_cast(thunk().expr)); } - static Symbol getName(const AttrName & name, EvalState & state, Env & env) { if (name.symbol) { @@ -304,7 +318,7 @@ EvalState::EvalState( , internalFS(make_ref()) , derivationInternal{corepkgsFS->addFile( CanonPath("derivation-internal.nix"), - #include "primops/derivation.nix.gen.hh" +#include "primops/derivation.nix.gen.hh" )} , store(store) , buildStore(buildStore ? buildStore : store) @@ -345,7 +359,7 @@ EvalState::EvalState( assert(lookupPath.elements.empty()); if (!settings.pureEval) { for (auto & i : lookupPathFromArguments.elements) { - lookupPath.elements.emplace_back(LookupPath::Elem {i}); + lookupPath.elements.emplace_back(LookupPath::Elem{i}); } /* $NIX_PATH overriding regular settings is implemented as a hack in `initGC()` */ for (auto & i : settings.nixPath.get()) { @@ -365,7 +379,7 @@ EvalState::EvalState( corepkgsFS->addFile( CanonPath("fetchurl.nix"), - #include "fetchurl.nix.gen.hh" +#include "fetchurl.nix.gen.hh" ); createBaseEnv(settings); @@ -376,18 +390,15 @@ EvalState::EvalState( switch (settings.evalProfilerMode) { case EvalProfilerMode::flamegraph: - profiler.addProfiler(makeSampleStackProfiler( - *this, settings.evalProfileFile.get(), settings.evalProfilerFrequency)); + profiler.addProfiler( + makeSampleStackProfiler(*this, settings.evalProfileFile.get(), settings.evalProfilerFrequency)); break; case EvalProfilerMode::disabled: break; } } -EvalState::~EvalState() -{ -} - +EvalState::~EvalState() {} void EvalState::allowPath(const Path & path) { @@ -403,7 +414,8 @@ void EvalState::allowPath(const StorePath & storePath) void EvalState::allowClosure(const StorePath & storePath) { - if (!rootFS.dynamic_pointer_cast()) return; + if (!rootFS.dynamic_pointer_cast()) + return; StorePathSet closure; store->computeFSClosure(storePath, closure); @@ -420,10 +432,8 @@ void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & inline static bool isJustSchemePrefix(std::string_view prefix) { - return - !prefix.empty() - && prefix[prefix.size() - 1] == ':' - && isValidSchemeName(prefix.substr(0, prefix.size() - 1)); + return !prefix.empty() && prefix[prefix.size() - 1] == ':' + && isValidSchemeName(prefix.substr(0, prefix.size() - 1)); } bool isAllowedURI(std::string_view uri, const Strings & 
allowedUris) @@ -434,18 +444,14 @@ bool isAllowedURI(std::string_view uri, const Strings & allowedUris) for (auto & prefix : allowedUris) { if (uri == prefix // Allow access to subdirectories of the prefix. - || (uri.size() > prefix.size() - && prefix.size() > 0 - && hasPrefix(uri, prefix) + || (uri.size() > prefix.size() && prefix.size() > 0 && hasPrefix(uri, prefix) && ( // Allow access to subdirectories of the prefix. prefix[prefix.size() - 1] == '/' || uri[prefix.size()] == '/' // Allow access to whole schemes - || isJustSchemePrefix(prefix) - ) - )) + || isJustSchemePrefix(prefix)))) return true; } @@ -454,9 +460,11 @@ bool isAllowedURI(std::string_view uri, const Strings & allowedUris) void EvalState::checkURI(const std::string & uri) { - if (!settings.restrictEval) return; + if (!settings.restrictEval) + return; - if (isAllowedURI(uri, settings.allowedUris.get())) return; + if (isAllowedURI(uri, settings.allowedUris.get())) + return; /* If the URI is a path, then check it against allowedPaths as well. */ @@ -475,7 +483,6 @@ void EvalState::checkURI(const std::string & uri) throw RestrictedPathError("access to URI '%s' is forbidden in restricted mode", uri); } - Value * EvalState::addConstant(const std::string & name, Value & v, Constant info) { Value * v2 = allocValue(); @@ -484,7 +491,6 @@ Value * EvalState::addConstant(const std::string & name, Value & v, Constant inf return v2; } - void EvalState::addConstant(const std::string & name, Value * v, Constant info) { auto name2 = name.substr(0, 2) == "__" ? name.substr(2) : name; @@ -506,7 +512,6 @@ void EvalState::addConstant(const std::string & name, Value * v, Constant info) } } - void PrimOp::check() { if (arity > maxPrimOpArity) { @@ -514,14 +519,12 @@ void PrimOp::check() } } - std::ostream & operator<<(std::ostream & output, const PrimOp & primOp) { output << "primop " << primOp.name; return output; } - const PrimOp * Value::primOpAppPrimOp() const { Value * left = primOpApp().left; @@ -536,14 +539,12 @@ const PrimOp * Value::primOpAppPrimOp() const return left->primOp(); } - void Value::mkPrimOp(PrimOp * p) { p->check(); setStorage(p); } - Value * EvalState::addPrimOp(PrimOp && primOp) { /* Hack to make constants lazy: turn them into a application of @@ -554,10 +555,13 @@ Value * EvalState::addPrimOp(PrimOp && primOp) vPrimOp->mkPrimOp(new PrimOp(primOp)); Value v; v.mkApp(vPrimOp, vPrimOp); - return addConstant(primOp.name, v, { - .type = nThunk, // FIXME - .doc = primOp.doc, - }); + return addConstant( + primOp.name, + v, + { + .type = nThunk, // FIXME + .doc = primOp.doc, + }); } auto envName = symbols.create(primOp.name); @@ -578,13 +582,11 @@ Value * EvalState::addPrimOp(PrimOp && primOp) return v; } - Value & EvalState::getBuiltins() { return *baseEnv.values[0]; } - Value & EvalState::getBuiltin(const std::string & name) { auto it = getBuiltins().attrs()->get(symbols.create(name)); @@ -594,13 +596,12 @@ Value & EvalState::getBuiltin(const std::string & name) error("builtin '%1%' not found", name).debugThrow(); } - std::optional EvalState::getDoc(Value & v) { if (v.isPrimOp()) { auto v2 = &v; if (auto * doc = v2->primOp()->doc) - return Doc { + return Doc{ .pos = {}, .name = v2->primOp()->name, .arity = v2->primOp()->arity, @@ -626,11 +627,10 @@ std::optional EvalState::getDoc(Value & v) if (name.empty()) { s << "Function "; - } - else { + } else { s << "Function `" << name << "`"; if (pos) - s << "\\\n … " ; + s << "\\\n … "; else s << "\\\n"; } @@ -643,7 +643,7 @@ std::optional EvalState::getDoc(Value & v) s << docStr; 
- return Doc { + return Doc{ .pos = pos, .name = name, .arity = 0, // FIXME: figure out how deep by syntax only? It's not semantically useful though... @@ -664,8 +664,7 @@ std::optional EvalState::getDoc(Value & v) callFunction(functor, vp, partiallyApplied, noPos); auto _level = addCallDepth(noPos); return getDoc(partiallyApplied); - } - catch (Error & e) { + } catch (Error & e) { e.addTrace(nullptr, "while partially calling '%1%' to retrieve documentation", "__functor"); throw; } @@ -673,7 +672,6 @@ std::optional EvalState::getDoc(Value & v) return {}; } - // just for the current level of StaticEnv, not the whole chain. void printStaticEnvBindings(const SymbolTable & st, const StaticEnv & se) { @@ -721,13 +719,12 @@ void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & std::cout << ANSI_NORMAL; std::cout << std::endl; if (se.isWith) - printWithBindings(st, env); // probably nothing there for the top level. + printWithBindings(st, env); // probably nothing there for the top level. std::cout << std::endl; - } } -void printEnvBindings(const EvalState &es, const Expr & expr, const Env & env) +void printEnvBindings(const EvalState & es, const Expr & expr, const Env & env) { // just print the names for now auto se = es.getStaticEnv(expr); @@ -765,13 +762,18 @@ std::unique_ptr mapStaticEnvBindings(const SymbolTable & st, const Stati /** * Sets `inDebugger` to true on construction and false on destruction. */ -class DebuggerGuard { +class DebuggerGuard +{ bool & inDebugger; public: - DebuggerGuard(bool & inDebugger) : inDebugger(inDebugger) { + DebuggerGuard(bool & inDebugger) + : inDebugger(inDebugger) + { inDebugger = true; } - ~DebuggerGuard() { + + ~DebuggerGuard() + { inDebugger = false; } }; @@ -821,60 +823,52 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr & return nullptr; }(); - if (error) - { + if (error) { printError("%s\n", error->what()); if (trylevel > 0 && error->info().level != lvlInfo) - printError("This exception occurred in a 'tryEval' call. Use " ANSI_GREEN "--ignore-try" ANSI_NORMAL " to skip these.\n"); + printError( + "This exception occurred in a 'tryEval' call. Use " ANSI_GREEN "--ignore-try" ANSI_NORMAL + " to skip these.\n"); } auto se = getStaticEnv(expr); if (se) { auto vm = mapStaticEnvBindings(symbols, *se.get(), env); DebuggerGuard _guard(inDebugger); - auto exitStatus = (debugRepl)(ref(shared_from_this()), *vm); + auto exitStatus = (debugRepl) (ref(shared_from_this()), *vm); switch (exitStatus) { - case ReplExitStatus::QuitAll: - if (error) - throw *error; - throw Exit(0); - case ReplExitStatus::Continue: - break; - default: - unreachable(); + case ReplExitStatus::QuitAll: + if (error) + throw *error; + throw Exit(0); + case ReplExitStatus::Continue: + break; + default: + unreachable(); } } } template -void EvalState::addErrorTrace(Error & e, const Args & ... formatArgs) const +void EvalState::addErrorTrace(Error & e, const Args &... formatArgs) const { e.addTrace(nullptr, HintFmt(formatArgs...)); } template -void EvalState::addErrorTrace(Error & e, const PosIdx pos, const Args & ... formatArgs) const +void EvalState::addErrorTrace(Error & e, const PosIdx pos, const Args &... formatArgs) const { e.addTrace(positions[pos], HintFmt(formatArgs...)); } template static std::unique_ptr makeDebugTraceStacker( - EvalState & state, - Expr & expr, - Env & env, - std::variant pos, - const Args & ... formatArgs) + EvalState & state, Expr & expr, Env & env, std::variant pos, const Args &... 
formatArgs) { - return std::make_unique(state, - DebugTrace { - .pos = std::move(pos), - .expr = expr, - .env = env, - .hint = HintFmt(formatArgs...), - .isError = false - }); + return std::make_unique( + state, + DebugTrace{.pos = std::move(pos), .expr = expr, .env = env, .hint = HintFmt(formatArgs...), .isError = false}); } DebugTraceStacker::DebugTraceStacker(EvalState & evalState, DebugTrace t) @@ -891,13 +885,11 @@ void Value::mkString(std::string_view s) mkString(makeImmutableString(s)); } - -static const char * * encodeContext(const NixStringContext & context) +static const char ** encodeContext(const NixStringContext & context) { if (!context.empty()) { size_t n = 0; - auto ctx = (const char * *) - allocBytes((context.size() + 1) * sizeof(char *)); + auto ctx = (const char **) allocBytes((context.size() + 1) * sizeof(char *)); for (auto & i : context) { ctx[n++] = makeImmutableString({i.to_string()}); } @@ -922,40 +914,48 @@ void Value::mkPath(const SourcePath & path) mkPath(&*path.accessor, makeImmutableString(path.path.abs())); } - inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval) { - for (auto l = var.level; l; --l, env = env->up) ; + for (auto l = var.level; l; --l, env = env->up) + ; - if (!var.fromWith) return env->values[var.displ]; + if (!var.fromWith) + return env->values[var.displ]; // This early exit defeats the `maybeThunk` optimization for variables from `with`, // The added complexity of handling this appears to be similarly in cost, or // the cases where applicable were insignificant in the first place. - if (noEval) return nullptr; + if (noEval) + return nullptr; auto * fromWith = var.fromWith; while (1) { forceAttrs(*env->values[0], fromWith->pos, "while evaluating the first subexpression of a with expression"); if (auto j = env->values[0]->attrs()->get(var.name)) { - if (countCalls) attrSelects[j->pos]++; + if (countCalls) + attrSelects[j->pos]++; return j->value; } if (!fromWith->parentWith) - error("undefined variable '%1%'", symbols[var.name]).atPos(var.pos).withFrame(*env, var).debugThrow(); - for (size_t l = fromWith->prevWith; l; --l, env = env->up) ; + error("undefined variable '%1%'", symbols[var.name]) + .atPos(var.pos) + .withFrame(*env, var) + .debugThrow(); + for (size_t l = fromWith->prevWith; l; --l, env = env->up) + ; fromWith = fromWith->parentWith; } } ListBuilder::ListBuilder(EvalState & state, size_t size) : size(size) - , elems(size <= 2 ? inlineElems : (Value * *) allocBytes(size * sizeof(Value *))) + , elems(size <= 2 ? inlineElems : (Value **) allocBytes(size * sizeof(Value *))) { state.nrListElems += size; } -Value * EvalState::getBool(bool b) { +Value * EvalState::getBool(bool b) +{ return b ? 
&vTrue : &vFalse; } @@ -967,13 +967,11 @@ static inline void mkThunk(Value & v, Env & env, Expr * expr) nrThunks++; } - void EvalState::mkThunk_(Value & v, Expr * expr) { mkThunk(v, baseEnv, expr); } - void EvalState::mkPos(Value & v, PosIdx p) { auto origin = positions.originOf(p); @@ -986,17 +984,15 @@ void EvalState::mkPos(Value & v, PosIdx p) v.mkNull(); } - void EvalState::mkStorePathString(const StorePath & p, Value & v) { v.mkString( store->printStorePath(p), - NixStringContext { - NixStringContextElem::Opaque { .path = p }, + NixStringContext{ + NixStringContextElem::Opaque{.path = p}, }); } - std::string EvalState::mkOutputStringRaw( const SingleDerivedPath::Built & b, std::optional optStaticOutputPath, @@ -1004,64 +1000,56 @@ std::string EvalState::mkOutputStringRaw( { /* In practice, this is testing for the case of CA derivations, or dynamic derivations. */ - return optStaticOutputPath - ? store->printStorePath(std::move(*optStaticOutputPath)) - /* Downstream we would substitute this for an actual path once - we build the floating CA derivation */ - : DownstreamPlaceholder::fromSingleDerivedPathBuilt(b, xpSettings).render(); + return optStaticOutputPath ? store->printStorePath(std::move(*optStaticOutputPath)) + /* Downstream we would substitute this for an actual path once + we build the floating CA derivation */ + : DownstreamPlaceholder::fromSingleDerivedPathBuilt(b, xpSettings).render(); } - void EvalState::mkOutputString( Value & value, const SingleDerivedPath::Built & b, std::optional optStaticOutputPath, const ExperimentalFeatureSettings & xpSettings) { - value.mkString( - mkOutputStringRaw(b, optStaticOutputPath, xpSettings), - NixStringContext { b }); + value.mkString(mkOutputStringRaw(b, optStaticOutputPath, xpSettings), NixStringContext{b}); } - -std::string EvalState::mkSingleDerivedPathStringRaw( - const SingleDerivedPath & p) +std::string EvalState::mkSingleDerivedPathStringRaw(const SingleDerivedPath & p) { - return std::visit(overloaded { - [&](const SingleDerivedPath::Opaque & o) { - return store->printStorePath(o.path); - }, - [&](const SingleDerivedPath::Built & b) { - auto optStaticOutputPath = std::visit(overloaded { - [&](const SingleDerivedPath::Opaque & o) { - auto drv = store->readDerivation(o.path); - auto i = drv.outputs.find(b.output); - if (i == drv.outputs.end()) - throw Error("derivation '%s' does not have output '%s'", b.drvPath->to_string(*store), b.output); - return i->second.path(*store, drv.name, b.output); - }, - [&](const SingleDerivedPath::Built & o) -> std::optional { - return std::nullopt; - }, - }, b.drvPath->raw()); - return mkOutputStringRaw(b, optStaticOutputPath); - } - }, p.raw()); + return std::visit( + overloaded{ + [&](const SingleDerivedPath::Opaque & o) { return store->printStorePath(o.path); }, + [&](const SingleDerivedPath::Built & b) { + auto optStaticOutputPath = std::visit( + overloaded{ + [&](const SingleDerivedPath::Opaque & o) { + auto drv = store->readDerivation(o.path); + auto i = drv.outputs.find(b.output); + if (i == drv.outputs.end()) + throw Error( + "derivation '%s' does not have output '%s'", + b.drvPath->to_string(*store), + b.output); + return i->second.path(*store, drv.name, b.output); + }, + [&](const SingleDerivedPath::Built & o) -> std::optional { return std::nullopt; }, + }, + b.drvPath->raw()); + return mkOutputStringRaw(b, optStaticOutputPath); + }}, + p.raw()); } - -void EvalState::mkSingleDerivedPathString( - const SingleDerivedPath & p, - Value & v) +void EvalState::mkSingleDerivedPathString(const 
SingleDerivedPath & p, Value & v) { v.mkString( mkSingleDerivedPathStringRaw(p), - NixStringContext { + NixStringContext{ std::visit([](auto && v) -> NixStringContextElem { return v; }, p), }); } - /* Create a thunk for the delayed computation of the given expression in the given environment. But if the expression is a variable, then look it up right away. This significantly reduces the number @@ -1073,17 +1061,18 @@ Value * Expr::maybeThunk(EvalState & state, Env & env) return v; } - Value * ExprVar::maybeThunk(EvalState & state, Env & env) { Value * v = state.lookupVar(&env, *this, true); /* The value might not be initialised in the environment yet. In that case, ignore it. */ - if (v) { state.nrAvoided++; return v; } + if (v) { + state.nrAvoided++; + return v; + } return Expr::maybeThunk(state, env); } - Value * ExprString::maybeThunk(EvalState & state, Env & env) { state.nrAvoided++; @@ -1108,7 +1097,6 @@ Value * ExprPath::maybeThunk(EvalState & state, Env & env) return &v; } - void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial) { FileEvalCache::iterator i; @@ -1136,19 +1124,18 @@ void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial) fileParseCache.emplace(resolvedPath, e); try { - auto dts = debugRepl - ? makeDebugTraceStacker( - *this, - *e, - this->baseEnv, - e->getPos(), - "while evaluating the file '%1%':", resolvedPath.to_string()) - : nullptr; + auto dts = debugRepl ? makeDebugTraceStacker( + *this, + *e, + this->baseEnv, + e->getPos(), + "while evaluating the file '%1%':", + resolvedPath.to_string()) + : nullptr; // Enforce that 'flake.nix' is a direct attrset, not a // computation. - if (mustBeTrivial && - !(dynamic_cast(e))) + if (mustBeTrivial && !(dynamic_cast(e))) error("file '%s' must be an attribute set", path).debugThrow(); eval(e, v); } catch (Error & e) { @@ -1157,10 +1144,10 @@ void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial) } fileEvalCache.emplace(resolvedPath, v); - if (path != resolvedPath) fileEvalCache.emplace(path, v); + if (path != resolvedPath) + fileEvalCache.emplace(path, v); } - void EvalState::resetFileCache() { fileEvalCache.clear(); @@ -1168,13 +1155,11 @@ void EvalState::resetFileCache() inputCache->clear(); } - void EvalState::eval(Expr * e, Value & v) { e->eval(*this, baseEnv, v); } - inline bool EvalState::evalBool(Env & env, Expr * e, const PosIdx pos, std::string_view errorCtx) { try { @@ -1182,10 +1167,10 @@ inline bool EvalState::evalBool(Env & env, Expr * e, const PosIdx pos, std::stri e->eval(*this, env, v); if (v.type() != nBool) error( - "expected a Boolean but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions) - ).atPos(pos).withFrame(env, *e).debugThrow(); + "expected a Boolean but found %1%: %2%", showType(v), ValuePrinter(*this, v, errorPrintOptions)) + .atPos(pos) + .withFrame(env, *e) + .debugThrow(); return v.boolean(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -1193,36 +1178,31 @@ inline bool EvalState::evalBool(Env & env, Expr * e, const PosIdx pos, std::stri } } - inline void EvalState::evalAttrs(Env & env, Expr * e, Value & v, const PosIdx pos, std::string_view errorCtx) { try { e->eval(*this, env, v); if (v.type() != nAttrs) error( - "expected a set but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions) - ).withFrame(env, *e).debugThrow(); + "expected a set but found %1%: %2%", showType(v), ValuePrinter(*this, v, errorPrintOptions)) + .withFrame(env, *e) + .debugThrow(); } 
catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; } } - void Expr::eval(EvalState & state, Env & env, Value & v) { unreachable(); } - void ExprInt::eval(EvalState & state, Env & env, Value & v) { v = this->v; } - void ExprFloat::eval(EvalState & state, Env & env, Value & v) { v = this->v; @@ -1233,13 +1213,11 @@ void ExprString::eval(EvalState & state, Env & env, Value & v) v = this->v; } - void ExprPath::eval(EvalState & state, Env & env, Value & v) { v = this->v; } - Env * ExprAttrs::buildInheritFromEnv(EvalState & state, Env & up) { Env & inheritEnv = state.allocEnv(inheritFromExprs->size()); @@ -1294,7 +1272,10 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) Hence we need __overrides.) */ if (hasOverrides) { Value * vOverrides = (*bindings.bindings)[overrides->second.displ].value; - state.forceAttrs(*vOverrides, [&]() { return vOverrides->determinePos(noPos); }, "while evaluating the `__overrides` attribute"); + state.forceAttrs( + *vOverrides, + [&]() { return vOverrides->determinePos(noPos); }, + "while evaluating the `__overrides` attribute"); bindings.grow(state.allocBindings(bindings.capacity() + vOverrides->attrs()->size())); for (auto & i : *vOverrides->attrs()) { AttrDefs::iterator j = attrs.find(i.name); @@ -1312,9 +1293,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) Env * inheritEnv = inheritFromExprs ? buildInheritFromEnv(state, env) : nullptr; for (auto & i : attrs) bindings.insert( - i.first, - i.second.e->maybeThunk(state, *i.second.chooseByKind(&env, &env, inheritEnv)), - i.second.pos); + i.first, i.second.e->maybeThunk(state, *i.second.chooseByKind(&env, &env, inheritEnv)), i.second.pos); } /* Dynamic attrs apply *after* rec and __overrides. */ @@ -1330,7 +1309,12 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) // FIXME: inefficient bindings.bindings->sort(); if (auto j = bindings.bindings->get(nameSym)) - state.error("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow(); + state + .error( + "dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]) + .atPos(i.pos) + .withFrame(env, *this) + .debugThrow(); i.valueExpr->setName(nameSym); /* Keep sorted order so find can catch duplicates */ @@ -1343,7 +1327,6 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) v.mkAttrs(sort ? bindings.finish() : bindings.alreadySorted()); } - void ExprLet::eval(EvalState & state, Env & env, Value & v) { /* Create a new environment that contains the attributes in this @@ -1358,26 +1341,16 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v) environment. */ Displacement displ = 0; for (auto & i : attrs->attrs) { - env2.values[displ++] = i.second.e->maybeThunk( - state, - *i.second.chooseByKind(&env2, &env, inheritEnv)); + env2.values[displ++] = i.second.e->maybeThunk(state, *i.second.chooseByKind(&env2, &env, inheritEnv)); } auto dts = state.debugRepl - ? makeDebugTraceStacker( - state, - *this, - env2, - getPos(), - "while evaluating a '%1%' expression", - "let" - ) - : nullptr; + ? 
makeDebugTraceStacker(state, *this, env2, getPos(), "while evaluating a '%1%' expression", "let") + : nullptr; body->eval(state, env2, v); } - void ExprList::eval(EvalState & state, Env & env, Value & v) { auto list = state.buildList(elems.size()); @@ -1386,7 +1359,6 @@ void ExprList::eval(EvalState & state, Env & env, Value & v) v.mkList(list); } - Value * ExprList::maybeThunk(EvalState & state, Env & env) { if (elems.empty()) { @@ -1395,7 +1367,6 @@ Value * ExprList::maybeThunk(EvalState & state, Env & env) return Expr::maybeThunk(state, env); } - void ExprVar::eval(EvalState & state, Env & env, Value & v) { Value * v2 = state.lookupVar(&env, *this, false); @@ -1403,13 +1374,15 @@ void ExprVar::eval(EvalState & state, Env & env, Value & v) v = *v2; } - static std::string showAttrPath(EvalState & state, Env & env, const AttrPath & attrPath) { std::ostringstream out; bool first = true; for (auto & i : attrPath) { - if (!first) out << '.'; else first = false; + if (!first) + out << '.'; + else + first = false; try { out << state.symbols[getName(i, state, env)]; } catch (Error & e) { @@ -1422,7 +1395,6 @@ static std::string showAttrPath(EvalState & state, Env & env, const AttrPath & a return out.str(); } - void ExprSelect::eval(EvalState & state, Env & env, Value & v) { Value vTmp; @@ -1432,15 +1404,14 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v) e->eval(state, env, vTmp); try { - auto dts = state.debugRepl - ? makeDebugTraceStacker( - state, - *this, - env, - getPos(), - "while evaluating the attribute '%1%'", - showAttrPath(state, env, attrPath)) - : nullptr; + auto dts = state.debugRepl ? makeDebugTraceStacker( + state, + *this, + env, + getPos(), + "while evaluating the attribute '%1%'", + showAttrPath(state, env, attrPath)) + : nullptr; for (auto & i : attrPath) { state.nrLookups++; @@ -1448,9 +1419,7 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v) auto name = getName(i, state, env); if (def) { state.forceValue(*vAttrs, pos); - if (vAttrs->type() != nAttrs || - !(j = vAttrs->attrs()->get(name))) - { + if (vAttrs->type() != nAttrs || !(j = vAttrs->attrs()->get(name))) { def->eval(state, env, v); return; } @@ -1462,23 +1431,27 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v) allAttrNames.insert(std::string(state.symbols[attr.name])); auto suggestions = Suggestions::bestMatches(allAttrNames, state.symbols[name]); state.error("attribute '%1%' missing", state.symbols[name]) - .atPos(pos).withSuggestions(suggestions).withFrame(env, *this).debugThrow(); + .atPos(pos) + .withSuggestions(suggestions) + .withFrame(env, *this) + .debugThrow(); } } vAttrs = j->value; pos2 = j->pos; - if (state.countCalls) state.attrSelects[pos2]++; + if (state.countCalls) + state.attrSelects[pos2]++; } - state.forceValue(*vAttrs, (pos2 ? pos2 : this->pos ) ); + state.forceValue(*vAttrs, (pos2 ? 
pos2 : this->pos)); } catch (Error & e) { if (pos2) { auto pos2r = state.positions[pos2]; auto origin = std::get_if(&pos2r.origin); if (!(origin && *origin == state.derivationInternal)) - state.addErrorTrace(e, pos2, "while evaluating the attribute '%1%'", - showAttrPath(state, env, attrPath)); + state.addErrorTrace( + e, pos2, "while evaluating the attribute '%1%'", showAttrPath(state, env, attrPath)); } throw; } @@ -1502,7 +1475,6 @@ Symbol ExprSelect::evalExceptFinalSelect(EvalState & state, Env & env, Value & a return name; } - void ExprOpHasAttr::eval(EvalState & state, Env & env, Value & v) { Value vTmp; @@ -1514,9 +1486,7 @@ void ExprOpHasAttr::eval(EvalState & state, Env & env, Value & v) state.forceValue(*vAttrs, getPos()); const Attr * j; auto name = getName(i, state, env); - if (vAttrs->type() == nAttrs && - (j = vAttrs->attrs()->get(name))) - { + if (vAttrs->type() == nAttrs && (j = vAttrs->attrs()->get(name))) { vAttrs = j->value; } else { v.mkBool(false); @@ -1527,7 +1497,6 @@ void ExprOpHasAttr::eval(EvalState & state, Env & env, Value & v) v.mkBool(true); } - void ExprLambda::eval(EvalState & state, Env & env, Value & v) { v.mkLambda(&env, this); @@ -1541,7 +1510,7 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, if (neededHooks.test(EvalProfiler::preFunctionCall)) [[unlikely]] profiler.preFunctionCallHook(*this, fun, args, pos); - Finally traceExit_{[&](){ + Finally traceExit_{[&]() { if (profiler.getNeededHooks().test(EvalProfiler::postFunctionCall)) [[unlikely]] profiler.postFunctionCallHook(*this, fun, args, pos); }}; @@ -1550,8 +1519,7 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, Value vCur(fun); - auto makeAppChain = [&]() - { + auto makeAppChain = [&]() { vRes = vCur; for (auto arg : args) { auto fun2 = allocValue(); @@ -1568,9 +1536,7 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, ExprLambda & lambda(*vCur.lambda().fun); - auto size = - (!lambda.arg ? 0 : 1) + - (lambda.hasFormals() ? lambda.formals->formals.size() : 0); + auto size = (!lambda.arg ? 0 : 1) + (lambda.hasFormals() ? lambda.formals->formals.size() : 0); Env & env2(allocEnv(size)); env2.up = vCur.lambda().env; @@ -1582,7 +1548,8 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, try { forceAttrs(*args[0], lambda.pos, "while evaluating the value passed for the lambda argument"); } catch (Error & e) { - if (pos) e.addTrace(positions[pos], "from call site"); + if (pos) + e.addTrace(positions[pos], "from call site"); throw; } @@ -1597,13 +1564,14 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, auto j = args[0]->attrs()->get(i.name); if (!j) { if (!i.def) { - error("function '%1%' called without required argument '%2%'", - (lambda.name ? std::string(symbols[lambda.name]) : "anonymous lambda"), - symbols[i.name]) - .atPos(lambda.pos) - .withTrace(pos, "from call site") - .withFrame(*vCur.lambda().env, lambda) - .debugThrow(); + error( + "function '%1%' called without required argument '%2%'", + (lambda.name ? 
std::string(symbols[lambda.name]) : "anonymous lambda"), + symbols[i.name]) + .atPos(lambda.pos) + .withTrace(pos, "from call site") + .withFrame(*vCur.lambda().env, lambda) + .debugThrow(); } env2.values[displ++] = i.def->maybeThunk(*this, env2); } else { @@ -1623,9 +1591,10 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, for (auto & formal : lambda.formals->formals) formalNames.insert(std::string(symbols[formal.name])); auto suggestions = Suggestions::bestMatches(formalNames, symbols[i.name]); - error("function '%1%' called with unexpected argument '%2%'", - (lambda.name ? std::string(symbols[lambda.name]) : "anonymous lambda"), - symbols[i.name]) + error( + "function '%1%' called with unexpected argument '%2%'", + (lambda.name ? std::string(symbols[lambda.name]) : "anonymous lambda"), + symbols[i.name]) .atPos(lambda.pos) .withTrace(pos, "from call site") .withSuggestions(suggestions) @@ -1637,18 +1606,20 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, } nrFunctionCalls++; - if (countCalls) incrFunctionCall(&lambda); + if (countCalls) + incrFunctionCall(&lambda); /* Evaluate the body. */ try { auto dts = debugRepl - ? makeDebugTraceStacker( - *this, *lambda.body, env2, lambda.pos, - "while calling %s", - lambda.name - ? concatStrings("'", symbols[lambda.name], "'") - : "anonymous lambda") - : nullptr; + ? makeDebugTraceStacker( + *this, + *lambda.body, + env2, + lambda.pos, + "while calling %s", + lambda.name ? concatStrings("'", symbols[lambda.name], "'") : "anonymous lambda") + : nullptr; lambda.body->eval(*this, env2, vCur); } catch (Error & e) { @@ -1657,10 +1628,9 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, e, lambda.pos, "while calling %s", - lambda.name - ? concatStrings("'", symbols[lambda.name], "'") - : "anonymous lambda"); - if (pos) addErrorTrace(e, pos, "from call site"); + lambda.name ? concatStrings("'", symbols[lambda.name], "'") : "anonymous lambda"); + if (pos) + addErrorTrace(e, pos, "from call site"); } throw; } @@ -1681,7 +1651,8 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, auto * fn = vCur.primOp(); nrPrimOpCalls++; - if (countCalls) primOpCalls[fn->name]++; + if (countCalls) + primOpCalls[fn->name]++; try { fn->fun(*this, vCur.determinePos(noPos), args.data(), vCur); @@ -1725,12 +1696,14 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, auto fn = primOp->primOp(); nrPrimOpCalls++; - if (countCalls) primOpCalls[fn->name]++; + if (countCalls) + primOpCalls[fn->name]++; try { // TODO: // 1. Unify this and above code. Heavily redundant. - // 2. Create a fake env (arg1, arg2, etc.) and a fake expr (arg1: arg2: etc: builtins.name arg1 arg2 etc) + // 2. Create a fake env (arg1, arg2, etc.) and a fake expr (arg1: arg2: etc: builtins.name arg1 arg2 + // etc) // so the debugger allows to inspect the wrong parameters passed to the builtin. 
fn->fun(*this, vCur.determinePos(noPos), vArgs, vCur); } catch (Error & e) { @@ -1760,9 +1733,9 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, else error( - "attempt to call something which is not a function but %1%: %2%", - showType(vCur), - ValuePrinter(*this, vCur, errorPrintOptions)) + "attempt to call something which is not a function but %1%: %2%", + showType(vCur), + ValuePrinter(*this, vCur, errorPrintOptions)) .atPos(pos) .debugThrow(); } @@ -1770,18 +1743,10 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, vRes = vCur; } - void ExprCall::eval(EvalState & state, Env & env, Value & v) { - auto dts = state.debugRepl - ? makeDebugTraceStacker( - state, - *this, - env, - getPos(), - "while calling a function" - ) - : nullptr; + auto dts = + state.debugRepl ? makeDebugTraceStacker(state, *this, env, getPos(), "while calling a function") : nullptr; Value vFun; fun->eval(state, env, vFun); @@ -1799,7 +1764,6 @@ void ExprCall::eval(EvalState & state, Env & env, Value & v) state.callFunction(vFun, vArgs, v, pos); } - // Lifted out of callFunction() because it creates a temporary that // prevents tail-call optimisation. void EvalState::incrFunctionCall(ExprLambda * fun) @@ -1807,7 +1771,6 @@ void EvalState::incrFunctionCall(ExprLambda * fun) functionCalls[fun]++; } - void EvalState::autoCallFunction(const Bindings & args, Value & fun, Value & res) { auto pos = fun.determinePos(noPos); @@ -1844,12 +1807,16 @@ void EvalState::autoCallFunction(const Bindings & args, Value & fun, Value & res if (j) { attrs.insert(*j); } else if (!i.def) { - error(R"(cannot evaluate a function that has an argument without a value ('%1%') + error( + R"(cannot evaluate a function that has an argument without a value ('%1%') Nix attempted to evaluate a function as a top level expression; in this case it must have its arguments supplied either by default values, or passed explicitly with '--arg' or '--argstr'. See -https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", symbols[i.name]) - .atPos(i.pos).withFrame(*fun.lambda().env, *fun.lambda().fun).debugThrow(); +https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", + symbols[i.name]) + .atPos(i.pos) + .withFrame(*fun.lambda().env, *fun.lambda().fun) + .debugThrow(); } } } @@ -1857,7 +1824,6 @@ https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", symbo callFunction(fun, allocValue()->mkAttrs(attrs), res, pos); } - void ExprWith::eval(EvalState & state, Env & env, Value & v) { Env & env2(state.allocEnv(1)); @@ -1867,14 +1833,12 @@ void ExprWith::eval(EvalState & state, Env & env, Value & v) body->eval(state, env2, v); } - void ExprIf::eval(EvalState & state, Env & env, Value & v) { // We cheat in the parser, and pass the position of the condition as the position of the if itself. (state.evalBool(env, cond, pos, "while evaluating a branch condition") ? 
then : else_)->eval(state, env, v); } - void ExprAssert::eval(EvalState & state, Env & env, Value & v) { if (!state.evalBool(env, cond, pos, "in the condition of the assert statement")) { @@ -1884,8 +1848,10 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v) if (auto eq = dynamic_cast(cond)) { try { - Value v1; eq->e1->eval(state, env, v1); - Value v2; eq->e2->eval(state, env, v2); + Value v1; + eq->e1->eval(state, env, v1); + Value v2; + eq->e2->eval(state, env, v2); state.assertEqValues(v1, v2, eq->pos, "in an equality assertion"); } catch (AssertionError & e) { e.addTrace(state.positions[pos], "while evaluating the condition of the assertion '%s'", exprStr); @@ -1898,47 +1864,50 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v) body->eval(state, env, v); } - void ExprOpNot::eval(EvalState & state, Env & env, Value & v) { v.mkBool(!state.evalBool(env, e, getPos(), "in the argument of the not operator")); // XXX: FIXME: ! } - void ExprOpEq::eval(EvalState & state, Env & env, Value & v) { - Value v1; e1->eval(state, env, v1); - Value v2; e2->eval(state, env, v2); + Value v1; + e1->eval(state, env, v1); + Value v2; + e2->eval(state, env, v2); v.mkBool(state.eqValues(v1, v2, pos, "while testing two values for equality")); } - void ExprOpNEq::eval(EvalState & state, Env & env, Value & v) { - Value v1; e1->eval(state, env, v1); - Value v2; e2->eval(state, env, v2); + Value v1; + e1->eval(state, env, v1); + Value v2; + e2->eval(state, env, v2); v.mkBool(!state.eqValues(v1, v2, pos, "while testing two values for inequality")); } - void ExprOpAnd::eval(EvalState & state, Env & env, Value & v) { - v.mkBool(state.evalBool(env, e1, pos, "in the left operand of the AND (&&) operator") && state.evalBool(env, e2, pos, "in the right operand of the AND (&&) operator")); + v.mkBool( + state.evalBool(env, e1, pos, "in the left operand of the AND (&&) operator") + && state.evalBool(env, e2, pos, "in the right operand of the AND (&&) operator")); } - void ExprOpOr::eval(EvalState & state, Env & env, Value & v) { - v.mkBool(state.evalBool(env, e1, pos, "in the left operand of the OR (||) operator") || state.evalBool(env, e2, pos, "in the right operand of the OR (||) operator")); + v.mkBool( + state.evalBool(env, e1, pos, "in the left operand of the OR (||) operator") + || state.evalBool(env, e2, pos, "in the right operand of the OR (||) operator")); } - void ExprOpImpl::eval(EvalState & state, Env & env, Value & v) { - v.mkBool(!state.evalBool(env, e1, pos, "in the left operand of the IMPL (->) operator") || state.evalBool(env, e2, pos, "in the right operand of the IMPL (->) operator")); + v.mkBool( + !state.evalBool(env, e1, pos, "in the left operand of the IMPL (->) operator") + || state.evalBool(env, e2, pos, "in the right operand of the IMPL (->) operator")); } - void ExprOpUpdate::eval(EvalState & state, Env & env, Value & v) { Value v1, v2; @@ -1947,8 +1916,14 @@ void ExprOpUpdate::eval(EvalState & state, Env & env, Value & v) state.nrOpUpdates++; - if (v1.attrs()->size() == 0) { v = v2; return; } - if (v2.attrs()->size() == 0) { v = v1; return; } + if (v1.attrs()->size() == 0) { + v = v2; + return; + } + if (v2.attrs()->size() == 0) { + v = v1; + return; + } auto attrs = state.buildBindings(v1.attrs()->size() + v2.attrs()->size()); @@ -1960,33 +1935,36 @@ void ExprOpUpdate::eval(EvalState & state, Env & env, Value & v) while (i != v1.attrs()->end() && j != v2.attrs()->end()) { if (i->name == j->name) { attrs.insert(*j); - ++i; ++j; - } - else if (i->name < j->name) + ++i; 
+ ++j; + } else if (i->name < j->name) attrs.insert(*i++); else attrs.insert(*j++); } - while (i != v1.attrs()->end()) attrs.insert(*i++); - while (j != v2.attrs()->end()) attrs.insert(*j++); + while (i != v1.attrs()->end()) + attrs.insert(*i++); + while (j != v2.attrs()->end()) + attrs.insert(*j++); v.mkAttrs(attrs.alreadySorted()); state.nrOpUpdateValuesCopied += v.attrs()->size(); } - void ExprOpConcatLists::eval(EvalState & state, Env & env, Value & v) { - Value v1; e1->eval(state, env, v1); - Value v2; e2->eval(state, env, v2); - Value * lists[2] = { &v1, &v2 }; + Value v1; + e1->eval(state, env, v1); + Value v2; + e2->eval(state, env, v2); + Value * lists[2] = {&v1, &v2}; state.concatLists(v, 2, lists, pos, "while evaluating one of the elements to concatenate"); } - -void EvalState::concatLists(Value & v, size_t nrLists, Value * const * lists, const PosIdx pos, std::string_view errorCtx) +void EvalState::concatLists( + Value & v, size_t nrLists, Value * const * lists, const PosIdx pos, std::string_view errorCtx) { nrListConcats++; @@ -1996,7 +1974,8 @@ void EvalState::concatLists(Value & v, size_t nrLists, Value * const * lists, co forceList(*lists[n], pos, errorCtx); auto l = lists[n]->listSize(); len += l; - if (l) nonEmpty = lists[n]; + if (l) + nonEmpty = lists[n]; } if (nonEmpty && len == nonEmpty->listSize()) { @@ -2016,7 +1995,6 @@ void EvalState::concatLists(Value & v, size_t nrLists, Value * const * lists, co v.mkList(list); } - void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) { NixStringContext context; @@ -2031,7 +2009,8 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) const auto str = [&] { std::string result; result.reserve(sSize); - for (const auto & part : s) result += *part; + for (const auto & part : s) + result += *part; return result; }; /* c_str() is not str().c_str() because we want to create a string @@ -2070,7 +2049,9 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) if (auto checked = newN.valueChecked(); checked.has_value()) { n = NixInt(*checked); } else { - state.error("integer overflow in adding %1% + %2%", n, vTmp.integer()).atPos(i_pos).debugThrow(); + state.error("integer overflow in adding %1% + %2%", n, vTmp.integer()) + .atPos(i_pos) + .debugThrow(); } } else if (vTmp.type() == nFloat) { // Upgrade the type from int to float; @@ -2078,22 +2059,28 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) nf = n.value; nf += vTmp.fpoint(); } else - state.error("cannot add %1% to an integer", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow(); + state.error("cannot add %1% to an integer", showType(vTmp)) + .atPos(i_pos) + .withFrame(env, *this) + .debugThrow(); } else if (firstType == nFloat) { if (vTmp.type() == nInt) { nf += vTmp.integer().value; } else if (vTmp.type() == nFloat) { nf += vTmp.fpoint(); } else - state.error("cannot add %1% to a float", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow(); + state.error("cannot add %1% to a float", showType(vTmp)) + .atPos(i_pos) + .withFrame(env, *this) + .debugThrow(); } else { - if (s.empty()) s.reserve(es->size()); + if (s.empty()) + s.reserve(es->size()); /* skip canonization of first path, which would only be not canonized in the first place if it's coming from a ./${foo} type path */ - auto part = state.coerceToString(i_pos, vTmp, context, - "while evaluating a path segment", - false, firstType == nString, !first); + auto part = state.coerceToString( + i_pos, vTmp, context, "while evaluating 
a path segment", false, firstType == nString, !first); sSize += part->size(); s.emplace_back(std::move(part)); } @@ -2107,13 +2094,15 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) v.mkFloat(nf); else if (firstType == nPath) { if (!context.empty()) - state.error("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow(); + state.error("a string that refers to a store path cannot be appended to a path") + .atPos(pos) + .withFrame(env, *this) + .debugThrow(); v.mkPath(state.rootPath(CanonPath(str()))); } else v.mkStringMove(c_str(), context); } - void ExprPos::eval(EvalState & state, Env & env, Value & v) { state.mkPos(v, pos); @@ -2124,10 +2113,9 @@ void ExprBlackHole::eval(EvalState & state, [[maybe_unused]] Env & env, Value & throwInfiniteRecursionError(state, v); } -[[gnu::noinline]] [[noreturn]] void ExprBlackHole::throwInfiniteRecursionError(EvalState & state, Value &v) { - state.error("infinite recursion encountered") - .atPos(v.determinePos(noPos)) - .debugThrow(); +[[gnu::noinline]] [[noreturn]] void ExprBlackHole::throwInfiniteRecursionError(EvalState & state, Value & v) +{ + state.error("infinite recursion encountered").atPos(v.determinePos(noPos)).debugThrow(); } // always force this to be separate, otherwise forceValue may inline it and take @@ -2146,7 +2134,6 @@ void EvalState::tryFixupBlackHolePos(Value & v, PosIdx pos) } } - void EvalState::forceValueDeep(Value & v) { std::set seen; @@ -2154,7 +2141,8 @@ void EvalState::forceValueDeep(Value & v) std::function recurse; recurse = [&](Value & v) { - if (!seen.insert(&v).second) return; + if (!seen.insert(&v).second) + return; forceValue(v, v.determinePos(noPos)); @@ -2162,10 +2150,14 @@ void EvalState::forceValueDeep(Value & v) for (auto & i : *v.attrs()) try { // If the value is a thunk, we're evaling. Otherwise no trace necessary. - auto dts = debugRepl && i.value->isThunk() - ? makeDebugTraceStacker(*this, *i.value->thunk().expr, *i.value->thunk().env, i.pos, - "while evaluating the attribute '%1%'", symbols[i.name]) - : nullptr; + auto dts = debugRepl && i.value->isThunk() ? 
makeDebugTraceStacker( + *this, + *i.value->thunk().expr, + *i.value->thunk().env, + i.pos, + "while evaluating the attribute '%1%'", + symbols[i.name]) + : nullptr; recurse(*i.value); } catch (Error & e) { @@ -2183,17 +2175,15 @@ void EvalState::forceValueDeep(Value & v) recurse(v); } - NixInt EvalState::forceInt(Value & v, const PosIdx pos, std::string_view errorCtx) { try { forceValue(v, pos); if (v.type() != nInt) error( - "expected an integer but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions) - ).atPos(pos).debugThrow(); + "expected an integer but found %1%: %2%", showType(v), ValuePrinter(*this, v, errorPrintOptions)) + .atPos(pos) + .debugThrow(); return v.integer(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -2203,7 +2193,6 @@ NixInt EvalState::forceInt(Value & v, const PosIdx pos, std::string_view errorCt return v.integer(); } - NixFloat EvalState::forceFloat(Value & v, const PosIdx pos, std::string_view errorCtx) { try { @@ -2212,10 +2201,9 @@ NixFloat EvalState::forceFloat(Value & v, const PosIdx pos, std::string_view err return v.integer().value; else if (v.type() != nFloat) error( - "expected a float but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions) - ).atPos(pos).debugThrow(); + "expected a float but found %1%: %2%", showType(v), ValuePrinter(*this, v, errorPrintOptions)) + .atPos(pos) + .debugThrow(); return v.fpoint(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -2223,17 +2211,15 @@ NixFloat EvalState::forceFloat(Value & v, const PosIdx pos, std::string_view err } } - bool EvalState::forceBool(Value & v, const PosIdx pos, std::string_view errorCtx) { try { forceValue(v, pos); if (v.type() != nBool) error( - "expected a Boolean but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions) - ).atPos(pos).debugThrow(); + "expected a Boolean but found %1%: %2%", showType(v), ValuePrinter(*this, v, errorPrintOptions)) + .atPos(pos) + .debugThrow(); return v.boolean(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -2247,9 +2233,7 @@ Bindings::const_iterator EvalState::getAttr(Symbol attrSym, const Bindings * att { auto value = attrSet->find(attrSym); if (value == attrSet->end()) { - error("attribute '%s' missing", symbols[attrSym]) - .withTrace(noPos, errorCtx) - .debugThrow(); + error("attribute '%s' missing", symbols[attrSym]).withTrace(noPos, errorCtx).debugThrow(); } return value; } @@ -2259,34 +2243,30 @@ bool EvalState::isFunctor(const Value & fun) const return fun.type() == nAttrs && fun.attrs()->find(sFunctor) != fun.attrs()->end(); } - void EvalState::forceFunction(Value & v, const PosIdx pos, std::string_view errorCtx) { try { forceValue(v, pos); if (v.type() != nFunction && !isFunctor(v)) error( - "expected a function but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions) - ).atPos(pos).debugThrow(); + "expected a function but found %1%: %2%", showType(v), ValuePrinter(*this, v, errorPrintOptions)) + .atPos(pos) + .debugThrow(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; } } - std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string_view errorCtx) { try { forceValue(v, pos); if (v.type() != nString) error( - "expected a string but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions) - ).atPos(pos).debugThrow(); + "expected a string but found %1%: %2%", showType(v), ValuePrinter(*this, v, errorPrintOptions)) + .atPos(pos) + .debugThrow(); return 
v.string_view(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -2294,54 +2274,65 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string } } - void copyContext(const Value & v, NixStringContext & context, const ExperimentalFeatureSettings & xpSettings) { if (v.context()) - for (const char * * p = v.context(); *p; ++p) + for (const char ** p = v.context(); *p; ++p) context.insert(NixStringContextElem::parse(*p, xpSettings)); } - -std::string_view EvalState::forceString(Value & v, NixStringContext & context, const PosIdx pos, std::string_view errorCtx, const ExperimentalFeatureSettings & xpSettings) +std::string_view EvalState::forceString( + Value & v, + NixStringContext & context, + const PosIdx pos, + std::string_view errorCtx, + const ExperimentalFeatureSettings & xpSettings) { auto s = forceString(v, pos, errorCtx); copyContext(v, context, xpSettings); return s; } - std::string_view EvalState::forceStringNoCtx(Value & v, const PosIdx pos, std::string_view errorCtx) { auto s = forceString(v, pos, errorCtx); if (v.context()) { - error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]).withTrace(pos, errorCtx).debugThrow(); + error( + "the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]) + .withTrace(pos, errorCtx) + .debugThrow(); } return s; } - bool EvalState::isDerivation(Value & v) { - if (v.type() != nAttrs) return false; + if (v.type() != nAttrs) + return false; auto i = v.attrs()->get(sType); - if (!i) return false; + if (!i) + return false; forceValue(*i->value, i->pos); - if (i->value->type() != nString) return false; + if (i->value->type() != nString) + return false; return i->value->string_view().compare("derivation") == 0; } - -std::optional EvalState::tryAttrsToString(const PosIdx pos, Value & v, - NixStringContext & context, bool coerceMore, bool copyToStore) +std::optional +EvalState::tryAttrsToString(const PosIdx pos, Value & v, NixStringContext & context, bool coerceMore, bool copyToStore) { auto i = v.attrs()->find(sToString); if (i != v.attrs()->end()) { Value v1; callFunction(*i->value, v, v1, pos); - return coerceToString(pos, v1, context, - "while evaluating the result of the `__toString` attribute", - coerceMore, copyToStore).toOwned(); + return coerceToString( + pos, + v1, + context, + "while evaluating the result of the `__toString` attribute", + coerceMore, + copyToStore) + .toOwned(); } return {}; @@ -2364,14 +2355,12 @@ BackedStringView EvalState::coerceToString( } if (v.type() == nPath) { - return - !canonicalizePath && !copyToStore - ? // FIXME: hack to preserve path literals that end in a - // slash, as in /foo/${x}. - v.pathStr() - : copyToStore - ? store->printStorePath(copyPathToStore(context, v.path())) - : std::string(v.path().path.abs()); + return !canonicalizePath && !copyToStore + ? // FIXME: hack to preserve path literals that end in a + // slash, as in /foo/${x}. + v.pathStr() + : copyToStore ? 
store->printStorePath(copyPathToStore(context, v.path())) + : std::string(v.path().path.abs()); } if (v.type() == nAttrs) { @@ -2381,15 +2370,11 @@ BackedStringView EvalState::coerceToString( auto i = v.attrs()->find(sOutPath); if (i == v.attrs()->end()) { error( - "cannot coerce %1% to a string: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions) - ) + "cannot coerce %1% to a string: %2%", showType(v), ValuePrinter(*this, v, errorPrintOptions)) .withTrace(pos, errorCtx) .debugThrow(); } - return coerceToString(pos, *i->value, context, errorCtx, - coerceMore, copyToStore, canonicalizePath); + return coerceToString(pos, *i->value, context, errorCtx, coerceMore, copyToStore, canonicalizePath); } if (v.type() == nExternal) { @@ -2404,20 +2389,30 @@ BackedStringView EvalState::coerceToString( if (coerceMore) { /* Note that `false' is represented as an empty string for shell scripting convenience, just like `null'. */ - if (v.type() == nBool && v.boolean()) return "1"; - if (v.type() == nBool && !v.boolean()) return ""; - if (v.type() == nInt) return std::to_string(v.integer().value); - if (v.type() == nFloat) return std::to_string(v.fpoint()); - if (v.type() == nNull) return ""; + if (v.type() == nBool && v.boolean()) + return "1"; + if (v.type() == nBool && !v.boolean()) + return ""; + if (v.type() == nInt) + return std::to_string(v.integer().value); + if (v.type() == nFloat) + return std::to_string(v.fpoint()); + if (v.type() == nNull) + return ""; if (v.isList()) { std::string result; auto listView = v.listView(); for (auto [n, v2] : enumerate(listView)) { try { - result += *coerceToString(pos, *v2, context, - "while evaluating one element of the list", - coerceMore, copyToStore, canonicalizePath); + result += *coerceToString( + pos, + *v2, + context, + "while evaluating one element of the list", + coerceMore, + copyToStore, + canonicalizePath); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; @@ -2431,15 +2426,11 @@ BackedStringView EvalState::coerceToString( } } - error("cannot coerce %1% to a string: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions) - ) + error("cannot coerce %1% to a string: %2%", showType(v), ValuePrinter(*this, v, errorPrintOptions)) .withTrace(pos, errorCtx) .debugThrow(); } - StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePath & path) { if (nix::isDerivation(path.path.abs())) @@ -2447,31 +2438,26 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat auto dstPathCached = get(*srcToStore.lock(), path); - auto dstPath = dstPathCached - ? *dstPathCached - : [&]() { - auto dstPath = fetchToStore( - fetchSettings, - *store, - path.resolveSymlinks(SymlinkResolution::Ancestors), - settings.readOnlyMode ? FetchMode::DryRun : FetchMode::Copy, - path.baseName(), - ContentAddressMethod::Raw::NixArchive, - nullptr, - repair); - allowPath(dstPath); - srcToStore.lock()->try_emplace(path, dstPath); - printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath)); - return dstPath; - }(); + auto dstPath = dstPathCached ? *dstPathCached : [&]() { + auto dstPath = fetchToStore( + fetchSettings, + *store, + path.resolveSymlinks(SymlinkResolution::Ancestors), + settings.readOnlyMode ? 
FetchMode::DryRun : FetchMode::Copy, + path.baseName(), + ContentAddressMethod::Raw::NixArchive, + nullptr, + repair); + allowPath(dstPath); + srcToStore.lock()->try_emplace(path, dstPath); + printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath)); + return dstPath; + }(); - context.insert(NixStringContextElem::Opaque { - .path = dstPath - }); + context.insert(NixStringContextElem::Opaque{.path = dstPath}); return dstPath; } - SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx) { try { @@ -2504,8 +2490,8 @@ SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, NixStringContext return rootPath(path); } - -StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx) +StorePath +EvalState::coerceToStorePath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx) { auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned(); if (auto storePath = store->maybeParseStorePath(path)) @@ -2513,37 +2499,35 @@ StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, NixStringCon error("path '%1%' is not in the Nix store", path).withTrace(pos, errorCtx).debugThrow(); } - -std::pair EvalState::coerceToSingleDerivedPathUnchecked(const PosIdx pos, Value & v, std::string_view errorCtx, const ExperimentalFeatureSettings & xpSettings) +std::pair EvalState::coerceToSingleDerivedPathUnchecked( + const PosIdx pos, Value & v, std::string_view errorCtx, const ExperimentalFeatureSettings & xpSettings) { NixStringContext context; auto s = forceString(v, context, pos, errorCtx, xpSettings); auto csize = context.size(); if (csize != 1) - error( - "string '%s' has %d entries in its context. It should only have exactly one entry", - s, csize) - .withTrace(pos, errorCtx).debugThrow(); - auto derivedPath = std::visit(overloaded { - [&](NixStringContextElem::Opaque && o) -> SingleDerivedPath { - return std::move(o); + error("string '%s' has %d entries in its context. It should only have exactly one entry", s, csize) + .withTrace(pos, errorCtx) + .debugThrow(); + auto derivedPath = std::visit( + overloaded{ + [&](NixStringContextElem::Opaque && o) -> SingleDerivedPath { return std::move(o); }, + [&](NixStringContextElem::DrvDeep &&) -> SingleDerivedPath { + error( + "string '%s' has a context which refers to a complete source and binary closure. This is not supported at this time", + s) + .withTrace(pos, errorCtx) + .debugThrow(); + }, + [&](NixStringContextElem::Built && b) -> SingleDerivedPath { return std::move(b); }, }, - [&](NixStringContextElem::DrvDeep &&) -> SingleDerivedPath { - error( - "string '%s' has a context which refers to a complete source and binary closure. 
This is not supported at this time", - s).withTrace(pos, errorCtx).debugThrow(); - }, - [&](NixStringContextElem::Built && b) -> SingleDerivedPath { - return std::move(b); - }, - }, ((NixStringContextElem &&) *context.begin()).raw); + ((NixStringContextElem &&) *context.begin()).raw); return { std::move(derivedPath), std::move(s), }; } - SingleDerivedPath EvalState::coerceToSingleDerivedPath(const PosIdx pos, Value & v, std::string_view errorCtx) { auto [derivedPath, s_] = coerceToSingleDerivedPathUnchecked(pos, v, errorCtx); @@ -2552,26 +2536,28 @@ SingleDerivedPath EvalState::coerceToSingleDerivedPath(const PosIdx pos, Value & if (s != sExpected) { /* `std::visit` is used here just to provide a more precise error message. */ - std::visit(overloaded { - [&](const SingleDerivedPath::Opaque & o) { - error( - "path string '%s' has context with the different path '%s'", - s, sExpected) - .withTrace(pos, errorCtx).debugThrow(); - }, - [&](const SingleDerivedPath::Built & b) { - error( - "string '%s' has context with the output '%s' from derivation '%s', but the string is not the right placeholder for this derivation output. It should be '%s'", - s, b.output, b.drvPath->to_string(*store), sExpected) - .withTrace(pos, errorCtx).debugThrow(); - } - }, derivedPath.raw()); + std::visit( + overloaded{ + [&](const SingleDerivedPath::Opaque & o) { + error("path string '%s' has context with the different path '%s'", s, sExpected) + .withTrace(pos, errorCtx) + .debugThrow(); + }, + [&](const SingleDerivedPath::Built & b) { + error( + "string '%s' has context with the output '%s' from derivation '%s', but the string is not the right placeholder for this derivation output. It should be '%s'", + s, + b.output, + b.drvPath->to_string(*store), + sExpected) + .withTrace(pos, errorCtx) + .debugThrow(); + }}, + derivedPath.raw()); } return derivedPath; } - - // NOTE: This implementation must match eqValues! // We accept this burden because informative error messages for // `assert a == b; x` are critical for our users' testing UX. @@ -2774,7 +2760,9 @@ void EvalState::assertEqValues(Value & v1, Value & v2, const PosIdx pos, std::st // Also note that this probably ran after `eqValues`, which implements // the same logic more efficiently (without having to unwind stacks), // so maybe `assertEqValues` and `eqValues` are out of sync. Check it for solutions. - error("assertEqValues: cannot compare %1% with %2%", showType(v1), showType(v2)).withTrace(pos, errorCtx).panic(); + error("assertEqValues: cannot compare %1% with %2%", showType(v1), showType(v2)) + .withTrace(pos, errorCtx) + .panic(); } } @@ -2787,7 +2775,8 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v /* !!! Hack to support some old broken code that relies on pointer equality tests between sets. (Specifically, builderDefs calls uniqList on a list of sets.) Will remove this eventually. */ - if (&v1 == &v2) return true; + if (&v1 == &v2) + return true; // Special case type-compatibility between float and int if (v1.type() == nInt && v2.type() == nFloat) @@ -2796,73 +2785,79 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v return v1.fpoint() == v2.integer().value; // All other types are not compatible with each other. 
- if (v1.type() != v2.type()) return false; + if (v1.type() != v2.type()) + return false; switch (v1.type()) { - case nInt: - return v1.integer() == v2.integer(); + case nInt: + return v1.integer() == v2.integer(); - case nBool: - return v1.boolean() == v2.boolean(); + case nBool: + return v1.boolean() == v2.boolean(); - case nString: - return strcmp(v1.c_str(), v2.c_str()) == 0; + case nString: + return strcmp(v1.c_str(), v2.c_str()) == 0; - case nPath: - return - // FIXME: compare accessors by their fingerprint. - v1.pathAccessor() == v2.pathAccessor() - && strcmp(v1.pathStr(), v2.pathStr()) == 0; + case nPath: + return + // FIXME: compare accessors by their fingerprint. + v1.pathAccessor() == v2.pathAccessor() && strcmp(v1.pathStr(), v2.pathStr()) == 0; - case nNull: - return true; + case nNull: + return true; - case nList: - if (v1.listSize() != v2.listSize()) return false; - for (size_t n = 0; n < v1.listSize(); ++n) - if (!eqValues(*v1.listView()[n], *v2.listView()[n], pos, errorCtx)) return false; - return true; + case nList: + if (v1.listSize() != v2.listSize()) + return false; + for (size_t n = 0; n < v1.listSize(); ++n) + if (!eqValues(*v1.listView()[n], *v2.listView()[n], pos, errorCtx)) + return false; + return true; - case nAttrs: { - /* If both sets denote a derivation (type = "derivation"), - then compare their outPaths. */ - if (isDerivation(v1) && isDerivation(v2)) { - auto i = v1.attrs()->get(sOutPath); - auto j = v2.attrs()->get(sOutPath); - if (i && j) - return eqValues(*i->value, *j->value, pos, errorCtx); - } - - if (v1.attrs()->size() != v2.attrs()->size()) return false; - - /* Otherwise, compare the attributes one by one. */ - Bindings::const_iterator i, j; - for (i = v1.attrs()->begin(), j = v2.attrs()->begin(); i != v1.attrs()->end(); ++i, ++j) - if (i->name != j->name || !eqValues(*i->value, *j->value, pos, errorCtx)) - return false; - - return true; + case nAttrs: { + /* If both sets denote a derivation (type = "derivation"), + then compare their outPaths. */ + if (isDerivation(v1) && isDerivation(v2)) { + auto i = v1.attrs()->get(sOutPath); + auto j = v2.attrs()->get(sOutPath); + if (i && j) + return eqValues(*i->value, *j->value, pos, errorCtx); } - /* Functions are incomparable. */ - case nFunction: + if (v1.attrs()->size() != v2.attrs()->size()) return false; - case nExternal: - return *v1.external() == *v2.external(); + /* Otherwise, compare the attributes one by one. */ + Bindings::const_iterator i, j; + for (i = v1.attrs()->begin(), j = v2.attrs()->begin(); i != v1.attrs()->end(); ++i, ++j) + if (i->name != j->name || !eqValues(*i->value, *j->value, pos, errorCtx)) + return false; - case nFloat: - // !!! - return v1.fpoint() == v2.fpoint(); + return true; + } - case nThunk: // Must not be left by forceValue - assert(false); - default: // Note that we pass compiler flags that should make `default:` unreachable. - error("eqValues: cannot compare %1% with %2%", showType(v1), showType(v2)).withTrace(pos, errorCtx).panic(); + /* Functions are incomparable. */ + case nFunction: + return false; + + case nExternal: + return *v1.external() == *v2.external(); + + case nFloat: + // !!! + return v1.fpoint() == v2.fpoint(); + + case nThunk: // Must not be left by forceValue + assert(false); + default: // Note that we pass compiler flags that should make `default:` unreachable. 
+ error("eqValues: cannot compare %1% with %2%", showType(v1), showType(v2)) + .withTrace(pos, errorCtx) + .panic(); } } -bool EvalState::fullGC() { +bool EvalState::fullGC() +{ #if NIX_USE_BOEHMGC GC_gcollect(); // Check that it ran. We might replace this with a version that uses more @@ -2928,9 +2923,9 @@ void EvalState::printStatistics() #endif #if NIX_USE_BOEHMGC {GC_is_incremental_mode() ? "gcNonIncremental" : "gc", gcFullOnlyTime}, -#ifndef _WIN32 // TODO implement +# ifndef _WIN32 // TODO implement {GC_is_incremental_mode() ? "gcNonIncrementalFraction" : "gcFraction", gcFullOnlyTime / cpuTime}, -#endif +# endif #endif }; topObj["envs"] = { @@ -2981,7 +2976,7 @@ void EvalState::printStatistics() if (countCalls) { topObj["primops"] = primOpCalls; { - auto& list = topObj["functions"]; + auto & list = topObj["functions"]; list = json::array(); for (auto & [fun, count] : functionCalls) { json obj = json::object(); @@ -3019,7 +3014,7 @@ void EvalState::printStatistics() if (getEnv("NIX_SHOW_SYMBOLS").value_or("0") != "0") { // XXX: overrides earlier assignment topObj["symbols"] = json::array(); - auto &list = topObj["symbols"]; + auto & list = topObj["symbols"]; symbols.dump([&](std::string_view s) { list.emplace_back(s); }); } if (outPath == "-") { @@ -3029,7 +3024,6 @@ void EvalState::printStatistics() } } - SourcePath resolveExprPath(SourcePath path, bool addDefaultNix) { unsigned int followCount = 0, maxFollow = 1024; @@ -3041,7 +3035,8 @@ SourcePath resolveExprPath(SourcePath path, bool addDefaultNix) if (++followCount >= maxFollow) throw Error("too many symbolic links encountered while traversing the path '%s'", path); auto p = path.parent().resolveSymlinks() / path.baseName(); - if (p.lstat().type != SourceAccessor::tSymlink) break; + if (p.lstat().type != SourceAccessor::tSymlink) + break; path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))}; } @@ -3052,13 +3047,11 @@ SourcePath resolveExprPath(SourcePath path, bool addDefaultNix) return path; } - Expr * EvalState::parseExprFromFile(const SourcePath & path) { return parseExprFromFile(path, staticBaseEnv); } - Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr & staticEnv) { auto buffer = path.resolveSymlinks().readFile(); @@ -3067,8 +3060,8 @@ Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr & staticEnv) +Expr * +EvalState::parseExprFromString(std::string s_, const SourcePath & basePath, std::shared_ptr & staticEnv) { // NOTE this method (and parseStdin) must take care to *fully copy* their input // into their respective Pos::Origin until the parser stops overwriting its input @@ -3078,19 +3071,17 @@ Expr * EvalState::parseExprFromString(std::string s_, const SourcePath & basePat return parse(s_.data(), s_.size(), Pos::String{.source = s}, basePath, staticEnv); } - Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath) { return parseExprFromString(std::move(s), basePath, staticBaseEnv); } - Expr * EvalState::parseStdin() { // NOTE this method (and parseExprFromString) must take care to *fully copy* their // input into their respective Pos::Origin until the parser stops overwriting its // input data. 
- //Activity act(*logger, lvlTalkative, "parsing standard input"); + // Activity act(*logger, lvlTalkative, "parsing standard input"); auto buffer = drainFD(0); // drainFD should have left some extra space for terminators buffer.append("\0\0", 2); @@ -3098,46 +3089,47 @@ Expr * EvalState::parseStdin() return parse(buffer.data(), buffer.size(), Pos::Stdin{.source = s}, rootPath("."), staticBaseEnv); } - SourcePath EvalState::findFile(const std::string_view path) { return findFile(lookupPath, path); } - SourcePath EvalState::findFile(const LookupPath & lookupPath, const std::string_view path, const PosIdx pos) { for (auto & i : lookupPath.elements) { auto suffixOpt = i.prefix.suffixIfPotentialMatch(path); - if (!suffixOpt) continue; + if (!suffixOpt) + continue; auto suffix = *suffixOpt; auto rOpt = resolveLookupPathPath(i.path); - if (!rOpt) continue; + if (!rOpt) + continue; auto r = *rOpt; auto res = (r / CanonPath(suffix)).resolveSymlinks(); - if (res.pathExists()) return res; + if (res.pathExists()) + return res; } if (hasPrefix(path, "nix/")) return {corepkgsFS, CanonPath(path.substr(3))}; error( - settings.pureEval - ? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)" - : "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)", - path - ).atPos(pos).debugThrow(); + settings.pureEval ? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)" + : "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)", + path) + .atPos(pos) + .debugThrow(); } - std::optional EvalState::resolveLookupPathPath(const LookupPath::Path & value0, bool initAccessControl) { auto & value = value0.s; auto i = lookupPathResolved.find(value); - if (i != lookupPathResolved.end()) return i->second; + if (i != lookupPathResolved.end()) + return i->second; auto finish = [&](std::optional res) { if (res) @@ -3150,16 +3142,11 @@ std::optional EvalState::resolveLookupPathPath(const LookupPath::Pat if (EvalSettings::isPseudoUrl(value)) { try { - auto accessor = fetchers::downloadTarball( - store, - fetchSettings, - EvalSettings::resolvePseudoUrl(value)); + auto accessor = fetchers::downloadTarball(store, fetchSettings, EvalSettings::resolvePseudoUrl(value)); auto storePath = fetchToStore(fetchSettings, *store, SourcePath(accessor), FetchMode::Copy); return finish(this->storePath(storePath)); } catch (Error & e) { - logWarning({ - .msg = HintFmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value) - }); + logWarning({.msg = HintFmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)}); } } @@ -3182,39 +3169,34 @@ std::optional EvalState::resolveLookupPathPath(const LookupPath::Pat if (store->isInStore(path.path.abs())) { try { allowClosure(store->toStorePath(path.path.abs()).first); - } catch (InvalidPath &) { } + } catch (InvalidPath &) { + } } } if (path.resolveSymlinks().pathExists()) return finish(std::move(path)); else { - logWarning({ - .msg = HintFmt("Nix search path entry '%1%' does not exist, ignoring", value) - }); + logWarning({.msg = HintFmt("Nix search path entry '%1%' does not exist, ignoring", value)}); } } return finish(std::nullopt); } - Expr * EvalState::parse( - char * text, - size_t length, - Pos::Origin origin, - const SourcePath & basePath, - std::shared_ptr & staticEnv) + char * text, size_t length, Pos::Origin origin, const SourcePath & basePath, std::shared_ptr & staticEnv) { DocCommentMap tmpDocComments; // Only used when not origin is not a SourcePath - DocCommentMap 
*docComments = &tmpDocComments; + DocCommentMap * docComments = &tmpDocComments; if (auto sourcePath = std::get_if(&origin)) { auto [it, _] = positionToDocComment.try_emplace(*sourcePath); docComments = &it->second; } - auto result = parseExprFromBuf(text, length, origin, basePath, symbols, settings, positions, *docComments, rootFS, exprSymbols); + auto result = parseExprFromBuf( + text, length, origin, basePath, symbols, settings, positions, *docComments, rootFS, exprSymbols); result->bindVars(*this, staticEnv); @@ -3238,21 +3220,19 @@ DocComment EvalState::getDocCommentForPos(PosIdx pos) return it->second; } -std::string ExternalValueBase::coerceToString(EvalState & state, const PosIdx & pos, NixStringContext & context, bool copyMore, bool copyToStore) const +std::string ExternalValueBase::coerceToString( + EvalState & state, const PosIdx & pos, NixStringContext & context, bool copyMore, bool copyToStore) const { - state.error( - "cannot coerce %1% to a string: %2%", showType(), *this - ).atPos(pos).debugThrow(); + state.error("cannot coerce %1% to a string: %2%", showType(), *this).atPos(pos).debugThrow(); } - bool ExternalValueBase::operator==(const ExternalValueBase & b) const noexcept { return false; } - -std::ostream & operator << (std::ostream & str, const ExternalValueBase & v) { +std::ostream & operator<<(std::ostream & str, const ExternalValueBase & v) +{ return v.print(str); } @@ -3269,5 +3249,4 @@ void forceNoNullByte(std::string_view s, std::function pos) } } - -} +} // namespace nix diff --git a/src/libexpr/function-trace.cc b/src/libexpr/function-trace.cc index cda3bc2db..55ccfc791 100644 --- a/src/libexpr/function-trace.cc +++ b/src/libexpr/function-trace.cc @@ -19,4 +19,4 @@ void FunctionCallTrace::postFunctionCallHook( printMsg(lvlInfo, "function-trace exited %1% at %2%", state.positions[pos], ns.count()); } -} +} // namespace nix diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index 3c9ff9ff3..a1c3e5611 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -7,18 +7,19 @@ #include #include - namespace nix { - PackageInfo::PackageInfo(EvalState & state, std::string attrPath, const Bindings * attrs) - : state(&state), attrs(attrs), attrPath(std::move(attrPath)) + : state(&state) + , attrs(attrs) + , attrPath(std::move(attrPath)) { } - PackageInfo::PackageInfo(EvalState & state, ref store, const std::string & drvPathWithOutputs) - : state(&state), attrs(nullptr), attrPath("") + : state(&state) + , attrs(nullptr) + , attrPath("") { auto [drvPath, selectedOutputs] = parsePathWithOutputs(*store, drvPathWithOutputs); @@ -31,10 +32,7 @@ PackageInfo::PackageInfo(EvalState & state, ref store, const std::string if (selectedOutputs.size() > 1) throw Error("building more than one derivation output is not supported, in '%s'", drvPathWithOutputs); - outputName = - selectedOutputs.empty() - ? getOr(drv.env, "outputName", "out") - : *selectedOutputs.begin(); + outputName = selectedOutputs.empty() ? 
getOr(drv.env, "outputName", "out") : *selectedOutputs.begin(); auto i = drv.outputs.find(outputName); if (i == drv.outputs.end()) @@ -44,34 +42,36 @@ PackageInfo::PackageInfo(EvalState & state, ref store, const std::string outPath = {output.path(*store, drv.name, outputName)}; } - std::string PackageInfo::queryName() const { if (name == "" && attrs) { auto i = attrs->find(state->sName); - if (i == attrs->end()) state->error("derivation name missing").debugThrow(); + if (i == attrs->end()) + state->error("derivation name missing").debugThrow(); name = state->forceStringNoCtx(*i->value, noPos, "while evaluating the 'name' attribute of a derivation"); } return name; } - std::string PackageInfo::querySystem() const { if (system == "" && attrs) { auto i = attrs->find(state->sSystem); - system = i == attrs->end() ? "unknown" : state->forceStringNoCtx(*i->value, i->pos, "while evaluating the 'system' attribute of a derivation"); + system = + i == attrs->end() + ? "unknown" + : state->forceStringNoCtx(*i->value, i->pos, "while evaluating the 'system' attribute of a derivation"); } return system; } - std::optional PackageInfo::queryDrvPath() const { if (!drvPath && attrs) { if (auto i = attrs->get(state->sDrvPath)) { NixStringContext context; - auto found = state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the 'drvPath' attribute of a derivation"); + auto found = state->coerceToStorePath( + i->pos, *i->value, context, "while evaluating the 'drvPath' attribute of a derivation"); try { found.requireDerivation(); } catch (Error & e) { @@ -85,7 +85,6 @@ std::optional PackageInfo::queryDrvPath() const return drvPath.value_or(std::nullopt); } - StorePath PackageInfo::requireDrvPath() const { if (auto drvPath = queryDrvPath()) @@ -93,21 +92,20 @@ StorePath PackageInfo::requireDrvPath() const throw Error("derivation does not contain a 'drvPath' attribute"); } - StorePath PackageInfo::queryOutPath() const { if (!outPath && attrs) { auto i = attrs->find(state->sOutPath); NixStringContext context; if (i != attrs->end()) - outPath = state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the output path of a derivation"); + outPath = state->coerceToStorePath( + i->pos, *i->value, context, "while evaluating the output path of a derivation"); } if (!outPath) throw UnimplementedError("CA derivations are not yet supported"); return *outPath; } - PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall) { if (outputs.empty()) { @@ -118,19 +116,25 @@ PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsT /* For each output... */ for (auto elem : i->value->listView()) { - std::string output(state->forceStringNoCtx(*elem, i->pos, "while evaluating the name of an output of a derivation")); + std::string output( + state->forceStringNoCtx(*elem, i->pos, "while evaluating the name of an output of a derivation")); if (withPaths) { /* Evaluate the corresponding set. */ auto out = attrs->get(state->symbols.create(output)); - if (!out) continue; // FIXME: throw error? + if (!out) + continue; // FIXME: throw error? state->forceAttrs(*out->value, i->pos, "while evaluating an output of a derivation"); /* And evaluate its ‘outPath’ attribute. */ auto outPath = out->value->attrs()->get(state->sOutPath); - if (!outPath) continue; // FIXME: throw error? + if (!outPath) + continue; // FIXME: throw error? 
NixStringContext context; - outputs.emplace(output, state->coerceToStorePath(outPath->pos, *outPath->value, context, "while evaluating an output path of a derivation")); + outputs.emplace( + output, + state->coerceToStorePath( + outPath->pos, *outPath->value, context, "while evaluating an output path of a derivation")); } else outputs.emplace(output, std::nullopt); } @@ -142,7 +146,8 @@ PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsT return outputs; const Attr * i; - if (attrs && (i = attrs->get(state->sOutputSpecified)) && state->forceBool(*i->value, i->pos, "while evaluating the 'outputSpecified' attribute of a derivation")) { + if (attrs && (i = attrs->get(state->sOutputSpecified)) + && state->forceBool(*i->value, i->pos, "while evaluating the 'outputSpecified' attribute of a derivation")) { Outputs result; auto out = outputs.find(queryOutputName()); if (out == outputs.end()) @@ -154,95 +159,103 @@ PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsT else { /* Check for `meta.outputsToInstall` and return `outputs` reduced to that. */ const Value * outTI = queryMeta("outputsToInstall"); - if (!outTI) return outputs; + if (!outTI) + return outputs; auto errMsg = Error("this derivation has bad 'meta.outputsToInstall'"); - /* ^ this shows during `nix-env -i` right under the bad derivation */ - if (!outTI->isList()) throw errMsg; + /* ^ this shows during `nix-env -i` right under the bad derivation */ + if (!outTI->isList()) + throw errMsg; Outputs result; for (auto elem : outTI->listView()) { - if (elem->type() != nString) throw errMsg; + if (elem->type() != nString) + throw errMsg; auto out = outputs.find(elem->c_str()); - if (out == outputs.end()) throw errMsg; + if (out == outputs.end()) + throw errMsg; result.insert(*out); } return result; } } - std::string PackageInfo::queryOutputName() const { if (outputName == "" && attrs) { auto i = attrs->get(state->sOutputName); - outputName = i ? state->forceStringNoCtx(*i->value, noPos, "while evaluating the output name of a derivation") : ""; + outputName = + i ? 
state->forceStringNoCtx(*i->value, noPos, "while evaluating the output name of a derivation") : ""; } return outputName; } - const Bindings * PackageInfo::getMeta() { - if (meta) return meta; - if (!attrs) return 0; + if (meta) + return meta; + if (!attrs) + return 0; auto a = attrs->get(state->sMeta); - if (!a) return 0; + if (!a) + return 0; state->forceAttrs(*a->value, a->pos, "while evaluating the 'meta' attribute of a derivation"); meta = a->value->attrs(); return meta; } - StringSet PackageInfo::queryMetaNames() { StringSet res; - if (!getMeta()) return res; + if (!getMeta()) + return res; for (auto & i : *meta) res.emplace(state->symbols[i.name]); return res; } - bool PackageInfo::checkMeta(Value & v) { state->forceValue(v, v.determinePos(noPos)); if (v.type() == nList) { for (auto elem : v.listView()) - if (!checkMeta(*elem)) return false; + if (!checkMeta(*elem)) + return false; return true; - } - else if (v.type() == nAttrs) { - if (v.attrs()->get(state->sOutPath)) return false; + } else if (v.type() == nAttrs) { + if (v.attrs()->get(state->sOutPath)) + return false; for (auto & i : *v.attrs()) - if (!checkMeta(*i.value)) return false; + if (!checkMeta(*i.value)) + return false; return true; - } - else return v.type() == nInt || v.type() == nBool || v.type() == nString || - v.type() == nFloat; + } else + return v.type() == nInt || v.type() == nBool || v.type() == nString || v.type() == nFloat; } - Value * PackageInfo::queryMeta(const std::string & name) { - if (!getMeta()) return 0; + if (!getMeta()) + return 0; auto a = meta->get(state->symbols.create(name)); - if (!a || !checkMeta(*a->value)) return 0; + if (!a || !checkMeta(*a->value)) + return 0; return a->value; } - std::string PackageInfo::queryMetaString(const std::string & name) { Value * v = queryMeta(name); - if (!v || v->type() != nString) return ""; + if (!v || v->type() != nString) + return ""; return v->c_str(); } - NixInt PackageInfo::queryMetaInt(const std::string & name, NixInt def) { Value * v = queryMeta(name); - if (!v) return def; - if (v->type() == nInt) return v->integer(); + if (!v) + return def; + if (v->type() == nInt) + return v->integer(); if (v->type() == nString) { /* Backwards compatibility with before we had support for integer meta fields. */ @@ -255,8 +268,10 @@ NixInt PackageInfo::queryMetaInt(const std::string & name, NixInt def) NixFloat PackageInfo::queryMetaFloat(const std::string & name, NixFloat def) { Value * v = queryMeta(name); - if (!v) return def; - if (v->type() == nFloat) return v->fpoint(); + if (!v) + return def; + if (v->type() == nFloat) + return v->fpoint(); if (v->type() == nString) { /* Backwards compatibility with before we had support for float meta fields. */ @@ -266,22 +281,24 @@ NixFloat PackageInfo::queryMetaFloat(const std::string & name, NixFloat def) return def; } - bool PackageInfo::queryMetaBool(const std::string & name, bool def) { Value * v = queryMeta(name); - if (!v) return def; - if (v->type() == nBool) return v->boolean(); + if (!v) + return def; + if (v->type() == nBool) + return v->boolean(); if (v->type() == nString) { /* Backwards compatibility with before we had support for Boolean meta fields. 
*/ - if (v->string_view() == "true") return true; - if (v->string_view() == "false") return false; + if (v->string_view() == "true") + return true; + if (v->string_view() == "false") + return false; } return def; } - void PackageInfo::setMeta(const std::string & name, Value * v) { getMeta(); @@ -291,30 +308,35 @@ void PackageInfo::setMeta(const std::string & name, Value * v) for (auto i : *meta) if (i.name != sym) attrs.insert(i); - if (v) attrs.insert(sym, v); + if (v) + attrs.insert(sym, v); meta = attrs.finish(); } - /* Cache for already considered attrsets. */ typedef std::set Done; - /* Evaluate value `v'. If it evaluates to a set of type `derivation', then put information about it in `drvs' (unless it's already in `done'). The result boolean indicates whether it makes sense for the caller to recursively search for derivations in `v'. */ -static bool getDerivation(EvalState & state, Value & v, - const std::string & attrPath, PackageInfos & drvs, Done & done, +static bool getDerivation( + EvalState & state, + Value & v, + const std::string & attrPath, + PackageInfos & drvs, + Done & done, bool ignoreAssertionFailures) { try { state.forceValue(v, v.determinePos(noPos)); - if (!state.isDerivation(v)) return true; + if (!state.isDerivation(v)) + return true; /* Remove spurious duplicates (e.g., a set like `rec { x = derivation {...}; y = x;}'. */ - if (!done.insert(v.attrs()).second) return false; + if (!done.insert(v.attrs()).second) + return false; PackageInfo drv(state, attrPath, v.attrs()); @@ -325,42 +347,44 @@ static bool getDerivation(EvalState & state, Value & v, return false; } catch (AssertionError & e) { - if (ignoreAssertionFailures) return false; + if (ignoreAssertionFailures) + return false; throw; } } - -std::optional getDerivation(EvalState & state, Value & v, - bool ignoreAssertionFailures) +std::optional getDerivation(EvalState & state, Value & v, bool ignoreAssertionFailures) { Done done; PackageInfos drvs; getDerivation(state, v, "", drvs, done, ignoreAssertionFailures); - if (drvs.size() != 1) return {}; + if (drvs.size() != 1) + return {}; return std::move(drvs.front()); } - static std::string addToPath(const std::string & s1, std::string_view s2) { return s1.empty() ? std::string(s2) : s1 + "." + s2; } - static std::regex attrRegex("[A-Za-z_][A-Za-z0-9-_+]*"); - -static void getDerivations(EvalState & state, Value & vIn, - const std::string & pathPrefix, Bindings & autoArgs, - PackageInfos & drvs, Done & done, +static void getDerivations( + EvalState & state, + Value & vIn, + const std::string & pathPrefix, + Bindings & autoArgs, + PackageInfos & drvs, + Done & done, bool ignoreAssertionFailures) { Value v; state.autoCallFunction(autoArgs, vIn, v); /* Process the expression. */ - if (!getDerivation(state, v, pathPrefix, drvs, done, ignoreAssertionFailures)) ; + if (!getDerivation(state, v, pathPrefix, drvs, done, ignoreAssertionFailures)) + ; else if (v.type() == nAttrs) { @@ -388,8 +412,11 @@ static void getDerivations(EvalState & state, Value & vIn, `recurseForDerivations = true' attribute. 
*/ if (i->value->type() == nAttrs) { auto j = i->value->attrs()->get(state.sRecurseForDerivations); - if (j && state.forceBool(*j->value, j->pos, "while evaluating the attribute `recurseForDerivations`")) - getDerivations(state, *i->value, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures); + if (j + && state.forceBool( + *j->value, j->pos, "while evaluating the attribute `recurseForDerivations`")) + getDerivations( + state, *i->value, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures); } } } catch (Error & e) { @@ -412,13 +439,16 @@ static void getDerivations(EvalState & state, Value & vIn, state.error("expression does not evaluate to a derivation (or a set or list of those)").debugThrow(); } - -void getDerivations(EvalState & state, Value & v, const std::string & pathPrefix, - Bindings & autoArgs, PackageInfos & drvs, bool ignoreAssertionFailures) +void getDerivations( + EvalState & state, + Value & v, + const std::string & pathPrefix, + Bindings & autoArgs, + PackageInfos & drvs, + bool ignoreAssertionFailures) { Done done; getDerivations(state, v, pathPrefix, autoArgs, drvs, done, ignoreAssertionFailures); } - -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/attr-path.hh b/src/libexpr/include/nix/expr/attr-path.hh index 66a3f4e00..10e3e300f 100644 --- a/src/libexpr/include/nix/expr/attr-path.hh +++ b/src/libexpr/include/nix/expr/attr-path.hh @@ -11,11 +11,8 @@ namespace nix { MakeError(AttrPathNotFound, Error); MakeError(NoPositionInfo, Error); -std::pair findAlongAttrPath( - EvalState & state, - const std::string & attrPath, - Bindings & autoArgs, - Value & vIn); +std::pair +findAlongAttrPath(EvalState & state, const std::string & attrPath, Bindings & autoArgs, Value & vIn); /** * Heuristic to find the filename and lineno or a nix value. @@ -24,4 +21,4 @@ std::pair findPackageFilename(EvalState & state, Value & v std::vector parseAttrPath(EvalState & state, std::string_view s); -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/attr-set.hh b/src/libexpr/include/nix/expr/attr-set.hh index 283786f4d..e01b6729c 100644 --- a/src/libexpr/include/nix/expr/attr-set.hh +++ b/src/libexpr/include/nix/expr/attr-set.hh @@ -8,7 +8,6 @@ namespace nix { - class EvalState; struct Value; @@ -25,15 +24,19 @@ struct Attr PosIdx pos; Value * value; Attr(Symbol name, Value * value, PosIdx pos = noPos) - : name(name), pos(pos), value(value) { }; - Attr() { }; - auto operator <=> (const Attr & a) const + : name(name) + , pos(pos) + , value(value) {}; + Attr() {}; + + auto operator<=>(const Attr & a) const { return name <=> a.name; } }; -static_assert(sizeof(Attr) == 2 * sizeof(uint32_t) + sizeof(Value *), +static_assert( + sizeof(Attr) == 2 * sizeof(uint32_t) + sizeof(Value *), "performance of the evaluator is highly sensitive to the size of Attr. 
" "avoid introducing any padding into Attr if at all possible, and do not " "introduce new fields that need not be present for almost every instance."); @@ -54,13 +57,24 @@ private: size_t size_, capacity_; Attr attrs[0]; - Bindings(size_t capacity) : size_(0), capacity_(capacity) { } + Bindings(size_t capacity) + : size_(0) + , capacity_(capacity) + { + } + Bindings(const Bindings & bindings) = delete; public: - size_t size() const { return size_; } + size_t size() const + { + return size_; + } - bool empty() const { return !size_; } + bool empty() const + { + return !size_; + } typedef Attr * iterator; @@ -76,7 +90,8 @@ public: { Attr key(name, 0); const_iterator i = std::lower_bound(begin(), end(), key); - if (i != end() && i->name == name) return i; + if (i != end() && i->name == name) + return i; return end(); } @@ -84,15 +99,30 @@ public: { Attr key(name, 0); const_iterator i = std::lower_bound(begin(), end(), key); - if (i != end() && i->name == name) return &*i; + if (i != end() && i->name == name) + return &*i; return nullptr; } - iterator begin() { return &attrs[0]; } - iterator end() { return &attrs[size_]; } + iterator begin() + { + return &attrs[0]; + } - const_iterator begin() const { return &attrs[0]; } - const_iterator end() const { return &attrs[size_]; } + iterator end() + { + return &attrs[size_]; + } + + const_iterator begin() const + { + return &attrs[0]; + } + + const_iterator end() const + { + return &attrs[size_]; + } Attr & operator[](size_t pos) { @@ -106,7 +136,10 @@ public: void sort(); - size_t capacity() const { return capacity_; } + size_t capacity() const + { + return capacity_; + } /** * Returns the attributes in lexicographically sorted order. @@ -143,8 +176,10 @@ public: EvalState & state; BindingsBuilder(EvalState & state, Bindings * bindings) - : bindings(bindings), state(state) - { } + : bindings(bindings) + , state(state) + { + } void insert(Symbol name, Value * value, PosIdx pos = noPos) { @@ -191,4 +226,4 @@ public: friend struct ExprAttrs; }; -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/eval-cache.hh b/src/libexpr/include/nix/expr/eval-cache.hh index 31873f7a3..0a0461c19 100644 --- a/src/libexpr/include/nix/expr/eval-cache.hh +++ b/src/libexpr/include/nix/expr/eval-cache.hh @@ -43,10 +43,7 @@ class EvalCache : public std::enable_shared_from_this public: - EvalCache( - std::optional> useCache, - EvalState & state, - RootLoader rootLoader); + EvalCache(std::optional> useCache, EvalState & state, RootLoader rootLoader); ref getRoot(); }; @@ -63,11 +60,23 @@ enum AttrType { Int = 8, }; -struct placeholder_t {}; -struct missing_t {}; -struct misc_t {}; -struct failed_t {}; -struct int_t { NixInt x; }; +struct placeholder_t +{}; + +struct missing_t +{}; + +struct misc_t +{}; + +struct failed_t +{}; + +struct int_t +{ + NixInt x; +}; + typedef uint64_t AttrId; typedef std::pair AttrKey; typedef std::pair string_t; @@ -81,8 +90,8 @@ typedef std::variant< failed_t, bool, int_t, - std::vector - > AttrValue; + std::vector> + AttrValue; class AttrCursor : public std::enable_shared_from_this { @@ -161,4 +170,4 @@ public: StorePath forceDerivation(); }; -} +} // namespace nix::eval_cache diff --git a/src/libexpr/include/nix/expr/eval-error.hh b/src/libexpr/include/nix/expr/eval-error.hh index 6f4c37f90..38db9b706 100644 --- a/src/libexpr/include/nix/expr/eval-error.hh +++ b/src/libexpr/include/nix/expr/eval-error.hh @@ -60,6 +60,7 @@ struct InvalidPathError : public EvalError { public: Path path; + InvalidPathError(EvalState & state, const 
Path & path) : EvalError(state, "path '%s' is not valid", path) { @@ -119,4 +120,4 @@ public: [[gnu::noinline, gnu::noreturn]] void panic(); }; -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/eval-inline.hh b/src/libexpr/include/nix/expr/eval-inline.hh index 7d13d7cc7..a1fd0ae4a 100644 --- a/src/libexpr/include/nix/expr/eval-inline.hh +++ b/src/libexpr/include/nix/expr/eval-inline.hh @@ -23,11 +23,11 @@ inline void * allocBytes(size_t n) #else p = calloc(n, 1); #endif - if (!p) throw std::bad_alloc(); + if (!p) + throw std::bad_alloc(); return p; } - [[gnu::always_inline]] Value * EvalState::allocValue() { @@ -38,7 +38,8 @@ Value * EvalState::allocValue() have to explicitly clear the first word of every object we take. */ if (!*valueAllocCache) { *valueAllocCache = GC_malloc_many(sizeof(Value)); - if (!*valueAllocCache) throw std::bad_alloc(); + if (!*valueAllocCache) + throw std::bad_alloc(); } /* GC_NEXT is a convenience macro for accessing the first word of an object. @@ -54,7 +55,6 @@ Value * EvalState::allocValue() return (Value *) p; } - [[gnu::always_inline]] Env & EvalState::allocEnv(size_t size) { @@ -68,7 +68,8 @@ Env & EvalState::allocEnv(size_t size) /* see allocValue for explanations. */ if (!*env1AllocCache) { *env1AllocCache = GC_malloc_many(sizeof(Env) + sizeof(Value *)); - if (!*env1AllocCache) throw std::bad_alloc(); + if (!*env1AllocCache) + throw std::bad_alloc(); } void * p = *env1AllocCache; @@ -84,7 +85,6 @@ Env & EvalState::allocEnv(size_t size) return *env; } - [[gnu::always_inline]] void EvalState::forceValue(Value & v, const PosIdx pos) { @@ -94,7 +94,7 @@ void EvalState::forceValue(Value & v, const PosIdx pos) Expr * expr = v.thunk().expr; try { v.mkBlackhole(); - //checkInterrupt(); + // checkInterrupt(); if (env) [[likely]] expr->eval(*this, *env, v); else @@ -104,54 +104,47 @@ void EvalState::forceValue(Value & v, const PosIdx pos) tryFixupBlackHolePos(v, pos); throw; } - } - else if (v.isApp()) + } else if (v.isApp()) callFunction(*v.app().left, *v.app().right, v, pos); } - [[gnu::always_inline]] inline void EvalState::forceAttrs(Value & v, const PosIdx pos, std::string_view errorCtx) { forceAttrs(v, [&]() { return pos; }, errorCtx); } - -template +template [[gnu::always_inline]] inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view errorCtx) { PosIdx pos = getPos(); forceValue(v, pos); if (v.type() != nAttrs) { - error( - "expected a set but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions) - ).withTrace(pos, errorCtx).debugThrow(); + error("expected a set but found %1%: %2%", showType(v), ValuePrinter(*this, v, errorPrintOptions)) + .withTrace(pos, errorCtx) + .debugThrow(); } } - [[gnu::always_inline]] inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view errorCtx) { forceValue(v, pos); if (!v.isList()) { - error( - "expected a list but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions) - ).withTrace(pos, errorCtx).debugThrow(); + error("expected a list but found %1%: %2%", showType(v), ValuePrinter(*this, v, errorPrintOptions)) + .withTrace(pos, errorCtx) + .debugThrow(); } } [[gnu::always_inline]] -inline CallDepth EvalState::addCallDepth(const PosIdx pos) { +inline CallDepth EvalState::addCallDepth(const PosIdx pos) +{ if (callDepth > settings.maxCallDepth) error("stack overflow; max-call-depth exceeded").atPos(pos).debugThrow(); return CallDepth(callDepth); }; -} +} // namespace nix diff --git 
a/src/libexpr/include/nix/expr/eval-profiler-settings.hh b/src/libexpr/include/nix/expr/eval-profiler-settings.hh index a94cde042..32138e7f1 100644 --- a/src/libexpr/include/nix/expr/eval-profiler-settings.hh +++ b/src/libexpr/include/nix/expr/eval-profiler-settings.hh @@ -13,4 +13,4 @@ EvalProfilerMode BaseSetting::parse(const std::string & str) c template<> std::string BaseSetting::to_string() const; -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/eval-profiler.hh b/src/libexpr/include/nix/expr/eval-profiler.hh index 21629eebc..c632b7c42 100644 --- a/src/libexpr/include/nix/expr/eval-profiler.hh +++ b/src/libexpr/include/nix/expr/eval-profiler.hh @@ -111,4 +111,4 @@ public: ref makeSampleStackProfiler(EvalState & state, std::filesystem::path profileFile, uint64_t frequency); -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/eval-settings.hh b/src/libexpr/include/nix/expr/eval-settings.hh index eee3b0f0e..25ba84ac9 100644 --- a/src/libexpr/include/nix/expr/eval-settings.hh +++ b/src/libexpr/include/nix/expr/eval-settings.hh @@ -74,7 +74,9 @@ struct EvalSettings : Config )"}; Setting nixPath{ - this, {}, "nix-path", + this, + {}, + "nix-path", R"( List of search paths to use for [lookup path](@docroot@/language/constructs/lookup-path.md) resolution. This setting determines the value of @@ -107,10 +109,14 @@ struct EvalSettings : Config > If [restricted evaluation](@docroot@/command-ref/conf-file.md#conf-restrict-eval) is enabled, the default value is empty. > > If [pure evaluation](#conf-pure-eval) is enabled, `builtins.nixPath` *always* evaluates to the empty list `[ ]`. - )", {}, false}; + )", + {}, + false}; Setting currentSystem{ - this, "", "eval-system", + this, + "", + "eval-system", R"( This option defines [`builtins.currentSystem`](@docroot@/language/builtins.md#builtins-currentSystem) @@ -130,7 +136,9 @@ struct EvalSettings : Config const std::string & getCurrentSystem() const; Setting restrictEval{ - this, false, "restrict-eval", + this, + false, + "restrict-eval", R"( If set to `true`, the Nix evaluator doesn't allow access to any files outside of @@ -139,7 +147,10 @@ struct EvalSettings : Config [`allowed-uris`](@docroot@/command-ref/conf-file.md#conf-allowed-uris). )"}; - Setting pureEval{this, false, "pure-eval", + Setting pureEval{ + this, + false, + "pure-eval", R"( Pure evaluation mode ensures that the result of Nix expressions is fully determined by explicitly declared inputs, and not influenced by external state: @@ -149,21 +160,23 @@ struct EvalSettings : Config - [`builtins.currentTime`](@docroot@/language/builtins.md#builtins-currentTime) - [`builtins.nixPath`](@docroot@/language/builtins.md#builtins-nixPath) - [`builtins.storePath`](@docroot@/language/builtins.md#builtins-storePath) - )" - }; + )"}; Setting traceImportFromDerivation{ - this, false, "trace-import-from-derivation", + this, + false, + "trace-import-from-derivation", R"( By default, Nix allows [Import from Derivation](@docroot@/language/import-from-derivation.md). When this setting is `true`, Nix logs a warning indicating that it performed such an import. This option has no effect if `allow-import-from-derivation` is disabled. - )" - }; + )"}; Setting enableImportFromDerivation{ - this, true, "allow-import-from-derivation", + this, + true, + "allow-import-from-derivation", R"( By default, Nix allows [Import from Derivation](@docroot@/language/import-from-derivation.md). @@ -173,7 +186,10 @@ struct EvalSettings : Config regardless of the state of the store. 
)"}; - Setting allowedUris{this, {}, "allowed-uris", + Setting allowedUris{ + this, + {}, + "allowed-uris", R"( A list of URI prefixes to which access is allowed in restricted evaluation mode. For example, when set to @@ -186,7 +202,10 @@ struct EvalSettings : Config - or the prefix is a URI scheme ended by a colon `:` and the URI has the same scheme. )"}; - Setting traceFunctionCalls{this, false, "trace-function-calls", + Setting traceFunctionCalls{ + this, + false, + "trace-function-calls", R"( If set to `true`, the Nix evaluator traces every function call. Nix prints a log message at the "vomit" level for every function @@ -204,7 +223,10 @@ struct EvalSettings : Config `flamegraph.pl`. )"}; - Setting evalProfilerMode{this, EvalProfilerMode::disabled, "eval-profiler", + Setting evalProfilerMode{ + this, + EvalProfilerMode::disabled, + "eval-profiler", R"( Enables evaluation profiling. The following modes are supported: @@ -215,38 +237,56 @@ struct EvalSettings : Config See [Using the `eval-profiler`](@docroot@/advanced-topics/eval-profiler.md). )"}; - Setting evalProfileFile{this, "nix.profile", "eval-profile-file", + Setting evalProfileFile{ + this, + "nix.profile", + "eval-profile-file", R"( Specifies the file where [evaluation profile](#conf-eval-profiler) is saved. )"}; - Setting evalProfilerFrequency{this, 99, "eval-profiler-frequency", + Setting evalProfilerFrequency{ + this, + 99, + "eval-profiler-frequency", R"( Specifies the sampling rate in hertz for sampling evaluation profilers. Use `0` to sample the stack after each function call. See [`eval-profiler`](#conf-eval-profiler). )"}; - Setting useEvalCache{this, true, "eval-cache", + Setting useEvalCache{ + this, + true, + "eval-cache", R"( Whether to use the flake evaluation cache. Certain commands won't have to evaluate when invoked for the second time with a particular version of a flake. Intermediate results are not cached. )"}; - Setting ignoreExceptionsDuringTry{this, false, "ignore-try", + Setting ignoreExceptionsDuringTry{ + this, + false, + "ignore-try", R"( If set to true, ignore exceptions inside 'tryEval' calls when evaluating Nix expressions in debug mode (using the --debugger flag). By default the debugger pauses on all exceptions. )"}; - Setting traceVerbose{this, false, "trace-verbose", + Setting traceVerbose{ + this, + false, + "trace-verbose", "Whether `builtins.traceVerbose` should trace its first argument when evaluated."}; - Setting maxCallDepth{this, 10000, "max-call-depth", - "The maximum function call depth to allow before erroring."}; + Setting maxCallDepth{ + this, 10000, "max-call-depth", "The maximum function call depth to allow before erroring."}; - Setting builtinsTraceDebugger{this, false, "debugger-on-trace", + Setting builtinsTraceDebugger{ + this, + false, + "debugger-on-trace", R"( If set to true and the `--debugger` flag is given, the following functions enter the debugger like [`builtins.break`](@docroot@/language/builtins.md#builtins-break): @@ -259,7 +299,10 @@ struct EvalSettings : Config This is useful for debugging warnings in third-party Nix code. )"}; - Setting builtinsDebuggerOnWarn{this, false, "debugger-on-warn", + Setting builtinsDebuggerOnWarn{ + this, + false, + "debugger-on-warn", R"( If set to true and the `--debugger` flag is given, [`builtins.warn`](@docroot@/language/builtins.md#builtins-warn) will enter the debugger like [`builtins.break`](@docroot@/language/builtins.md#builtins-break). 
@@ -269,7 +312,10 @@ struct EvalSettings : Config Use [`debugger-on-trace`](#conf-debugger-on-trace) to also enter the debugger on legacy warnings that are logged with [`builtins.trace`](@docroot@/language/builtins.md#builtins-trace). )"}; - Setting builtinsAbortOnWarn{this, false, "abort-on-warn", + Setting builtinsAbortOnWarn{ + this, + false, + "abort-on-warn", R"( If set to true, [`builtins.warn`](@docroot@/language/builtins.md#builtins-warn) throws an error when logging a warning. @@ -288,4 +334,4 @@ struct EvalSettings : Config */ Path getNixDefExpr(); -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/eval.hh b/src/libexpr/include/nix/expr/eval.hh index 27294d114..d52ccb545 100644 --- a/src/libexpr/include/nix/expr/eval.hh +++ b/src/libexpr/include/nix/expr/eval.hh @@ -34,39 +34,46 @@ namespace nix { constexpr size_t maxPrimOpArity = 8; class Store; + namespace fetchers { struct Settings; struct InputCache; -} +} // namespace fetchers struct EvalSettings; class EvalState; class StorePath; struct SingleDerivedPath; enum RepairFlag : bool; struct MemorySourceAccessor; + namespace eval_cache { - class EvalCache; +class EvalCache; } /** * Increments a count on construction and decrements on destruction. */ -class CallDepth { - size_t & count; +class CallDepth +{ + size_t & count; public: - CallDepth(size_t & count) : count(count) { - ++count; - } - ~CallDepth() { - --count; - } + CallDepth(size_t & count) + : count(count) + { + ++count; + } + + ~CallDepth() + { + --count; + } }; /** * Function that implements a primop. */ -using PrimOpFun = void(EvalState & state, const PosIdx pos, Value * * args, Value & v); +using PrimOpFun = void(EvalState & state, const PosIdx pos, Value ** args, Value & v); /** * Info about a primitive operation, and its implementation @@ -151,7 +158,9 @@ struct Constant bool impureOnly = false; }; -typedef std::map, traceable_allocator > > ValMap; +typedef std:: + map, traceable_allocator>> + ValMap; typedef std::unordered_map DocCommentMap; @@ -161,23 +170,25 @@ struct Env Value * values[0]; }; -void printEnvBindings(const EvalState &es, const Expr & expr, const Env & env); +void printEnvBindings(const EvalState & es, const Expr & expr, const Env & env); void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & env, int lvl = 0); std::unique_ptr mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & env); -void copyContext(const Value & v, NixStringContext & context, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); - +void copyContext( + const Value & v, + NixStringContext & context, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); std::string printValue(EvalState & state, Value & v); -std::ostream & operator << (std::ostream & os, const ValueType t); - +std::ostream & operator<<(std::ostream & os, const ValueType t); struct RegexCache; std::shared_ptr makeRegexCache(); -struct DebugTrace { +struct DebugTrace +{ /* WARNING: Converting PosIdx -> Pos should be done with extra care. This is due to the fact that operator[] of PosTable is incredibly expensive. 
*/ std::variant pos; @@ -210,18 +221,11 @@ public: SymbolTable symbols; PosTable positions; - const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue, - sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls, - sFile, sLine, sColumn, sFunctor, sToString, - sRight, sWrong, sStructuredAttrs, sJson, - sAllowedReferences, sAllowedRequisites, sDisallowedReferences, sDisallowedRequisites, - sMaxSize, sMaxClosureSize, - sBuilder, sArgs, - sContentAddressed, sImpure, - sOutputHash, sOutputHashAlgo, sOutputHashMode, - sRecurseForDerivations, - sDescription, sSelf, sEpsilon, sStartSet, sOperator, sKey, sPath, - sPrefix, + const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue, sSystem, sOverrides, sOutputs, sOutputName, + sIgnoreNulls, sFile, sLine, sColumn, sFunctor, sToString, sRight, sWrong, sStructuredAttrs, sJson, + sAllowedReferences, sAllowedRequisites, sDisallowedReferences, sDisallowedRequisites, sMaxSize, sMaxClosureSize, + sBuilder, sArgs, sContentAddressed, sImpure, sOutputHash, sOutputHashAlgo, sOutputHashMode, + sRecurseForDerivations, sDescription, sSelf, sEpsilon, sStartSet, sOperator, sKey, sPath, sPrefix, sOutputSpecified; const Expr::AstSymbols exprSymbols; @@ -309,19 +313,21 @@ public: /** * Debugger */ - ReplExitStatus (* debugRepl)(ref es, const ValMap & extraEnv); + ReplExitStatus (*debugRepl)(ref es, const ValMap & extraEnv); bool debugStop; bool inDebugger = false; int trylevel; std::list debugTraces; - std::map> exprEnvs; + std::map> exprEnvs; + const std::shared_ptr getStaticEnv(const Expr & expr) const { auto i = exprEnvs.find(&expr); if (i != exprEnvs.end()) return i->second; else - return std::shared_ptr();; + return std::shared_ptr(); + ; } /** Whether a debug repl can be started. If `false`, `runDebugRepl(error)` will return without starting a repl. */ @@ -340,7 +346,8 @@ public: template [[nodiscard, gnu::noinline]] - EvalErrorBuilder & error(const Args & ... args) { + EvalErrorBuilder & error(const Args &... args) + { // `EvalErrorBuilder::debugThrow` performs the corresponding `delete`. return *new EvalErrorBuilder(*this, args...); } @@ -359,13 +366,25 @@ private: /** * A cache from path names to parse trees. */ - typedef std::unordered_map, std::equal_to, traceable_allocator>> FileParseCache; + typedef std::unordered_map< + SourcePath, + Expr *, + std::hash, + std::equal_to, + traceable_allocator>> + FileParseCache; FileParseCache fileParseCache; /** * A cache from path names to values. */ - typedef std::unordered_map, std::equal_to, traceable_allocator>> FileEvalCache; + typedef std::unordered_map< + SourcePath, + Value, + std::hash, + std::equal_to, + traceable_allocator>> + FileEvalCache; FileEvalCache fileEvalCache; /** @@ -405,7 +424,10 @@ public: std::shared_ptr buildStore = nullptr); ~EvalState(); - LookupPath getLookupPath() { return lookupPath; } + LookupPath getLookupPath() + { + return lookupPath; + } /** * Return a `SourcePath` that refers to `path` in the root @@ -486,9 +508,7 @@ public: * * If it is not found, return `std::nullopt`. 
*/ - std::optional resolveLookupPathPath( - const LookupPath::Path & elem, - bool initAccessControl = false); + std::optional resolveLookupPathPath(const LookupPath::Path & elem, bool initAccessControl = false); /** * Evaluate an expression to normal form @@ -530,7 +550,7 @@ public: void forceAttrs(Value & v, const PosIdx pos, std::string_view errorCtx); - template + template inline void forceAttrs(Value & v, Callable getPos, std::string_view errorCtx); inline void forceList(Value & v, const PosIdx pos, std::string_view errorCtx); @@ -539,7 +559,12 @@ public: */ void forceFunction(Value & v, const PosIdx pos, std::string_view errorCtx); std::string_view forceString(Value & v, const PosIdx pos, std::string_view errorCtx); - std::string_view forceString(Value & v, NixStringContext & context, const PosIdx pos, std::string_view errorCtx, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + std::string_view forceString( + Value & v, + NixStringContext & context, + const PosIdx pos, + std::string_view errorCtx, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); std::string_view forceStringNoCtx(Value & v, const PosIdx pos, std::string_view errorCtx); /** @@ -549,10 +574,10 @@ public: template [[gnu::noinline]] - void addErrorTrace(Error & e, const Args & ... formatArgs) const; + void addErrorTrace(Error & e, const Args &... formatArgs) const; template [[gnu::noinline]] - void addErrorTrace(Error & e, const PosIdx pos, const Args & ... formatArgs) const; + void addErrorTrace(Error & e, const PosIdx pos, const Args &... formatArgs) const; public: /** @@ -561,8 +586,8 @@ public: */ bool isDerivation(Value & v); - std::optional tryAttrsToString(const PosIdx pos, Value & v, - NixStringContext & context, bool coerceMore = false, bool copyToStore = true); + std::optional tryAttrsToString( + const PosIdx pos, Value & v, NixStringContext & context, bool coerceMore = false, bool copyToStore = true); /** * String coercion. @@ -572,9 +597,13 @@ public: * booleans and lists to a string. If `copyToStore` is set, * referenced paths are copied to the Nix store as a side effect. */ - BackedStringView coerceToString(const PosIdx pos, Value & v, NixStringContext & context, + BackedStringView coerceToString( + const PosIdx pos, + Value & v, + NixStringContext & context, std::string_view errorCtx, - bool coerceMore = false, bool copyToStore = true, + bool coerceMore = false, + bool copyToStore = true, bool canonicalizePath = true); StorePath copyPathToStore(NixStringContext & context, const SourcePath & path); @@ -596,7 +625,11 @@ public: /** * Part of `coerceToSingleDerivedPath()` without any store IO which is exposed for unit testing only. */ - std::pair coerceToSingleDerivedPathUnchecked(const PosIdx pos, Value & v, std::string_view errorCtx, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + std::pair coerceToSingleDerivedPathUnchecked( + const PosIdx pos, + Value & v, + std::string_view errorCtx, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); /** * Coerce to `SingleDerivedPath`. @@ -636,7 +669,13 @@ public: /** * Internal primops not exposed to the user. */ - std::unordered_map, std::equal_to, traceable_allocator>> internalPrimOps; + std::unordered_map< + std::string, + Value *, + std::hash, + std::equal_to, + traceable_allocator>> + internalPrimOps; /** * Name and documentation about every constant. 
@@ -710,7 +749,8 @@ private: std::shared_ptr & staticEnv); /** - * Current Nix call stack depth, used with `max-call-depth` setting to throw stack overflow hopefully before we run out of system stack. + * Current Nix call stack depth, used with `max-call-depth` setting to throw stack overflow hopefully before we run + * out of system stack. */ size_t callDepth = 0; @@ -773,7 +813,7 @@ public: /** * Return a boolean `Value *` without allocating. */ - Value *getBool(bool b); + Value * getBool(bool b); void mkThunk_(Value & v, Expr * expr); void mkPos(Value & v, PosIdx pos); @@ -817,9 +857,7 @@ public: * * A combination of `mkStorePathString` and `mkOutputString`. */ - void mkSingleDerivedPathString( - const SingleDerivedPath & p, - Value & v); + void mkSingleDerivedPathString(const SingleDerivedPath & p, Value & v); void concatLists(Value & v, size_t nrLists, Value * const * lists, const PosIdx pos, std::string_view errorCtx); @@ -850,22 +888,22 @@ public: * @param[out] maybePaths if not nullptr, all built or referenced store paths will be added to this set * @return a mapping from the placeholders used to construct the associated value to their final store path. */ - [[nodiscard]] StringMap realiseContext(const NixStringContext & context, StorePathSet * maybePaths = nullptr, bool isIFD = true); + [[nodiscard]] StringMap + realiseContext(const NixStringContext & context, StorePathSet * maybePaths = nullptr, bool isIFD = true); /** - * Realise the given string with context, and return the string with outputs instead of downstream output placeholders. + * Realise the given string with context, and return the string with outputs instead of downstream output + * placeholders. * @param[in] str the string to realise * @param[out] paths all referenced store paths will be added to this set * @return the realised string * @throw EvalError if the value is not a string, path or derivation (see `coerceToString`) */ - std::string realiseString(Value & str, StorePathSet * storePathsOutMaybe, bool isIFD = true, const PosIdx pos = noPos); + std::string + realiseString(Value & str, StorePathSet * storePathsOutMaybe, bool isIFD = true, const PosIdx pos = noPos); /* Call the binary path filter predicate used builtins.path etc. */ - bool callPathFilter( - Value * filterFun, - const SourcePath & path, - PosIdx pos); + bool callPathFilter(Value * filterFun, const SourcePath & path, PosIdx pos); DocComment getDocCommentForPos(PosIdx pos); @@ -884,8 +922,7 @@ private: * Like `mkSingleDerivedPathStringRaw` but just creates a raw string * Value, which would also have a string context. 
*/ - std::string mkSingleDerivedPathStringRaw( - const SingleDerivedPath & p); + std::string mkSingleDerivedPathStringRaw(const SingleDerivedPath & p); unsigned long nrEnvs = 0; unsigned long nrValuesInEnvs = 0; @@ -925,20 +962,23 @@ private: friend struct ExprFloat; friend struct ExprPath; friend struct ExprSelect; - friend void prim_getAttr(EvalState & state, const PosIdx pos, Value * * args, Value & v); - friend void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v); - friend void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v); + friend void prim_getAttr(EvalState & state, const PosIdx pos, Value ** args, Value & v); + friend void prim_match(EvalState & state, const PosIdx pos, Value ** args, Value & v); + friend void prim_split(EvalState & state, const PosIdx pos, Value ** args, Value & v); friend struct Value; friend class ListBuilder; }; -struct DebugTraceStacker { +struct DebugTraceStacker +{ DebugTraceStacker(EvalState & evalState, DebugTrace t); + ~DebugTraceStacker() { evalState.debugTraces.pop_front(); } + EvalState & evalState; DebugTrace trace; }; @@ -964,6 +1004,6 @@ SourcePath resolveExprPath(SourcePath path, bool addDefaultNix = true); */ bool isAllowedURI(std::string_view uri, const Strings & allowedPaths); -} +} // namespace nix #include "nix/expr/eval-inline.hh" diff --git a/src/libexpr/include/nix/expr/function-trace.hh b/src/libexpr/include/nix/expr/function-trace.hh index ed1fc6452..1606d125a 100644 --- a/src/libexpr/include/nix/expr/function-trace.hh +++ b/src/libexpr/include/nix/expr/function-trace.hh @@ -22,4 +22,4 @@ public: postFunctionCallHook(EvalState & state, const Value & v, std::span args, const PosIdx pos) override; }; -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/gc-small-vector.hh b/src/libexpr/include/nix/expr/gc-small-vector.hh index ad4503de7..fdd80b2c7 100644 --- a/src/libexpr/include/nix/expr/gc-small-vector.hh +++ b/src/libexpr/include/nix/expr/gc-small-vector.hh @@ -9,13 +9,13 @@ namespace nix { /** * A GC compatible vector that may used a reserved portion of `nItems` on the stack instead of allocating on the heap. */ -template +template using SmallVector = boost::container::small_vector>; /** * A vector of value pointers. See `SmallVector`. */ -template +template using SmallValueVector = SmallVector; /** @@ -23,7 +23,7 @@ using SmallValueVector = SmallVector; * * See also `SmallValueVector`. 
*/ -template +template using SmallTemporaryValueVector = SmallVector; -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/get-drvs.hh b/src/libexpr/include/nix/expr/get-drvs.hh index 0787c44a8..3d42188bf 100644 --- a/src/libexpr/include/nix/expr/get-drvs.hh +++ b/src/libexpr/include/nix/expr/get-drvs.hh @@ -7,7 +7,6 @@ #include #include - namespace nix { /** @@ -33,7 +32,7 @@ private: */ bool failed = false; - const Bindings * attrs = nullptr, * meta = nullptr; + const Bindings *attrs = nullptr, *meta = nullptr; const Bindings * getMeta(); @@ -45,7 +44,8 @@ public: */ std::string attrPath; - PackageInfo(EvalState & state) : state(&state) { }; + PackageInfo(EvalState & state) + : state(&state) {}; PackageInfo(EvalState & state, std::string attrPath, const Bindings * attrs); PackageInfo(EvalState & state, ref store, const std::string & drvPathWithOutputs); @@ -74,28 +74,46 @@ public: MetaValue queryMetaInfo(EvalState & state, const string & name) const; */ - void setName(const std::string & s) { name = s; } - void setDrvPath(StorePath path) { drvPath = {{std::move(path)}}; } - void setOutPath(StorePath path) { outPath = {{std::move(path)}}; } + void setName(const std::string & s) + { + name = s; + } - void setFailed() { failed = true; }; - bool hasFailed() { return failed; }; + void setDrvPath(StorePath path) + { + drvPath = {{std::move(path)}}; + } + + void setOutPath(StorePath path) + { + outPath = {{std::move(path)}}; + } + + void setFailed() + { + failed = true; + }; + + bool hasFailed() + { + return failed; + }; }; - typedef std::list> PackageInfos; - /** * If value `v` denotes a derivation, return a PackageInfo object * describing it. Otherwise return nothing. */ -std::optional getDerivation(EvalState & state, - Value & v, bool ignoreAssertionFailures); +std::optional getDerivation(EvalState & state, Value & v, bool ignoreAssertionFailures); -void getDerivations(EvalState & state, Value & v, const std::string & pathPrefix, - Bindings & autoArgs, PackageInfos & drvs, +void getDerivations( + EvalState & state, + Value & v, + const std::string & pathPrefix, + Bindings & autoArgs, + PackageInfos & drvs, bool ignoreAssertionFailures); - -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/json-to-value.hh b/src/libexpr/include/nix/expr/json-to-value.hh index b01d63bfe..2a2913d68 100644 --- a/src/libexpr/include/nix/expr/json-to-value.hh +++ b/src/libexpr/include/nix/expr/json-to-value.hh @@ -14,4 +14,4 @@ MakeError(JSONParseError, Error); void parseJSON(EvalState & state, const std::string_view & s, Value & v); -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/nixexpr.hh b/src/libexpr/include/nix/expr/nixexpr.hh index 6ede91948..49bd7a3b6 100644 --- a/src/libexpr/include/nix/expr/nixexpr.hh +++ b/src/libexpr/include/nix/expr/nixexpr.hh @@ -19,7 +19,8 @@ struct StaticEnv; struct Value; /** - * A documentation comment, in the sense of [RFC 145](https://github.com/NixOS/rfcs/blob/master/rfcs/0145-doc-strings.md) + * A documentation comment, in the sense of [RFC + * 145](https://github.com/NixOS/rfcs/blob/master/rfcs/0145-doc-strings.md) * * Note that this does not implement the following: * - argument attribute names ("formals"): TBD @@ -34,7 +35,8 @@ struct Value; * `f: g: final: prev: <...>`. The parameters `final` and `prev` are part * of the overlay concept, while distracting from the function's purpose. */ -struct DocComment { +struct DocComment +{ /** * Start of the comment, including the opening, ie `/` and `**`. 
@@ -53,10 +55,12 @@ struct DocComment { * therefore baking optionality into it is also useful, to avoiding the memory * overhead of `std::optional`. */ - operator bool() const { return static_cast(begin); } + operator bool() const + { + return static_cast(begin); + } std::string getInnerText(const PosTable & positions) const; - }; /** @@ -66,52 +70,69 @@ struct AttrName { Symbol symbol; Expr * expr = nullptr; - AttrName(Symbol s) : symbol(s) {}; - AttrName(Expr * e) : expr(e) {}; + AttrName(Symbol s) + : symbol(s) {}; + AttrName(Expr * e) + : expr(e) {}; }; typedef std::vector AttrPath; std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath); - /* Abstract syntax of Nix expressions. */ struct Expr { - struct AstSymbols { + struct AstSymbols + { Symbol sub, lessThan, mul, div, or_, findFile, nixPath, body; }; - static unsigned long nrExprs; - Expr() { + + Expr() + { nrExprs++; } - virtual ~Expr() { }; + + virtual ~Expr() {}; virtual void show(const SymbolTable & symbols, std::ostream & str) const; virtual void bindVars(EvalState & es, const std::shared_ptr & env); virtual void eval(EvalState & state, Env & env, Value & v); virtual Value * maybeThunk(EvalState & state, Env & env); virtual void setName(Symbol name); - virtual void setDocComment(DocComment docComment) { }; - virtual PosIdx getPos() const { return noPos; } + virtual void setDocComment(DocComment docComment) {}; + + virtual PosIdx getPos() const + { + return noPos; + } // These are temporary methods to be used only in parser.y - virtual void resetCursedOr() { }; - virtual void warnIfCursedOr(const SymbolTable & symbols, const PosTable & positions) { }; + virtual void resetCursedOr() {}; + virtual void warnIfCursedOr(const SymbolTable & symbols, const PosTable & positions) {}; }; -#define COMMON_METHODS \ +#define COMMON_METHODS \ void show(const SymbolTable & symbols, std::ostream & str) const override; \ - void eval(EvalState & state, Env & env, Value & v) override; \ + void eval(EvalState & state, Env & env, Value & v) override; \ void bindVars(EvalState & es, const std::shared_ptr & env) override; struct ExprInt : Expr { Value v; - ExprInt(NixInt n) { v.mkInt(n); }; - ExprInt(NixInt::Inner n) { v.mkInt(n); }; + + ExprInt(NixInt n) + { + v.mkInt(n); + }; + + ExprInt(NixInt::Inner n) + { + v.mkInt(n); + }; + Value * maybeThunk(EvalState & state, Env & env) override; COMMON_METHODS }; @@ -119,7 +140,12 @@ struct ExprInt : Expr struct ExprFloat : Expr { Value v; - ExprFloat(NixFloat nf) { v.mkFloat(nf); }; + + ExprFloat(NixFloat nf) + { + v.mkFloat(nf); + }; + Value * maybeThunk(EvalState & state, Env & env) override; COMMON_METHODS }; @@ -128,7 +154,13 @@ struct ExprString : Expr { std::string s; Value v; - ExprString(std::string &&s) : s(std::move(s)) { v.mkString(this->s.data()); }; + + ExprString(std::string && s) + : s(std::move(s)) + { + v.mkString(this->s.data()); + }; + Value * maybeThunk(EvalState & state, Env & env) override; COMMON_METHODS }; @@ -138,10 +170,14 @@ struct ExprPath : Expr ref accessor; std::string s; Value v; - ExprPath(ref accessor, std::string s) : accessor(accessor), s(std::move(s)) + + ExprPath(ref accessor, std::string s) + : accessor(accessor) + , s(std::move(s)) { v.mkPath(&*accessor, this->s.c_str()); } + Value * maybeThunk(EvalState & state, Env & env) override; COMMON_METHODS }; @@ -170,10 +206,18 @@ struct ExprVar : Expr Level level = 0; Displacement displ = 0; - ExprVar(Symbol name) : name(name) { }; - ExprVar(const PosIdx & pos, Symbol name) : pos(pos), name(name) { 
}; + ExprVar(Symbol name) + : name(name) {}; + ExprVar(const PosIdx & pos, Symbol name) + : pos(pos) + , name(name) {}; Value * maybeThunk(EvalState & state, Env & env) override; - PosIdx getPos() const override { return pos; } + + PosIdx getPos() const override + { + return pos; + } + COMMON_METHODS }; @@ -184,7 +228,8 @@ struct ExprVar : Expr */ struct ExprInheritFrom : ExprVar { - ExprInheritFrom(PosIdx pos, Displacement displ): ExprVar(pos, {}) + ExprInheritFrom(PosIdx pos, Displacement displ) + : ExprVar(pos, {}) { this->level = 0; this->displ = displ; @@ -197,11 +242,26 @@ struct ExprInheritFrom : ExprVar struct ExprSelect : Expr { PosIdx pos; - Expr * e, * def; + Expr *e, *def; AttrPath attrPath; - ExprSelect(const PosIdx & pos, Expr * e, AttrPath attrPath, Expr * def) : pos(pos), e(e), def(def), attrPath(std::move(attrPath)) { }; - ExprSelect(const PosIdx & pos, Expr * e, Symbol name) : pos(pos), e(e), def(0) { attrPath.push_back(AttrName(name)); }; - PosIdx getPos() const override { return pos; } + ExprSelect(const PosIdx & pos, Expr * e, AttrPath attrPath, Expr * def) + : pos(pos) + , e(e) + , def(def) + , attrPath(std::move(attrPath)) {}; + + ExprSelect(const PosIdx & pos, Expr * e, Symbol name) + : pos(pos) + , e(e) + , def(0) + { + attrPath.push_back(AttrName(name)); + }; + + PosIdx getPos() const override + { + return pos; + } /** * Evaluate the `a.b.c` part of `a.b.c.d`. This exists mostly for the purpose of :doc in the repl. @@ -209,7 +269,8 @@ struct ExprSelect : Expr * @param[out] attrs The attribute set that should contain the last attribute name (if it exists). * @return The last attribute name in `attrPath` * - * @note This does *not* evaluate the final attribute, and does not fail if that's the only attribute that does not exist. + * @note This does *not* evaluate the final attribute, and does not fail if that's the only attribute that does not + * exist. 
*/ Symbol evalExceptFinalSelect(EvalState & state, Env & env, Value & attrs); @@ -220,8 +281,15 @@ struct ExprOpHasAttr : Expr { Expr * e; AttrPath attrPath; - ExprOpHasAttr(Expr * e, AttrPath attrPath) : e(e), attrPath(std::move(attrPath)) { }; - PosIdx getPos() const override { return e->getPos(); } + ExprOpHasAttr(Expr * e, AttrPath attrPath) + : e(e) + , attrPath(std::move(attrPath)) {}; + + PosIdx getPos() const override + { + return e->getPos(); + } + COMMON_METHODS }; @@ -229,7 +297,9 @@ struct ExprAttrs : Expr { bool recursive; PosIdx pos; - struct AttrDef { + + struct AttrDef + { enum class Kind { /** `attr = expr;` */ Plain, @@ -244,8 +314,10 @@ struct ExprAttrs : Expr PosIdx pos; Displacement displ = 0; // displacement AttrDef(Expr * e, const PosIdx & pos, Kind kind = Kind::Plain) - : kind(kind), e(e), pos(pos) { }; - AttrDef() { }; + : kind(kind) + , e(e) + , pos(pos) {}; + AttrDef() {}; template const T & chooseByKind(const T & plain, const T & inherited, const T & inheritedFrom) const @@ -261,24 +333,37 @@ struct ExprAttrs : Expr } } }; + typedef std::map AttrDefs; AttrDefs attrs; std::unique_ptr> inheritFromExprs; - struct DynamicAttrDef { - Expr * nameExpr, * valueExpr; + + struct DynamicAttrDef + { + Expr *nameExpr, *valueExpr; PosIdx pos; DynamicAttrDef(Expr * nameExpr, Expr * valueExpr, const PosIdx & pos) - : nameExpr(nameExpr), valueExpr(valueExpr), pos(pos) { }; + : nameExpr(nameExpr) + , valueExpr(valueExpr) + , pos(pos) {}; }; + typedef std::vector DynamicAttrDefs; DynamicAttrDefs dynamicAttrs; - ExprAttrs(const PosIdx &pos) : recursive(false), pos(pos) { }; - ExprAttrs() : recursive(false) { }; - PosIdx getPos() const override { return pos; } + ExprAttrs(const PosIdx & pos) + : recursive(false) + , pos(pos) {}; + ExprAttrs() + : recursive(false) {}; + + PosIdx getPos() const override + { + return pos; + } + COMMON_METHODS - std::shared_ptr bindInheritSources( - EvalState & es, const std::shared_ptr & env); + std::shared_ptr bindInheritSources(EvalState & es, const std::shared_ptr & env); Env * buildInheritFromEnv(EvalState & state, Env & up); void showBindings(const SymbolTable & symbols, std::ostream & str) const; }; @@ -286,7 +371,7 @@ struct ExprAttrs : Expr struct ExprList : Expr { std::vector elems; - ExprList() { }; + ExprList() {}; COMMON_METHODS Value * maybeThunk(EvalState & state, Env & env) override; @@ -314,19 +399,18 @@ struct Formals bool has(Symbol arg) const { - auto it = std::lower_bound(formals.begin(), formals.end(), arg, - [] (const Formal & f, const Symbol & sym) { return f.name < sym; }); + auto it = std::lower_bound( + formals.begin(), formals.end(), arg, [](const Formal & f, const Symbol & sym) { return f.name < sym; }); return it != formals.end() && it->name == arg; } std::vector lexicographicOrder(const SymbolTable & symbols) const { std::vector result(formals.begin(), formals.end()); - std::sort(result.begin(), result.end(), - [&] (const Formal & a, const Formal & b) { - std::string_view sa = symbols[a.name], sb = symbols[b.name]; - return sa < sb; - }); + std::sort(result.begin(), result.end(), [&](const Formal & a, const Formal & b) { + std::string_view sa = symbols[a.name], sb = symbols[b.name]; + return sa < sb; + }); return result; } }; @@ -341,17 +425,31 @@ struct ExprLambda : Expr DocComment docComment; ExprLambda(PosIdx pos, Symbol arg, Formals * formals, Expr * body) - : pos(pos), arg(arg), formals(formals), body(body) - { - }; + : pos(pos) + , arg(arg) + , formals(formals) + , body(body) {}; + ExprLambda(PosIdx pos, Formals * 
formals, Expr * body) - : pos(pos), formals(formals), body(body) + : pos(pos) + , formals(formals) + , body(body) { } + void setName(Symbol name) override; std::string showNamePos(const EvalState & state) const; - inline bool hasFormals() const { return formals != nullptr; } - PosIdx getPos() const override { return pos; } + + inline bool hasFormals() const + { + return formals != nullptr; + } + + PosIdx getPos() const override + { + return pos; + } + virtual void setDocComment(DocComment docComment) override; COMMON_METHODS }; @@ -362,13 +460,28 @@ struct ExprCall : Expr std::vector args; PosIdx pos; std::optional cursedOrEndPos; // used during parsing to warn about https://github.com/NixOS/nix/issues/11118 + ExprCall(const PosIdx & pos, Expr * fun, std::vector && args) - : fun(fun), args(args), pos(pos), cursedOrEndPos({}) - { } + : fun(fun) + , args(args) + , pos(pos) + , cursedOrEndPos({}) + { + } + ExprCall(const PosIdx & pos, Expr * fun, std::vector && args, PosIdx && cursedOrEndPos) - : fun(fun), args(args), pos(pos), cursedOrEndPos(cursedOrEndPos) - { } - PosIdx getPos() const override { return pos; } + : fun(fun) + , args(args) + , pos(pos) + , cursedOrEndPos(cursedOrEndPos) + { + } + + PosIdx getPos() const override + { + return pos; + } + virtual void resetCursedOr() override; virtual void warnIfCursedOr(const SymbolTable & symbols, const PosTable & positions) override; COMMON_METHODS @@ -378,90 +491,144 @@ struct ExprLet : Expr { ExprAttrs * attrs; Expr * body; - ExprLet(ExprAttrs * attrs, Expr * body) : attrs(attrs), body(body) { }; + ExprLet(ExprAttrs * attrs, Expr * body) + : attrs(attrs) + , body(body) {}; COMMON_METHODS }; struct ExprWith : Expr { PosIdx pos; - Expr * attrs, * body; + Expr *attrs, *body; size_t prevWith; ExprWith * parentWith; - ExprWith(const PosIdx & pos, Expr * attrs, Expr * body) : pos(pos), attrs(attrs), body(body) { }; - PosIdx getPos() const override { return pos; } + ExprWith(const PosIdx & pos, Expr * attrs, Expr * body) + : pos(pos) + , attrs(attrs) + , body(body) {}; + + PosIdx getPos() const override + { + return pos; + } + COMMON_METHODS }; struct ExprIf : Expr { PosIdx pos; - Expr * cond, * then, * else_; - ExprIf(const PosIdx & pos, Expr * cond, Expr * then, Expr * else_) : pos(pos), cond(cond), then(then), else_(else_) { }; - PosIdx getPos() const override { return pos; } + Expr *cond, *then, *else_; + ExprIf(const PosIdx & pos, Expr * cond, Expr * then, Expr * else_) + : pos(pos) + , cond(cond) + , then(then) + , else_(else_) {}; + + PosIdx getPos() const override + { + return pos; + } + COMMON_METHODS }; struct ExprAssert : Expr { PosIdx pos; - Expr * cond, * body; - ExprAssert(const PosIdx & pos, Expr * cond, Expr * body) : pos(pos), cond(cond), body(body) { }; - PosIdx getPos() const override { return pos; } + Expr *cond, *body; + ExprAssert(const PosIdx & pos, Expr * cond, Expr * body) + : pos(pos) + , cond(cond) + , body(body) {}; + + PosIdx getPos() const override + { + return pos; + } + COMMON_METHODS }; struct ExprOpNot : Expr { Expr * e; - ExprOpNot(Expr * e) : e(e) { }; - PosIdx getPos() const override { return e->getPos(); } + ExprOpNot(Expr * e) + : e(e) {}; + + PosIdx getPos() const override + { + return e->getPos(); + } + COMMON_METHODS }; -#define MakeBinOp(name, s) \ - struct name : Expr \ - { \ - PosIdx pos; \ - Expr * e1, * e2; \ - name(Expr * e1, Expr * e2) : e1(e1), e2(e2) { }; \ - name(const PosIdx & pos, Expr * e1, Expr * e2) : pos(pos), e1(e1), e2(e2) { }; \ - void show(const SymbolTable & symbols, std::ostream & 
str) const override \ - { \ - str << "("; e1->show(symbols, str); str << " " s " "; e2->show(symbols, str); str << ")"; \ - } \ +#define MakeBinOp(name, s) \ + struct name : Expr \ + { \ + PosIdx pos; \ + Expr *e1, *e2; \ + name(Expr * e1, Expr * e2) \ + : e1(e1) \ + , e2(e2) {}; \ + name(const PosIdx & pos, Expr * e1, Expr * e2) \ + : pos(pos) \ + , e1(e1) \ + , e2(e2) {}; \ + void show(const SymbolTable & symbols, std::ostream & str) const override \ + { \ + str << "("; \ + e1->show(symbols, str); \ + str << " " s " "; \ + e2->show(symbols, str); \ + str << ")"; \ + } \ void bindVars(EvalState & es, const std::shared_ptr & env) override \ - { \ - e1->bindVars(es, env); e2->bindVars(es, env); \ - } \ - void eval(EvalState & state, Env & env, Value & v) override; \ - PosIdx getPos() const override { return pos; } \ + { \ + e1->bindVars(es, env); \ + e2->bindVars(es, env); \ + } \ + void eval(EvalState & state, Env & env, Value & v) override; \ + PosIdx getPos() const override \ + { \ + return pos; \ + } \ }; -MakeBinOp(ExprOpEq, "==") -MakeBinOp(ExprOpNEq, "!=") -MakeBinOp(ExprOpAnd, "&&") -MakeBinOp(ExprOpOr, "||") -MakeBinOp(ExprOpImpl, "->") -MakeBinOp(ExprOpUpdate, "//") -MakeBinOp(ExprOpConcatLists, "++") +MakeBinOp(ExprOpEq, "==") MakeBinOp(ExprOpNEq, "!=") MakeBinOp(ExprOpAnd, "&&") MakeBinOp(ExprOpOr, "||") + MakeBinOp(ExprOpImpl, "->") MakeBinOp(ExprOpUpdate, "//") MakeBinOp(ExprOpConcatLists, "++") -struct ExprConcatStrings : Expr + struct ExprConcatStrings : Expr { PosIdx pos; bool forceString; std::vector> * es; ExprConcatStrings(const PosIdx & pos, bool forceString, std::vector> * es) - : pos(pos), forceString(forceString), es(es) { }; - PosIdx getPos() const override { return pos; } + : pos(pos) + , forceString(forceString) + , es(es) {}; + + PosIdx getPos() const override + { + return pos; + } + COMMON_METHODS }; struct ExprPos : Expr { PosIdx pos; - ExprPos(const PosIdx & pos) : pos(pos) { }; - PosIdx getPos() const override { return pos; } + ExprPos(const PosIdx & pos) + : pos(pos) {}; + + PosIdx getPos() const override + { + return pos; + } + COMMON_METHODS }; @@ -469,14 +636,16 @@ struct ExprPos : Expr struct ExprBlackHole : Expr { void show(const SymbolTable & symbols, std::ostream & str) const override {} + void eval(EvalState & state, Env & env, Value & v) override; + void bindVars(EvalState & es, const std::shared_ptr & env) override {} + [[noreturn]] static void throwInfiniteRecursionError(EvalState & state, Value & v); }; extern ExprBlackHole eBlackHole; - /* Static environments are used to map variable names onto (level, displacement) pairs used to obtain the value of the variable at runtime. 
*/ @@ -498,8 +667,9 @@ struct StaticEnv void sort() { - std::stable_sort(vars.begin(), vars.end(), - [](const Vars::value_type & a, const Vars::value_type & b) { return a.first < b.first; }); + std::stable_sort(vars.begin(), vars.end(), [](const Vars::value_type & a, const Vars::value_type & b) { + return a.first < b.first; + }); } void deduplicate() @@ -507,7 +677,8 @@ struct StaticEnv auto it = vars.begin(), jt = it, end = vars.end(); while (jt != end) { *it = *jt++; - while (jt != end && it->first == jt->first) *it = *jt++; + while (jt != end && it->first == jt->first) + *it = *jt++; it++; } vars.erase(it, end); @@ -517,10 +688,10 @@ struct StaticEnv { Vars::value_type key(name, 0); auto i = std::lower_bound(vars.begin(), vars.end(), key); - if (i != vars.end() && i->first == name) return i; + if (i != vars.end() && i->first == name) + return i; return vars.end(); } }; - -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/parser-state.hh b/src/libexpr/include/nix/expr/parser-state.hh index 0505913d0..dd99192c0 100644 --- a/src/libexpr/include/nix/expr/parser-state.hh +++ b/src/libexpr/include/nix/expr/parser-state.hh @@ -17,7 +17,11 @@ struct StringToken const char * p; size_t l; bool hasIndentation; - operator std::string_view() const { return {p, l}; } + + operator std::string_view() const + { + return {p, l}; + } }; // This type must be trivially copyable; see YYLTYPE_IS_TRIVIAL in parser.y. @@ -29,12 +33,14 @@ struct ParserLocation // backup to recover from yyless(0) int stashedBeginOffset, stashedEndOffset; - void stash() { + void stash() + { stashedBeginOffset = beginOffset; stashedEndOffset = endOffset; } - void unstash() { + void unstash() + { beginOffset = stashedBeginOffset; endOffset = stashedEndOffset; } @@ -87,32 +93,30 @@ struct ParserState void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos); void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos); - void addAttr(ExprAttrs * attrs, AttrPath && attrPath, const ParserLocation & loc, Expr * e, const ParserLocation & exprLoc); + void addAttr( + ExprAttrs * attrs, AttrPath && attrPath, const ParserLocation & loc, Expr * e, const ParserLocation & exprLoc); void addAttr(ExprAttrs * attrs, AttrPath & attrPath, const Symbol & symbol, ExprAttrs::AttrDef && def); Formals * validateFormals(Formals * formals, PosIdx pos = noPos, Symbol arg = {}); - Expr * stripIndentation(const PosIdx pos, - std::vector>> && es); + Expr * stripIndentation(const PosIdx pos, std::vector>> && es); PosIdx at(const ParserLocation & loc); }; inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos) { - throw ParseError({ - .msg = HintFmt("attribute '%1%' already defined at %2%", - showAttrPath(symbols, attrPath), positions[prevPos]), - .pos = positions[pos] - }); + throw ParseError( + {.msg = HintFmt("attribute '%1%' already defined at %2%", showAttrPath(symbols, attrPath), positions[prevPos]), + .pos = positions[pos]}); } inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos) { - throw ParseError({ - .msg = HintFmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]), - .pos = positions[pos] - }); + throw ParseError( + {.msg = HintFmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]), + .pos = positions[pos]}); } -inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, const ParserLocation & loc, Expr * e, const ParserLocation & exprLoc) +inline void 
ParserState::addAttr( + ExprAttrs * attrs, AttrPath && attrPath, const ParserLocation & loc, Expr * e, const ParserLocation & exprLoc) { AttrPath::iterator i; // All attrpaths have at least one attr @@ -159,7 +163,8 @@ inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, const * Precondition: attrPath is used for error messages and should already contain * symbol as its last element. */ -inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath & attrPath, const Symbol & symbol, ExprAttrs::AttrDef && def) +inline void +ParserState::addAttr(ExprAttrs * attrs, AttrPath & attrPath, const Symbol & symbol, ExprAttrs::AttrDef && def) { ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(symbol); if (j != attrs->attrs.end()) { @@ -189,12 +194,14 @@ inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath & attrPath, const S attrPath.pop_back(); } ae->attrs.clear(); - jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), + jAttrs->dynamicAttrs.insert( + jAttrs->dynamicAttrs.end(), std::make_move_iterator(ae->dynamicAttrs.begin()), std::make_move_iterator(ae->dynamicAttrs.end())); ae->dynamicAttrs.clear(); if (ae->inheritFromExprs) { - jAttrs->inheritFromExprs->insert(jAttrs->inheritFromExprs->end(), + jAttrs->inheritFromExprs->insert( + jAttrs->inheritFromExprs->end(), std::make_move_iterator(ae->inheritFromExprs->begin()), std::make_move_iterator(ae->inheritFromExprs->end())); ae->inheritFromExprs = nullptr; @@ -211,10 +218,9 @@ inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath & attrPath, const S inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Symbol arg) { - std::sort(formals->formals.begin(), formals->formals.end(), - [] (const auto & a, const auto & b) { - return std::tie(a.name, a.pos) < std::tie(b.name, b.pos); - }); + std::sort(formals->formals.begin(), formals->formals.end(), [](const auto & a, const auto & b) { + return std::tie(a.name, a.pos) < std::tie(b.name, b.pos); + }); std::optional> duplicate; for (size_t i = 0; i + 1 < formals->formals.size(); i++) { @@ -224,24 +230,22 @@ inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Sym duplicate = std::min(thisDup, duplicate.value_or(thisDup)); } if (duplicate) - throw ParseError({ - .msg = HintFmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), - .pos = positions[duplicate->second] - }); + throw ParseError( + {.msg = HintFmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), + .pos = positions[duplicate->second]}); if (arg && formals->has(arg)) - throw ParseError({ - .msg = HintFmt("duplicate formal function argument '%1%'", symbols[arg]), - .pos = positions[pos] - }); + throw ParseError( + {.msg = HintFmt("duplicate formal function argument '%1%'", symbols[arg]), .pos = positions[pos]}); return formals; } -inline Expr * ParserState::stripIndentation(const PosIdx pos, - std::vector>> && es) +inline Expr * +ParserState::stripIndentation(const PosIdx pos, std::vector>> && es) { - if (es.empty()) return new ExprString(""); + if (es.empty()) + return new ExprString(""); /* Figure out the minimum indentation. Note that by design whitespace-only final lines are not taken into account. (So @@ -255,7 +259,8 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos, /* Anti-quotations and escaped characters end the current start-of-line whitespace. 
*/ if (atStartOfLine) { atStartOfLine = false; - if (curIndent < minIndent) minIndent = curIndent; + if (curIndent < minIndent) + minIndent = curIndent; } continue; } @@ -269,7 +274,8 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos, curIndent = 0; } else { atStartOfLine = false; - if (curIndent < minIndent) minIndent = curIndent; + if (curIndent < minIndent) + minIndent = curIndent; } } else if (str->p[j] == '\n') { atStartOfLine = true; @@ -284,20 +290,19 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos, size_t curDropped = 0; size_t n = es.size(); auto i = es.begin(); - const auto trimExpr = [&] (Expr * e) { + const auto trimExpr = [&](Expr * e) { atStartOfLine = false; curDropped = 0; es2->emplace_back(i->first, e); }; - const auto trimString = [&] (const StringToken & t) { + const auto trimString = [&](const StringToken & t) { std::string s2; for (size_t j = 0; j < t.l; ++j) { if (atStartOfLine) { if (t.p[j] == ' ') { if (curDropped++ >= minIndent) s2 += t.p[j]; - } - else if (t.p[j] == '\n') { + } else if (t.p[j] == '\n') { curDropped = 0; s2 += t.p[j]; } else { @@ -307,7 +312,8 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos, } } else { s2 += t.p[j]; - if (t.p[j] == '\n') atStartOfLine = true; + if (t.p[j] == '\n') + atStartOfLine = true; } } @@ -325,20 +331,20 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos, } }; for (; i != es.end(); ++i, --n) { - std::visit(overloaded { trimExpr, trimString }, i->second); + std::visit(overloaded{trimExpr, trimString}, i->second); } // If there is nothing at all, return the empty string directly. // This also ensures that equivalent empty strings result in the same ast, which is helpful when testing formatters. if (es2->size() == 0) { - auto *const result = new ExprString(""); + auto * const result = new ExprString(""); delete es2; return result; } /* If this is a single string, then don't do a concatenation. 
*/ if (es2->size() == 1 && dynamic_cast((*es2)[0].second)) { - auto *const result = (*es2)[0].second; + auto * const result = (*es2)[0].second; delete es2; return result; } @@ -355,4 +361,4 @@ inline PosIdx ParserState::at(const ParserLocation & loc) return positions.add(origin, loc.beginOffset); } -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/primops.hh b/src/libexpr/include/nix/expr/primops.hh index 0b4ecdd50..885a53e9a 100644 --- a/src/libexpr/include/nix/expr/primops.hh +++ b/src/libexpr/include/nix/expr/primops.hh @@ -49,13 +49,13 @@ struct RegisterPrimOp /** * Load a ValueInitializer from a DSO and return whatever it initializes */ -void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Value & v); +void prim_importNative(EvalState & state, const PosIdx pos, Value ** args, Value & v); /** * Execute a program and parse its output */ -void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v); +void prim_exec(EvalState & state, const PosIdx pos, Value ** args, Value & v); void makePositionThunks(EvalState & state, const PosIdx pos, Value & line, Value & column); -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/print-ambiguous.hh b/src/libexpr/include/nix/expr/print-ambiguous.hh index 9e5a27e6d..c0d811d4b 100644 --- a/src/libexpr/include/nix/expr/print-ambiguous.hh +++ b/src/libexpr/include/nix/expr/print-ambiguous.hh @@ -16,10 +16,6 @@ namespace nix { * See: https://github.com/NixOS/nix/issues/9730 */ void printAmbiguous( - Value &v, - const SymbolTable &symbols, - std::ostream &str, - std::set *seen, - int depth); + Value & v, const SymbolTable & symbols, std::ostream & str, std::set * seen, int depth); -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/print-options.hh b/src/libexpr/include/nix/expr/print-options.hh index 9ad54e532..ffb80abc3 100644 --- a/src/libexpr/include/nix/expr/print-options.hh +++ b/src/libexpr/include/nix/expr/print-options.hh @@ -110,7 +110,7 @@ struct PrintOptions * `PrintOptions` for unknown and therefore potentially large values in error messages, * to avoid printing "too much" output. 
*/ -static PrintOptions errorPrintOptions = PrintOptions { +static PrintOptions errorPrintOptions = PrintOptions{ .ansiColors = true, .maxDepth = 10, .maxAttrs = 10, @@ -118,4 +118,4 @@ static PrintOptions errorPrintOptions = PrintOptions { .maxStringLength = 1024, }; -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/print.hh b/src/libexpr/include/nix/expr/print.hh index ac9bf23a4..229f7159d 100644 --- a/src/libexpr/include/nix/expr/print.hh +++ b/src/libexpr/include/nix/expr/print.hh @@ -26,10 +26,14 @@ struct Value; * @param s The logical string */ std::ostream & printLiteralString(std::ostream & o, std::string_view s); -inline std::ostream & printLiteralString(std::ostream & o, const char * s) { + +inline std::ostream & printLiteralString(std::ostream & o, const char * s) +{ return printLiteralString(o, std::string_view(s)); } -inline std::ostream & printLiteralString(std::ostream & o, const std::string & s) { + +inline std::ostream & printLiteralString(std::ostream & o, const std::string & s) +{ return printLiteralString(o, std::string_view(s)); } @@ -60,27 +64,31 @@ bool isReservedKeyword(const std::string_view str); */ std::ostream & printIdentifier(std::ostream & o, std::string_view s); -void printValue(EvalState & state, std::ostream & str, Value & v, PrintOptions options = PrintOptions {}); +void printValue(EvalState & state, std::ostream & str, Value & v, PrintOptions options = PrintOptions{}); /** * A partially-applied form of `printValue` which can be formatted using `<<` * without allocating an intermediate string. */ -class ValuePrinter { - friend std::ostream & operator << (std::ostream & output, const ValuePrinter & printer); +class ValuePrinter +{ + friend std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer); private: EvalState & state; Value & value; PrintOptions options; public: - ValuePrinter(EvalState & state, Value & value, PrintOptions options = PrintOptions {}) - : state(state), value(value), options(options) { } + ValuePrinter(EvalState & state, Value & value, PrintOptions options = PrintOptions{}) + : state(state) + , value(value) + , options(options) + { + } }; std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer); - /** * `ValuePrinter` does its own ANSI formatting, so we don't color it * magenta. 
@@ -88,4 +96,4 @@ std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer); template<> HintFmt & HintFmt::operator%(const ValuePrinter & value); -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/repl-exit-status.hh b/src/libexpr/include/nix/expr/repl-exit-status.hh index 08299ff61..5437e1541 100644 --- a/src/libexpr/include/nix/expr/repl-exit-status.hh +++ b/src/libexpr/include/nix/expr/repl-exit-status.hh @@ -17,4 +17,4 @@ enum class ReplExitStatus { Continue, }; -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/search-path.hh b/src/libexpr/include/nix/expr/search-path.hh index 202527fd2..7d7664e8e 100644 --- a/src/libexpr/include/nix/expr/search-path.hh +++ b/src/libexpr/include/nix/expr/search-path.hh @@ -105,4 +105,4 @@ struct LookupPath::Elem static LookupPath::Elem parse(std::string_view rawElem); }; -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/symbol-table.hh b/src/libexpr/include/nix/expr/symbol-table.hh index 4dedf3d91..ec1456e2d 100644 --- a/src/libexpr/include/nix/expr/symbol-table.hh +++ b/src/libexpr/include/nix/expr/symbol-table.hh @@ -41,16 +41,32 @@ class Symbol private: uint32_t id; - explicit Symbol(uint32_t id) noexcept : id(id) {} + explicit Symbol(uint32_t id) noexcept + : id(id) + { + } public: - Symbol() noexcept : id(0) {} + Symbol() noexcept + : id(0) + { + } [[gnu::always_inline]] - explicit operator bool() const noexcept { return id > 0; } + explicit operator bool() const noexcept + { + return id > 0; + } - auto operator<=>(const Symbol other) const noexcept { return id <=> other.id; } - bool operator==(const Symbol other) const noexcept { return id == other.id; } + auto operator<=>(const Symbol other) const noexcept + { + return id <=> other.id; + } + + bool operator==(const Symbol other) const noexcept + { + return id == other.id; + } friend class std::hash; }; @@ -82,11 +98,16 @@ class SymbolStr : store(store) , s(s) , hash(HashType{}(s)) - , alloc(stringAlloc) {} + , alloc(stringAlloc) + { + } }; public: - SymbolStr(const SymbolValue & s) noexcept : s(&s) {} + SymbolStr(const SymbolValue & s) noexcept + : s(&s) + { + } SymbolStr(const Key & key) { @@ -109,7 +130,7 @@ public: this->s = &v; } - bool operator == (std::string_view s2) const noexcept + bool operator==(std::string_view s2) const noexcept { return *s == s2; } @@ -120,13 +141,12 @@ public: return s->c_str(); } - [[gnu::always_inline]] - operator std::string_view () const noexcept + [[gnu::always_inline]] operator std::string_view() const noexcept { return *s; } - friend std::ostream & operator <<(std::ostream & os, const SymbolStr & symbol); + friend std::ostream & operator<<(std::ostream & os, const SymbolStr & symbol); [[gnu::always_inline]] bool empty() const noexcept @@ -216,7 +236,8 @@ public: /** * Converts a string into a symbol. */ - Symbol create(std::string_view s) { + Symbol create(std::string_view s) + { // Most symbols are looked up more than once, so we trade off insertion performance // for lookup performance. // FIXME: make this thread-safe. 
@@ -255,7 +276,7 @@ public: } }; -} +} // namespace nix template<> struct std::hash diff --git a/src/libexpr/include/nix/expr/value-to-json.hh b/src/libexpr/include/nix/expr/value-to-json.hh index 1a6911347..b19c16726 100644 --- a/src/libexpr/include/nix/expr/value-to-json.hh +++ b/src/libexpr/include/nix/expr/value-to-json.hh @@ -10,13 +10,18 @@ namespace nix { -nlohmann::json printValueAsJSON(EvalState & state, bool strict, - Value & v, const PosIdx pos, NixStringContext & context, bool copyToStore = true); - -void printValueAsJSON(EvalState & state, bool strict, - Value & v, const PosIdx pos, std::ostream & str, NixStringContext & context, bool copyToStore = true); +nlohmann::json printValueAsJSON( + EvalState & state, bool strict, Value & v, const PosIdx pos, NixStringContext & context, bool copyToStore = true); +void printValueAsJSON( + EvalState & state, + bool strict, + Value & v, + const PosIdx pos, + std::ostream & str, + NixStringContext & context, + bool copyToStore = true); MakeError(JSONSerializationError, Error); -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/value-to-xml.hh b/src/libexpr/include/nix/expr/value-to-xml.hh index e22325de5..50a7c43cc 100644 --- a/src/libexpr/include/nix/expr/value-to-xml.hh +++ b/src/libexpr/include/nix/expr/value-to-xml.hh @@ -9,7 +9,13 @@ namespace nix { -void printValueAsXML(EvalState & state, bool strict, bool location, - Value & v, std::ostream & out, NixStringContext & context, const PosIdx pos); +void printValueAsXML( + EvalState & state, + bool strict, + bool location, + Value & v, + std::ostream & out, + NixStringContext & context, + const PosIdx pos); } diff --git a/src/libexpr/include/nix/expr/value.hh b/src/libexpr/include/nix/expr/value.hh index 098effa29..a2833679b 100644 --- a/src/libexpr/include/nix/expr/value.hh +++ b/src/libexpr/include/nix/expr/value.hh @@ -176,6 +176,7 @@ public: { return &elems[0]; } + iterator end() { return &elems[size]; @@ -306,7 +307,7 @@ NIX_VALUE_STORAGE_FOR_EACH_FIELD(NIX_VALUE_PAYLOAD_TYPE) template inline constexpr InternalType payloadTypeToInternalType = PayloadTypeToInternalType::value; -} +} // namespace detail /** * Discriminated union of types stored in the value. @@ -865,10 +866,12 @@ public: { return isa(); }; + inline bool isApp() const { return isa(); }; + inline bool isBlackhole() const; // type() == nFunction @@ -876,10 +879,12 @@ public: { return isa(); }; + inline bool isPrimOp() const { return isa(); }; + inline bool isPrimOpApp() const { return isa(); @@ -1171,4 +1176,4 @@ typedef std::shared_ptr RootValue; RootValue allocRootValue(Value * v); void forceNoNullByte(std::string_view s, std::function = nullptr); -} +} // namespace nix diff --git a/src/libexpr/include/nix/expr/value/context.hh b/src/libexpr/include/nix/expr/value/context.hh index f2de184ea..dcfacbb21 100644 --- a/src/libexpr/include/nix/expr/value/context.hh +++ b/src/libexpr/include/nix/expr/value/context.hh @@ -15,7 +15,7 @@ public: std::string_view raw; template - BadNixStringContextElem(std::string_view raw_, const Args & ... args) + BadNixStringContextElem(std::string_view raw_, const Args &... args) : Error("") { raw = raw_; @@ -24,7 +24,8 @@ public: } }; -struct NixStringContextElem { +struct NixStringContextElem +{ /** * Plain opaque path to some store object. * @@ -41,7 +42,8 @@ struct NixStringContextElem { * * Encoded in the form `=`. 
*/ - struct DrvDeep { + struct DrvDeep + { StorePath drvPath; GENERATE_CMP(DrvDeep, me->drvPath); @@ -54,11 +56,7 @@ struct NixStringContextElem { */ using Built = SingleDerivedPath::Built; - using Raw = std::variant< - Opaque, - DrvDeep, - Built - >; + using Raw = std::variant; Raw raw; @@ -74,12 +72,11 @@ struct NixStringContextElem { * * @param xpSettings Stop-gap to avoid globals during unit tests. */ - static NixStringContextElem parse( - std::string_view s, - const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + static NixStringContextElem + parse(std::string_view s, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); std::string to_string() const; }; typedef std::set NixStringContext; -} +} // namespace nix diff --git a/src/libexpr/json-to-value.cc b/src/libexpr/json-to-value.cc index e38ac7db4..9c645e7fd 100644 --- a/src/libexpr/json-to-value.cc +++ b/src/libexpr/json-to-value.cc @@ -12,8 +12,10 @@ namespace nix { // for more information, refer to // https://github.com/nlohmann/json/blob/master/include/nlohmann/detail/input/json_sax.hpp -class JSONSax : nlohmann::json_sax { - class JSONState { +class JSONSax : nlohmann::json_sax +{ + class JSONState + { protected: std::unique_ptr parent; RootValue v; @@ -22,22 +24,36 @@ class JSONSax : nlohmann::json_sax { { throw std::logic_error("tried to close toplevel json parser state"); } - explicit JSONState(std::unique_ptr && p) : parent(std::move(p)) {} - explicit JSONState(Value * v) : v(allocRootValue(v)) {} + + explicit JSONState(std::unique_ptr && p) + : parent(std::move(p)) + { + } + + explicit JSONState(Value * v) + : v(allocRootValue(v)) + { + } + JSONState(JSONState & p) = delete; + Value & value(EvalState & state) { if (!v) v = allocRootValue(state.allocValue()); return **v; } + virtual ~JSONState() {} + virtual void add() {} }; - class JSONObjectState : public JSONState { + class JSONObjectState : public JSONState + { using JSONState::JSONState; ValueMap attrs; + std::unique_ptr resolve(EvalState & state) override { auto attrs2 = state.buildBindings(attrs.size()); @@ -46,7 +62,11 @@ class JSONSax : nlohmann::json_sax { parent->value(state).mkAttrs(attrs2); return std::move(parent); } - void add() override { v = nullptr; } + + void add() override + { + v = nullptr; + } public: void key(string_t & name, EvalState & state) { @@ -55,8 +75,10 @@ class JSONSax : nlohmann::json_sax { } }; - class JSONListState : public JSONState { + class JSONListState : public JSONState + { ValueVector values; + std::unique_ptr resolve(EvalState & state) override { auto list = state.buildList(values.size()); @@ -65,12 +87,15 @@ class JSONSax : nlohmann::json_sax { parent->value(state).mkList(list); return std::move(parent); } - void add() override { + + void add() override + { values.push_back(*v); v = nullptr; } public: - JSONListState(std::unique_ptr && p, std::size_t reserve) : JSONState(std::move(p)) + JSONListState(std::unique_ptr && p, std::size_t reserve) + : JSONState(std::move(p)) { values.reserve(reserve); } @@ -80,7 +105,9 @@ class JSONSax : nlohmann::json_sax { std::unique_ptr rs; public: - JSONSax(EvalState & state, Value & v) : state(state), rs(new JSONState(&v)) {}; + JSONSax(EvalState & state, Value & v) + : state(state) + , rs(new JSONState(&v)) {}; bool null() override { @@ -130,7 +157,7 @@ public: } #if NLOHMANN_JSON_VERSION_MAJOR >= 3 && NLOHMANN_JSON_VERSION_MINOR >= 8 - bool binary(binary_t&) override + bool binary(binary_t &) override { // This function ought to be unreachable 
assert(false); @@ -146,27 +173,30 @@ public: bool key(string_t & name) override { - dynamic_cast(rs.get())->key(name, state); + dynamic_cast(rs.get())->key(name, state); return true; } - bool end_object() override { + bool end_object() override + { rs = rs->resolve(state); rs->add(); return true; } - bool end_array() override { + bool end_array() override + { return end_object(); } - bool start_array(size_t len) override { - rs = std::make_unique(std::move(rs), - len != std::numeric_limits::max() ? len : 128); + bool start_array(size_t len) override + { + rs = std::make_unique(std::move(rs), len != std::numeric_limits::max() ? len : 128); return true; } - bool parse_error(std::size_t, const std::string&, const nlohmann::detail::exception& ex) override { + bool parse_error(std::size_t, const std::string &, const nlohmann::detail::exception & ex) override + { throw JSONParseError("%s", ex.what()); } }; @@ -179,4 +209,4 @@ void parseJSON(EvalState & state, const std::string_view & s_, Value & v) throw JSONParseError("Invalid JSON Value"); } -} +} // namespace nix diff --git a/src/libexpr/lexer-helpers.hh b/src/libexpr/lexer-helpers.hh index 225eb157a..49865f794 100644 --- a/src/libexpr/lexer-helpers.hh +++ b/src/libexpr/lexer-helpers.hh @@ -14,4 +14,4 @@ void initLoc(YYLTYPE * loc); void adjustLoc(yyscan_t yyscanner, YYLTYPE * loc, const char * s, size_t len); -} // namespace nix::lexer +} // namespace nix::lexer::internal diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 92071b22d..c0a25d1d4 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -17,7 +17,7 @@ ExprBlackHole eBlackHole; // FIXME: remove, because *symbols* are abstract and do not have a single // textual representation; see printIdentifier() -std::ostream & operator <<(std::ostream & str, const SymbolStr & symbol) +std::ostream & operator<<(std::ostream & str, const SymbolStr & symbol) { std::string_view s = symbol; return printIdentifier(str, s); @@ -76,7 +76,8 @@ void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) co { typedef const decltype(attrs)::value_type * Attr; std::vector sorted; - for (auto & i : attrs) sorted.push_back(&i); + for (auto & i : attrs) + sorted.push_back(&i); std::sort(sorted.begin(), sorted.end(), [&](Attr a, Attr b) { std::string_view sa = symbols[a->first], sb = symbols[b->first]; return sa < sb; @@ -102,14 +103,16 @@ void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) co } if (!inherits.empty()) { str << "inherit"; - for (auto sym : inherits) str << " " << symbols[sym]; + for (auto sym : inherits) + str << " " << symbols[sym]; str << "; "; } for (const auto & [from, syms] : inheritsFrom) { str << "inherit ("; (*inheritFromExprs)[from]->show(symbols, str); str << ")"; - for (auto sym : syms) str << " " << symbols[sym]; + for (auto sym : syms) + str << " " << symbols[sym]; str << "; "; } for (auto & i : sorted) { @@ -130,7 +133,8 @@ void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) co void ExprAttrs::show(const SymbolTable & symbols, std::ostream & str) const { - if (recursive) str << "rec "; + if (recursive) + str << "rec "; str << "{ "; showBindings(symbols, str); str << "}"; @@ -157,7 +161,10 @@ void ExprLambda::show(const SymbolTable & symbols, std::ostream & str) const // same expression being printed in two different ways depending on its // context. always use lexicographic ordering to avoid this. 
for (auto & i : formals->lexicographicOrder(symbols)) { - if (first) first = false; else str << ", "; + if (first) + first = false; + else + str << ", "; str << symbols[i.name]; if (i.def) { str << " ? "; @@ -165,13 +172,16 @@ void ExprLambda::show(const SymbolTable & symbols, std::ostream & str) const } } if (formals->ellipsis) { - if (!first) str << ", "; + if (!first) + str << ", "; str << "..."; } str << " }"; - if (arg) str << " @ "; + if (arg) + str << " @ "; } - if (arg) str << symbols[arg]; + if (arg) + str << symbols[arg]; str << ": "; body->show(symbols, str); str << ")"; @@ -182,7 +192,7 @@ void ExprCall::show(const SymbolTable & symbols, std::ostream & str) const str << '('; fun->show(symbols, str); for (auto e : args) { - str << ' '; + str << ' '; e->show(symbols, str); } str << ')'; @@ -237,7 +247,10 @@ void ExprConcatStrings::show(const SymbolTable & symbols, std::ostream & str) co bool first = true; str << "("; for (auto & i : *es) { - if (first) first = false; else str << " + "; + if (first) + first = false; + else + str << " + "; i.second->show(symbols, str); } str << ")"; @@ -248,13 +261,15 @@ void ExprPos::show(const SymbolTable & symbols, std::ostream & str) const str << "__curPos"; } - std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath) { std::ostringstream out; bool first = true; for (auto & i : attrPath) { - if (!first) out << '.'; else first = false; + if (!first) + out << '.'; + else + first = false; if (i.symbol) out << symbols[i.symbol]; else { @@ -266,7 +281,6 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath) return out.str(); } - /* Computing levels/displacements for variables. */ void Expr::bindVars(EvalState & es, const std::shared_ptr & env) @@ -312,7 +326,8 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr & int withLevel = -1; for (curEnv = env.get(), level = 0; curEnv; curEnv = curEnv->up.get(), level++) { if (curEnv->isWith) { - if (withLevel == -1) withLevel = level; + if (withLevel == -1) + withLevel = level; } else { auto i = curEnv->find(name); if (i != curEnv->vars.end()) { @@ -327,10 +342,7 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr & enclosing `with'. If there is no `with', then we can issue an "undefined variable" error now. 
*/ if (withLevel == -1) - es.error( - "undefined variable '%1%'", - es.symbols[name] - ).atPos(pos).debugThrow(); + es.error("undefined variable '%1%'", es.symbols[name]).atPos(pos).debugThrow(); for (auto * e = env.get(); e && !fromWith; e = e->up.get()) fromWith = e->isWith; this->level = withLevel; @@ -348,7 +360,8 @@ void ExprSelect::bindVars(EvalState & es, const std::shared_ptr es.exprEnvs.insert(std::make_pair(this, env)); e->bindVars(es, env); - if (def) def->bindVars(es, env); + if (def) + def->bindVars(es, env); for (auto & i : attrPath) if (!i.symbol) i.expr->bindVars(es, env); @@ -365,8 +378,8 @@ void ExprOpHasAttr::bindVars(EvalState & es, const std::shared_ptrbindVars(es, env); } -std::shared_ptr ExprAttrs::bindInheritSources( - EvalState & es, const std::shared_ptr & env) +std::shared_ptr +ExprAttrs::bindInheritSources(EvalState & es, const std::shared_ptr & env) { if (!inheritFromExprs) return nullptr; @@ -392,7 +405,7 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr es.exprEnvs.insert(std::make_pair(this, env)); if (recursive) { - auto newEnv = [&] () -> std::shared_ptr { + auto newEnv = [&]() -> std::shared_ptr { auto newEnv = std::make_shared(nullptr, env, attrs.size()); Displacement displ = 0; @@ -411,8 +424,7 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr i.nameExpr->bindVars(es, newEnv); i.valueExpr->bindVars(es, newEnv); } - } - else { + } else { auto inheritFromEnv = bindInheritSources(es, env); for (auto & i : attrs) @@ -439,14 +451,13 @@ void ExprLambda::bindVars(EvalState & es, const std::shared_ptr if (es.debugRepl) es.exprEnvs.insert(std::make_pair(this, env)); - auto newEnv = std::make_shared( - nullptr, env, - (hasFormals() ? formals->formals.size() : 0) + - (!arg ? 0 : 1)); + auto newEnv = + std::make_shared(nullptr, env, (hasFormals() ? formals->formals.size() : 0) + (!arg ? 0 : 1)); Displacement displ = 0; - if (arg) newEnv->vars.emplace_back(arg, displ++); + if (arg) + newEnv->vars.emplace_back(arg, displ++); if (hasFormals()) { for (auto & i : formals->formals) @@ -455,7 +466,8 @@ void ExprLambda::bindVars(EvalState & es, const std::shared_ptr newEnv->sort(); for (auto & i : formals->formals) - if (i.def) i.def->bindVars(es, newEnv); + if (i.def) + i.def->bindVars(es, newEnv); } body->bindVars(es, newEnv); @@ -473,7 +485,7 @@ void ExprCall::bindVars(EvalState & es, const std::shared_ptr & void ExprLet::bindVars(EvalState & es, const std::shared_ptr & env) { - auto newEnv = [&] () -> std::shared_ptr { + auto newEnv = [&]() -> std::shared_ptr { auto newEnv = std::make_shared(nullptr, env, attrs->attrs.size()); Displacement displ = 0; @@ -562,13 +574,9 @@ void ExprPos::bindVars(EvalState & es, const std::shared_ptr & es.exprEnvs.insert(std::make_pair(this, env)); } - /* Storing function names. */ -void Expr::setName(Symbol name) -{ -} - +void Expr::setName(Symbol name) {} void ExprLambda::setName(Symbol name) { @@ -576,16 +584,14 @@ void ExprLambda::setName(Symbol name) body->setName(name); } - std::string ExprLambda::showNamePos(const EvalState & state) const { - std::string id(name - ? concatStrings("'", state.symbols[name], "'") - : "anonymous function"); + std::string id(name ? concatStrings("'", state.symbols[name], "'") : "anonymous function"); return fmt("%1% at %2%", id, state.positions[pos]); } -void ExprLambda::setDocComment(DocComment docComment) { +void ExprLambda::setDocComment(DocComment docComment) +{ // RFC 145 specifies that the innermost doc comment wins. 
// See https://github.com/NixOS/rfcs/blob/master/rfcs/0145-doc-strings.md#ambiguous-placement if (!this->docComment) { @@ -606,11 +612,12 @@ void ExprLambda::setDocComment(DocComment docComment) { size_t SymbolTable::totalSize() const { size_t n = 0; - dump([&] (SymbolStr s) { n += s.size(); }); + dump([&](SymbolStr s) { n += s.size(); }); return n; } -std::string DocComment::getInnerText(const PosTable & positions) const { +std::string DocComment::getInnerText(const PosTable & positions) const +{ auto beginPos = positions[begin]; auto endPos = positions[end]; auto docCommentStr = beginPos.getSnippetUpTo(endPos).value_or(""); @@ -628,8 +635,6 @@ std::string DocComment::getInnerText(const PosTable & positions) const { return docStr; } - - /* ‘Cursed or’ handling. * * In parser.y, every use of expr_select in a production must call one of the @@ -647,13 +652,16 @@ void ExprCall::warnIfCursedOr(const SymbolTable & symbols, const PosTable & posi { if (cursedOrEndPos.has_value()) { std::ostringstream out; - out << "at " << positions[pos] << ": " + out << "at " << positions[pos] + << ": " "This expression uses `or` as an identifier in a way that will change in a future Nix release.\n" "Wrap this entire expression in parentheses to preserve its current meaning:\n" - " (" << positions[pos].getSnippetUpTo(positions[*cursedOrEndPos]).value_or("could not read expression") << ")\n" + " (" + << positions[pos].getSnippetUpTo(positions[*cursedOrEndPos]).value_or("could not read expression") + << ")\n" "Give feedback at https://github.com/NixOS/nix/pull/11121"; warn(out.str()); } } -} +} // namespace nix diff --git a/src/libexpr/paths.cc b/src/libexpr/paths.cc index c5107de3a..f90bc37df 100644 --- a/src/libexpr/paths.cc +++ b/src/libexpr/paths.cc @@ -18,4 +18,4 @@ SourcePath EvalState::storePath(const StorePath & path) return {rootFS, CanonPath{store->printStorePath(path)}}; } -} +} // namespace nix diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 99ca19d7e..a645f546d 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -29,14 +29,13 @@ #include #ifndef _WIN32 -# include +# include #endif #include namespace nix { - /************************************************************* * Miscellaneous *************************************************************/ @@ -67,48 +66,50 @@ StringMap EvalState::realiseContext(const NixStringContext & context, StorePathS if (!store->isValidPath(p)) error(store->printStorePath(p)).debugThrow(); }; - std::visit(overloaded { - [&](const NixStringContextElem::Built & b) { - drvs.push_back(DerivedPath::Built { - .drvPath = b.drvPath, - .outputs = OutputsSpec::Names { b.output }, - }); - ensureValid(b.drvPath->getBaseStorePath()); + std::visit( + overloaded{ + [&](const NixStringContextElem::Built & b) { + drvs.push_back( + DerivedPath::Built{ + .drvPath = b.drvPath, + .outputs = OutputsSpec::Names{b.output}, + }); + ensureValid(b.drvPath->getBaseStorePath()); + }, + [&](const NixStringContextElem::Opaque & o) { + ensureValid(o.path); + if (maybePathsOut) + maybePathsOut->emplace(o.path); + }, + [&](const NixStringContextElem::DrvDeep & d) { + /* Treat same as Opaque */ + ensureValid(d.drvPath); + if (maybePathsOut) + maybePathsOut->emplace(d.drvPath); + }, }, - [&](const NixStringContextElem::Opaque & o) { - ensureValid(o.path); - if (maybePathsOut) - maybePathsOut->emplace(o.path); - }, - [&](const NixStringContextElem::DrvDeep & d) { - /* Treat same as Opaque */ - ensureValid(d.drvPath); - if (maybePathsOut) - maybePathsOut->emplace(d.drvPath); 
- }, - }, c.raw); + c.raw); } - if (drvs.empty()) return {}; + if (drvs.empty()) + return {}; if (isIFD) { if (!settings.enableImportFromDerivation) error( "cannot build '%1%' during evaluation because the option 'allow-import-from-derivation' is disabled", - drvs.begin()->to_string(*store) - ).debugThrow(); + drvs.begin()->to_string(*store)) + .debugThrow(); if (settings.traceImportFromDerivation) - warn( - "built '%1%' during evaluation due to an import from derivation", - drvs.begin()->to_string(*store) - ); + warn("built '%1%' during evaluation due to an import from derivation", drvs.begin()->to_string(*store)); } /* Build/substitute the context. */ std::vector buildReqs; buildReqs.reserve(drvs.size()); - for (auto & d : drvs) buildReqs.emplace_back(DerivedPath { d }); + for (auto & d : drvs) + buildReqs.emplace_back(DerivedPath{d}); buildStore->buildPaths(buildReqs, bmNormal, store); StorePathSet outputsToCopyAndAllow; @@ -124,17 +125,18 @@ StringMap EvalState::realiseContext(const NixStringContext & context, StorePathS if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { res.insert_or_assign( DownstreamPlaceholder::fromSingleDerivedPathBuilt( - SingleDerivedPath::Built { + SingleDerivedPath::Built{ .drvPath = drv.drvPath, .output = outputName, - }).render(), - buildStore->printStorePath(outputPath) - ); + }) + .render(), + buildStore->printStorePath(outputPath)); } } } - if (store != buildStore) copyClosure(*buildStore, *store, outputsToCopyAndAllow); + if (store != buildStore) + copyClosure(*buildStore, *store, outputsToCopyAndAllow); if (isIFD) { /* Allow access to the output closures of this derivation. */ @@ -145,7 +147,11 @@ StringMap EvalState::realiseContext(const NixStringContext & context, StorePathS return res; } -static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, std::optional resolveSymlinks = SymlinkResolution::Full) +static SourcePath realisePath( + EvalState & state, + const PosIdx pos, + Value & v, + std::optional resolveSymlinks = SymlinkResolution::Full) { NixStringContext context; @@ -185,7 +191,7 @@ static void mkOutputString( { state.mkOutputString( attrs.alloc(o.first), - SingleDerivedPath::Built { + SingleDerivedPath::Built{ .drvPath = makeConstantStorePathRef(drvPath), .output = o.first, }, @@ -201,13 +207,18 @@ static void mkOutputString( * @param storePath The path to the `.drv` to import. 
* @param v Return value */ -void derivationToValue(EvalState & state, const PosIdx pos, const SourcePath & path, const StorePath & storePath, Value & v) { +void derivationToValue( + EvalState & state, const PosIdx pos, const SourcePath & path, const StorePath & storePath, Value & v) +{ auto path2 = path.path.abs(); Derivation drv = state.store->readDerivation(storePath); auto attrs = state.buildBindings(3 + drv.outputs.size()); - attrs.alloc(state.sDrvPath).mkString(path2, { - NixStringContextElem::DrvDeep { .drvPath = storePath }, - }); + attrs.alloc(state.sDrvPath) + .mkString( + path2, + { + NixStringContextElem::DrvDeep{.drvPath = storePath}, + }); attrs.alloc(state.sName).mkString(drv.env["name"]); auto list = state.buildList(drv.outputs.size()); @@ -222,12 +233,15 @@ void derivationToValue(EvalState & state, const PosIdx pos, const SourcePath & p if (!state.vImportedDrvToDerivation) { state.vImportedDrvToDerivation = allocRootValue(state.allocValue()); - state.eval(state.parseExprFromString( - #include "imported-drv-to-derivation.nix.gen.hh" - , state.rootPath(CanonPath::root)), **state.vImportedDrvToDerivation); + state.eval( + state.parseExprFromString( +#include "imported-drv-to-derivation.nix.gen.hh" + , state.rootPath(CanonPath::root)), + **state.vImportedDrvToDerivation); } - state.forceFunction(**state.vImportedDrvToDerivation, pos, "while evaluating imported-drv-to-derivation.nix.gen.hh"); + state.forceFunction( + **state.vImportedDrvToDerivation, pos, "while evaluating imported-drv-to-derivation.nix.gen.hh"); v.mkApp(*state.vImportedDrvToDerivation, w); state.forceAttrs(v, pos, "while calling imported-drv-to-derivation.nix.gen.hh"); } @@ -241,7 +255,8 @@ void derivationToValue(EvalState & state, const PosIdx pos, const SourcePath & p * @param vScope The base scope to use for the import. * @param v Return value */ -static void scopedImport(EvalState & state, const PosIdx pos, SourcePath & path, Value * vScope, Value & v) { +static void scopedImport(EvalState & state, const PosIdx pos, SourcePath & path, Value * vScope, Value & v) +{ state.forceAttrs(*vScope, pos, "while evaluating the first argument passed to builtins.scopedImport"); Env * env = &state.allocEnv(vScope->attrs()->size()); @@ -283,29 +298,24 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v if (auto storePath = isValidDerivationInStore()) { derivationToValue(state, pos, path, *storePath, v); - } - else if (vScope) { + } else if (vScope) { scopedImport(state, pos, path, vScope, v); - } - else { + } else { state.evalFile(path, v); } } -static RegisterPrimOp primop_scopedImport(PrimOp { - .name = "scopedImport", - .arity = 2, - .fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v) - { - import(state, pos, *args[1], args[0], v); - } -}); +static RegisterPrimOp primop_scopedImport( + PrimOp{ + .name = "scopedImport", .arity = 2, .fun = [](EvalState & state, const PosIdx pos, Value ** args, Value & v) { + import(state, pos, *args[1], args[0], v); + }}); -static RegisterPrimOp primop_import({ - .name = "import", - .args = {"path"}, - // TODO turn "normal path values" into link below - .doc = R"( +static RegisterPrimOp primop_import( + {.name = "import", + .args = {"path"}, + // TODO turn "normal path values" into link below + .doc = R"( Load, parse, and return the Nix expression in the file *path*. > **Note** @@ -372,11 +382,9 @@ static RegisterPrimOp primop_import({ > > The function argument doesn’t have to be called `x` in `foo.nix`; any name would work. 
)", - .fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v) - { - import(state, pos, *args[0], nullptr, v); - } -}); + .fun = [](EvalState & state, const PosIdx pos, Value ** args, Value & v) { + import(state, pos, *args[0], nullptr, v); + }}); #ifndef _WIN32 // TODO implement via DLL loading on Windows @@ -385,24 +393,28 @@ static RegisterPrimOp primop_import({ extern "C" typedef void (*ValueInitializer)(EvalState & state, Value & v); /* Load a ValueInitializer from a DSO and return whatever it initializes */ -void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Value & v) +void prim_importNative(EvalState & state, const PosIdx pos, Value ** args, Value & v) { auto path = realisePath(state, pos, *args[0]); - std::string sym(state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.importNative")); + std::string sym( + state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.importNative")); - void *handle = dlopen(path.path.c_str(), RTLD_LAZY | RTLD_LOCAL); + void * handle = dlopen(path.path.c_str(), RTLD_LAZY | RTLD_LOCAL); if (!handle) state.error("could not open '%1%': %2%", path, dlerror()).debugThrow(); dlerror(); ValueInitializer func = (ValueInitializer) dlsym(handle, sym.c_str()); - if(!func) { - char *message = dlerror(); + if (!func) { + char * message = dlerror(); if (message) state.error("could not load symbol '%1%' from '%2%': %3%", sym, path, message).debugThrow(); else - state.error("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected", sym, path).debugThrow(); + state + .error( + "symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected", sym, path) + .debugThrow(); } (func)(state, v); @@ -410,9 +422,8 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu /* We don't dlclose because v may be a primop referencing a function in the shared object file */ } - /* Execute a program and parse its output */ -void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v) +void prim_exec(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.exec"); auto elems = args[0]->listView(); @@ -420,20 +431,33 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v) if (count == 0) state.error("at least one argument to 'exec' required").atPos(pos).debugThrow(); NixStringContext context; - auto program = state.coerceToString(pos, *elems[0], context, - "while evaluating the first element of the argument passed to builtins.exec", - false, false).toOwned(); + auto program = state + .coerceToString( + pos, + *elems[0], + context, + "while evaluating the first element of the argument passed to builtins.exec", + false, + false) + .toOwned(); Strings commandArgs; for (size_t i = 1; i < count; ++i) { - commandArgs.push_back( - state.coerceToString(pos, *elems[i], context, - "while evaluating an element of the argument passed to builtins.exec", - false, false).toOwned()); + commandArgs.push_back(state + .coerceToString( + pos, + *elems[i], + context, + "while evaluating an element of the argument passed to builtins.exec", + false, + false) + .toOwned()); } try { auto _ = state.realiseContext(context); // FIXME: Handle CA derivations } catch (InvalidPathError & e) { - state.error("cannot execute '%1%', since path '%2%' is not valid", program, e.path).atPos(pos).debugThrow(); 
+ state.error("cannot execute '%1%', since path '%2%' is not valid", program, e.path) + .atPos(pos) + .debugThrow(); } auto output = runProgram(program, true, commandArgs); @@ -455,24 +479,43 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v) #endif /* Return a string representing the type of the expression. */ -static void prim_typeOf(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_typeOf(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); std::string t; switch (args[0]->type()) { - case nInt: t = "int"; break; - case nBool: t = "bool"; break; - case nString: t = "string"; break; - case nPath: t = "path"; break; - case nNull: t = "null"; break; - case nAttrs: t = "set"; break; - case nList: t = "list"; break; - case nFunction: t = "lambda"; break; - case nExternal: - t = args[0]->external()->typeOf(); - break; - case nFloat: t = "float"; break; - case nThunk: unreachable(); + case nInt: + t = "int"; + break; + case nBool: + t = "bool"; + break; + case nString: + t = "string"; + break; + case nPath: + t = "path"; + break; + case nNull: + t = "null"; + break; + case nAttrs: + t = "set"; + break; + case nList: + t = "list"; + break; + case nFunction: + t = "lambda"; + break; + case nExternal: + t = args[0]->external()->typeOf(); + break; + case nFloat: + t = "float"; + break; + case nThunk: + unreachable(); } v.mkString(t); } @@ -489,7 +532,7 @@ static RegisterPrimOp primop_typeOf({ }); /* Determine whether the argument is the null value. */ -static void prim_isNull(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_isNull(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nNull); @@ -507,7 +550,7 @@ static RegisterPrimOp primop_isNull({ }); /* Determine whether the argument is a function. */ -static void prim_isFunction(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_isFunction(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nFunction); @@ -523,7 +566,7 @@ static RegisterPrimOp primop_isFunction({ }); /* Determine whether the argument is an integer. */ -static void prim_isInt(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_isInt(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nInt); @@ -539,7 +582,7 @@ static RegisterPrimOp primop_isInt({ }); /* Determine whether the argument is a float. */ -static void prim_isFloat(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_isFloat(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nFloat); @@ -555,7 +598,7 @@ static RegisterPrimOp primop_isFloat({ }); /* Determine whether the argument is a string. */ -static void prim_isString(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_isString(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nString); @@ -571,7 +614,7 @@ static RegisterPrimOp primop_isString({ }); /* Determine whether the argument is a Boolean. 
*/ -static void prim_isBool(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_isBool(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nBool); @@ -587,7 +630,7 @@ static RegisterPrimOp primop_isBool({ }); /* Determine whether the argument is a path. */ -static void prim_isPath(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_isPath(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nPath); @@ -603,14 +646,11 @@ static RegisterPrimOp primop_isPath({ }); template - static inline void withExceptionContext(Trace trace, Callable&& func) +static inline void withExceptionContext(Trace trace, Callable && func) { - try - { + try { func(); - } - catch(Error & e) - { + } catch (Error & e) { e.pushTrace(trace); throw; } @@ -622,14 +662,17 @@ struct CompareValues const PosIdx pos; const std::string_view errorCtx; - CompareValues(EvalState & state, const PosIdx pos, const std::string_view && errorCtx) : state(state), pos(pos), errorCtx(errorCtx) { }; + CompareValues(EvalState & state, const PosIdx pos, const std::string_view && errorCtx) + : state(state) + , pos(pos) + , errorCtx(errorCtx) {}; - bool operator () (Value * v1, Value * v2) const + bool operator()(Value * v1, Value * v2) const { return (*this)(v1, v2, errorCtx); } - bool operator () (Value * v1, Value * v2, std::string_view errorCtx) const + bool operator()(Value * v1, Value * v2, std::string_view errorCtx) const { try { if (v1->type() == nFloat && v2->type() == nInt) @@ -638,35 +681,38 @@ struct CompareValues return v1->integer().value < v2->fpoint(); if (v1->type() != v2->type()) state.error("cannot compare %s with %s", showType(*v1), showType(*v2)).debugThrow(); - // Allow selecting a subset of enum values - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wswitch-enum" +// Allow selecting a subset of enum values +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wswitch-enum" switch (v1->type()) { - case nInt: - return v1->integer() < v2->integer(); - case nFloat: - return v1->fpoint() < v2->fpoint(); - case nString: - return strcmp(v1->c_str(), v2->c_str()) < 0; - case nPath: - // Note: we don't take the accessor into account - // since it's not obvious how to compare them in a - // reproducible way. - return strcmp(v1->pathStr(), v2->pathStr()) < 0; - case nList: - // Lexicographic comparison - for (size_t i = 0;; i++) { - if (i == v2->listSize()) { - return false; - } else if (i == v1->listSize()) { - return true; - } else if (!state.eqValues(*v1->listView()[i], *v2->listView()[i], pos, errorCtx)) { - return (*this)(v1->listView()[i], v2->listView()[i], "while comparing two list elements"); - } + case nInt: + return v1->integer() < v2->integer(); + case nFloat: + return v1->fpoint() < v2->fpoint(); + case nString: + return strcmp(v1->c_str(), v2->c_str()) < 0; + case nPath: + // Note: we don't take the accessor into account + // since it's not obvious how to compare them in a + // reproducible way. 
+ return strcmp(v1->pathStr(), v2->pathStr()) < 0; + case nList: + // Lexicographic comparison + for (size_t i = 0;; i++) { + if (i == v2->listSize()) { + return false; + } else if (i == v1->listSize()) { + return true; + } else if (!state.eqValues(*v1->listView()[i], *v2->listView()[i], pos, errorCtx)) { + return (*this)(v1->listView()[i], v2->listView()[i], "while comparing two list elements"); } - default: - state.error("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow(); - #pragma GCC diagnostic pop + } + default: + state + .error( + "cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)) + .debugThrow(); +#pragma GCC diagnostic pop } } catch (Error & e) { if (!errorCtx.empty()) @@ -676,17 +722,20 @@ struct CompareValues } }; - typedef std::list> ValueList; -static void prim_genericClosure(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_genericClosure(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceAttrs(*args[0], noPos, "while evaluating the first argument passed to builtins.genericClosure"); /* Get the start set. */ - auto startSet = state.getAttr(state.sStartSet, args[0]->attrs(), "in the attrset passed as argument to builtins.genericClosure"); + auto startSet = state.getAttr( + state.sStartSet, args[0]->attrs(), "in the attrset passed as argument to builtins.genericClosure"); - state.forceList(*startSet->value, noPos, "while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure"); + state.forceList( + *startSet->value, + noPos, + "while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure"); ValueList workSet; for (auto elem : startSet->value->listView()) @@ -698,8 +747,10 @@ static void prim_genericClosure(EvalState & state, const PosIdx pos, Value * * a } /* Get the operator. */ - auto op = state.getAttr(state.sOperator, args[0]->attrs(), "in the attrset passed as argument to builtins.genericClosure"); - state.forceFunction(*op->value, noPos, "while evaluating the 'operator' attribute passed as argument to builtins.genericClosure"); + auto op = state.getAttr( + state.sOperator, args[0]->attrs(), "in the attrset passed as argument to builtins.genericClosure"); + state.forceFunction( + *op->value, noPos, "while evaluating the 'operator' attribute passed as argument to builtins.genericClosure"); /* Construct the closure by applying the operator to elements of `workSet', adding the result to `workSet', continuing until @@ -713,22 +764,33 @@ static void prim_genericClosure(EvalState & state, const PosIdx pos, Value * * a Value * e = *(workSet.begin()); workSet.pop_front(); - state.forceAttrs(*e, noPos, "while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure"); + state.forceAttrs( + *e, + noPos, + "while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure"); - auto key = state.getAttr(state.sKey, e->attrs(), "in one of the attrsets generated by (or initially passed to) builtins.genericClosure"); + auto key = state.getAttr( + state.sKey, + e->attrs(), + "in one of the attrsets generated by (or initially passed to) builtins.genericClosure"); state.forceValue(*key->value, noPos); - if (!doneKeys.insert(key->value).second) continue; + if (!doneKeys.insert(key->value).second) + continue; res.push_back(e); /* Call the `operator' function with `e' as argument. 
*/ Value newElements; state.callFunction(*op->value, {&e, 1}, newElements, noPos); - state.forceList(newElements, noPos, "while evaluating the return value of the `operator` passed to builtins.genericClosure"); + state.forceList( + newElements, + noPos, + "while evaluating the return value of the `operator` passed to builtins.genericClosure"); /* Add the values returned by the operator to the work set. */ for (auto elem : newElements.listView()) { - state.forceValue(*elem, noPos); // "while evaluating one one of the elements returned by the `operator` passed to builtins.genericClosure"); + state.forceValue(*elem, noPos); // "while evaluating one one of the elements returned by the `operator` + // passed to builtins.genericClosure"); workSet.push_back(elem); } } @@ -740,11 +802,12 @@ static void prim_genericClosure(EvalState & state, const PosIdx pos, Value * * a v.mkList(list); } -static RegisterPrimOp primop_genericClosure(PrimOp { - .name = "__genericClosure", - .args = {"attrset"}, - .arity = 1, - .doc = R"( +static RegisterPrimOp primop_genericClosure( + PrimOp{ + .name = "__genericClosure", + .args = {"attrset"}, + .arity = 1, + .doc = R"( `builtins.genericClosure` iteratively computes the transitive closure over an arbitrary relation defined by a function. It takes *attrset* with two attributes named `startSet` and `operator`, and returns a list of attribute sets: @@ -794,95 +857,100 @@ static RegisterPrimOp primop_genericClosure(PrimOp { > [ { key = 5; } { key = 16; } { key = 8; } { key = 4; } { key = 2; } { key = 1; } ] > ``` )", - .fun = prim_genericClosure, -}); + .fun = prim_genericClosure, + }); - -static RegisterPrimOp primop_break({ - .name = "break", - .args = {"v"}, - .doc = R"( +static RegisterPrimOp primop_break( + {.name = "break", + .args = {"v"}, + .doc = R"( In debug mode (enabled using `--debugger`), pause Nix expression evaluation and enter the REPL. Otherwise, return the argument `v`. )", - .fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v) - { - if (state.canDebug()) { - auto error = Error(ErrorInfo { - .level = lvlInfo, - .msg = HintFmt("breakpoint reached"), - .pos = state.positions[pos], - }); + .fun = [](EvalState & state, const PosIdx pos, Value ** args, Value & v) { + if (state.canDebug()) { + auto error = Error( + ErrorInfo{ + .level = lvlInfo, + .msg = HintFmt("breakpoint reached"), + .pos = state.positions[pos], + }); - state.runDebugRepl(&error); - } + state.runDebugRepl(&error); + } - // Return the value we were passed. - v = *args[0]; - } -}); + // Return the value we were passed. + v = *args[0]; + }}); -static RegisterPrimOp primop_abort({ - .name = "abort", - .args = {"s"}, - .doc = R"( +static RegisterPrimOp primop_abort( + {.name = "abort", + .args = {"s"}, + .doc = R"( Abort Nix expression evaluation and print the error message *s*. 
)", - .fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v) - { - NixStringContext context; - auto s = state.coerceToString(pos, *args[0], context, - "while evaluating the error message passed to builtins.abort").toOwned(); - state.error("evaluation aborted with the following error message: '%1%'", s).setIsFromExpr().debugThrow(); - } -}); + .fun = [](EvalState & state, const PosIdx pos, Value ** args, Value & v) { + NixStringContext context; + auto s = + state.coerceToString(pos, *args[0], context, "while evaluating the error message passed to builtins.abort") + .toOwned(); + state.error("evaluation aborted with the following error message: '%1%'", s) + .setIsFromExpr() + .debugThrow(); + }}); -static RegisterPrimOp primop_throw({ - .name = "throw", - .args = {"s"}, - .doc = R"( +static RegisterPrimOp primop_throw( + {.name = "throw", + .args = {"s"}, + .doc = R"( Throw an error message *s*. This usually aborts Nix expression evaluation, but in `nix-env -qa` and other commands that try to evaluate a set of derivations to get information about those derivations, a derivation that throws an error is silently skipped (which is not the case for `abort`). )", - .fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v) - { - NixStringContext context; - auto s = state.coerceToString(pos, *args[0], context, - "while evaluating the error message passed to builtin.throw").toOwned(); - state.error(s).setIsFromExpr().debugThrow(); - } -}); + .fun = [](EvalState & state, const PosIdx pos, Value ** args, Value & v) { + NixStringContext context; + auto s = + state.coerceToString(pos, *args[0], context, "while evaluating the error message passed to builtin.throw") + .toOwned(); + state.error(s).setIsFromExpr().debugThrow(); + }}); -static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value ** args, Value & v) { try { state.forceValue(*args[1], pos); v = *args[1]; } catch (Error & e) { NixStringContext context; - auto message = state.coerceToString(pos, *args[0], context, - "while evaluating the error message passed to builtins.addErrorContext", - false, false).toOwned(); + auto message = state + .coerceToString( + pos, + *args[0], + context, + "while evaluating the error message passed to builtins.addErrorContext", + false, + false) + .toOwned(); e.addTrace(nullptr, HintFmt(message), TracePrint::Always); throw; } } -static RegisterPrimOp primop_addErrorContext(PrimOp { - .name = "__addErrorContext", - .arity = 2, - // The normal trace item is redundant - .addTrace = false, - .fun = prim_addErrorContext, -}); +static RegisterPrimOp primop_addErrorContext( + PrimOp{ + .name = "__addErrorContext", + .arity = 2, + // The normal trace item is redundant + .addTrace = false, + .fun = prim_addErrorContext, + }); -static void prim_ceil(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_ceil(EvalState & state, const PosIdx pos, Value ** args, Value & v) { - auto value = state.forceFloat(*args[0], args[0]->determinePos(pos), - "while evaluating the first argument passed to builtins.ceil"); + auto value = state.forceFloat( + *args[0], args[0]->determinePos(pos), "while evaluating the first argument passed to builtins.ceil"); auto ceilValue = ceil(value); bool isInt = args[0]->type() == nInt; constexpr NixFloat int_min = std::numeric_limits::min(); // power of 2, so that no rounding occurs @@ -890,16 +958,29 @@ static void 
prim_ceil(EvalState & state, const PosIdx pos, Value * * args, Value v.mkInt(ceilValue); } else if (isInt) { // a NixInt, e.g. INT64_MAX, can be rounded to -int_min due to the cast to NixFloat - state.error("Due to a bug (see https://github.com/NixOS/nix/issues/12899) the NixInt argument %1% caused undefined behavior in previous Nix versions.\n\tFuture Nix versions might implement the correct behavior.", args[0]->integer().value).atPos(pos).debugThrow(); + state + .error( + "Due to a bug (see https://github.com/NixOS/nix/issues/12899) the NixInt argument %1% caused undefined behavior in previous Nix versions.\n\tFuture Nix versions might implement the correct behavior.", + args[0]->integer().value) + .atPos(pos) + .debugThrow(); } else { - state.error("NixFloat argument %1% is not in the range of NixInt", args[0]->fpoint()).atPos(pos).debugThrow(); + state.error("NixFloat argument %1% is not in the range of NixInt", args[0]->fpoint()) + .atPos(pos) + .debugThrow(); } // `forceFloat` casts NixInt to NixFloat, but instead NixInt args shall be returned unmodified if (isInt) { auto arg = args[0]->integer(); auto res = v.integer(); if (arg != res) { - state.error("Due to a bug (see https://github.com/NixOS/nix/issues/12899) a loss of precision occurred in previous Nix versions because the NixInt argument %1% was rounded to %2%.\n\tFuture Nix versions might implement the correct behavior.", arg, res).atPos(pos).debugThrow(); + state + .error( + "Due to a bug (see https://github.com/NixOS/nix/issues/12899) a loss of precision occurred in previous Nix versions because the NixInt argument %1% was rounded to %2%.\n\tFuture Nix versions might implement the correct behavior.", + arg, + res) + .atPos(pos) + .debugThrow(); } } } @@ -921,9 +1002,10 @@ static RegisterPrimOp primop_ceil({ .fun = prim_ceil, }); -static void prim_floor(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_floor(EvalState & state, const PosIdx pos, Value ** args, Value & v) { - auto value = state.forceFloat(*args[0], args[0]->determinePos(pos), "while evaluating the first argument passed to builtins.floor"); + auto value = state.forceFloat( + *args[0], args[0]->determinePos(pos), "while evaluating the first argument passed to builtins.floor"); auto floorValue = floor(value); bool isInt = args[0]->type() == nInt; constexpr NixFloat int_min = std::numeric_limits::min(); // power of 2, so that no rounding occurs @@ -931,16 +1013,29 @@ static void prim_floor(EvalState & state, const PosIdx pos, Value * * args, Valu v.mkInt(floorValue); } else if (isInt) { // a NixInt, e.g. 
INT64_MAX, can be rounded to -int_min due to the cast to NixFloat - state.error("Due to a bug (see https://github.com/NixOS/nix/issues/12899) the NixInt argument %1% caused undefined behavior in previous Nix versions.\n\tFuture Nix versions might implement the correct behavior.", args[0]->integer().value).atPos(pos).debugThrow(); + state + .error( + "Due to a bug (see https://github.com/NixOS/nix/issues/12899) the NixInt argument %1% caused undefined behavior in previous Nix versions.\n\tFuture Nix versions might implement the correct behavior.", + args[0]->integer().value) + .atPos(pos) + .debugThrow(); } else { - state.error("NixFloat argument %1% is not in the range of NixInt", args[0]->fpoint()).atPos(pos).debugThrow(); + state.error("NixFloat argument %1% is not in the range of NixInt", args[0]->fpoint()) + .atPos(pos) + .debugThrow(); } // `forceFloat` casts NixInt to NixFloat, but instead NixInt args shall be returned unmodified if (isInt) { auto arg = args[0]->integer(); auto res = v.integer(); if (arg != res) { - state.error("Due to a bug (see https://github.com/NixOS/nix/issues/12899) a loss of precision occurred in previous Nix versions because the NixInt argument %1% was rounded to %2%.\n\tFuture Nix versions might implement the correct behavior.", arg, res).atPos(pos).debugThrow(); + state + .error( + "Due to a bug (see https://github.com/NixOS/nix/issues/12899) a loss of precision occurred in previous Nix versions because the NixInt argument %1% was rounded to %2%.\n\tFuture Nix versions might implement the correct behavior.", + arg, + res) + .atPos(pos) + .debugThrow(); } } } @@ -964,16 +1059,15 @@ static RegisterPrimOp primop_floor({ /* Try evaluating the argument. Success => {success=true; value=something;}, * else => {success=false; value=false;} */ -static void prim_tryEval(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_tryEval(EvalState & state, const PosIdx pos, Value ** args, Value & v) { auto attrs = state.buildBindings(2); /* increment state.trylevel, and decrement it when this function returns. */ MaintainCount trylevel(state.trylevel); - ReplExitStatus (* savedDebugRepl)(ref es, const ValMap & extraEnv) = nullptr; - if (state.debugRepl && state.settings.ignoreExceptionsDuringTry) - { + ReplExitStatus (*savedDebugRepl)(ref es, const ValMap & extraEnv) = nullptr; + if (state.debugRepl && state.settings.ignoreExceptionsDuringTry) { /* to prevent starting the repl from exceptions within a tryEval, null it. */ savedDebugRepl = state.debugRepl; state.debugRepl = nullptr; @@ -1021,9 +1115,10 @@ static RegisterPrimOp primop_tryEval({ }); /* Return an environment variable. Use with care. */ -static void prim_getEnv(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_getEnv(EvalState & state, const PosIdx pos, Value ** args, Value & v) { - std::string name(state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.getEnv")); + std::string name( + state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.getEnv")); v.mkString(state.settings.restrictEval || state.settings.pureEval ? "" : getEnv(name).value_or("")); } @@ -1045,7 +1140,7 @@ static RegisterPrimOp primop_getEnv({ }); /* Evaluate the first argument, then return the second argument. 
*/ -static void prim_seq(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_seq(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); state.forceValue(*args[1], pos); @@ -1064,7 +1159,7 @@ static RegisterPrimOp primop_seq({ /* Evaluate the first argument deeply (i.e. recursing into lists and attrsets), then return the second argument. */ -static void prim_deepSeq(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_deepSeq(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValueDeep(*args[0]); state.forceValue(*args[1], pos); @@ -1084,7 +1179,7 @@ static RegisterPrimOp primop_deepSeq({ /* Evaluate the first expression and print it on standard error. Then return the second expression. Useful for debugging. */ -static void prim_trace(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_trace(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); if (args[0]->type() == nString) @@ -1115,11 +1210,12 @@ static RegisterPrimOp primop_trace({ .fun = prim_trace, }); -static void prim_warn(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_warn(EvalState & state, const PosIdx pos, Value ** args, Value & v) { // We only accept a string argument for now. The use case for pretty printing a value is covered by `trace`. // By rejecting non-strings we allow future versions to add more features without breaking existing code. - auto msgStr = state.forceString(*args[0], pos, "while evaluating the first argument; the message passed to builtins.warn"); + auto msgStr = + state.forceString(*args[0], pos, "while evaluating the first argument; the message passed to builtins.warn"); { BaseError msg(std::string{msgStr}); @@ -1132,7 +1228,9 @@ static void prim_warn(EvalState & state, const PosIdx pos, Value * * args, Value if (state.settings.builtinsAbortOnWarn) { // Not an EvalError or subclass, which would cause the error to be stored in the eval cache. - state.error("aborting to reveal stack trace of warning, as abort-on-warn is set").setIsFromExpr().debugThrow(); + state.error("aborting to reveal stack trace of warning, as abort-on-warn is set") + .setIsFromExpr() + .debugThrow(); } if (state.settings.builtinsTraceDebugger || state.settings.builtinsDebuggerOnWarn) { state.runDebugRepl(nullptr); @@ -1164,11 +1262,10 @@ static RegisterPrimOp primop_warn({ .fun = prim_warn, }); - /* Takes two arguments and evaluates to the second one. 
Used as the * builtins.traceVerbose implementation when --trace-verbose is not enabled */ -static void prim_second(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_second(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[1], pos); v = *args[1]; @@ -1178,11 +1275,7 @@ static void prim_second(EvalState & state, const PosIdx pos, Value * * args, Val * Derivations *************************************************************/ -static void derivationStrictInternal( - EvalState & state, - std::string_view name, - const Bindings * attrs, - Value & v); +static void derivationStrictInternal(EvalState & state, std::string_view name, const Bindings * attrs, Value & v); /* Construct (as a unobservable side effect) a Nix derivation expression that performs the derivation described by the argument @@ -1191,7 +1284,7 @@ static void derivationStrictInternal( derivation; `drvPath' containing the path of the Nix expression; and `type' set to `derivation' to indicate that this is a derivation. */ -static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.derivationStrict"); @@ -1202,7 +1295,8 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * * std::string_view drvName; try { - drvName = state.forceStringNoCtx(*nameAttr->value, pos, "while evaluating the `name` attribute passed to builtins.derivationStrict"); + drvName = state.forceStringNoCtx( + *nameAttr->value, pos, "while evaluating the `name` attribute passed to builtins.derivationStrict"); } catch (Error & e) { e.addTrace(state.positions[nameAttr->pos], "while evaluating the derivation attribute 'name'"); throw; @@ -1229,10 +1323,13 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * * * often results from the composition of several functions * (derivationStrict, derivation, mkDerivation, mkPythonModule, etc.) */ - e.addTrace(nullptr, HintFmt( + e.addTrace( + nullptr, + HintFmt( "while evaluating derivation '%s'\n" " whose name attribute is located at %s", - drvName, pos)); + drvName, + pos)); throw; } } @@ -1253,15 +1350,14 @@ static void checkDerivationName(EvalState & state, std::string_view drvName) // is optional. // Note that Nixpkgs generally won't trigger this, because `mkDerivation` // sanitizes the name. - state.error("invalid derivation name: %s. Please pass a different '%s'.", Uncolored(e.message()), "name").debugThrow(); + state + .error( + "invalid derivation name: %s. 
Please pass a different '%s'.", Uncolored(e.message()), "name") + .debugThrow(); } } -static void derivationStrictInternal( - EvalState & state, - std::string_view drvName, - const Bindings * attrs, - Value & v) +static void derivationStrictInternal(EvalState & state, std::string_view drvName, const Bindings * attrs, Value & v) { checkDerivationName(state, drvName); @@ -1270,17 +1366,23 @@ static void derivationStrictInternal( std::optional jsonObject; auto pos = v.determinePos(noPos); auto attr = attrs->find(state.sStructuredAttrs); - if (attr != attrs->end() && - state.forceBool(*attr->value, pos, - "while evaluating the `__structuredAttrs` " - "attribute passed to builtins.derivationStrict")) + if (attr != attrs->end() + && state.forceBool( + *attr->value, + pos, + "while evaluating the `__structuredAttrs` " + "attribute passed to builtins.derivationStrict")) jsonObject = json::object(); /* Check whether null attributes should be ignored. */ bool ignoreNulls = false; attr = attrs->find(state.sIgnoreNulls); if (attr != attrs->end()) - ignoreNulls = state.forceBool(*attr->value, pos, "while evaluating the `__ignoreNulls` attribute " "passed to builtins.derivationStrict"); + ignoreNulls = state.forceBool( + *attr->value, + pos, + "while evaluating the `__ignoreNulls` attribute " + "passed to builtins.derivationStrict"); /* Build the derivation expression by processing the attributes. */ Derivation drv; @@ -1298,7 +1400,8 @@ static void derivationStrictInternal( outputs.insert("out"); for (auto & i : attrs->lexicographicOrder(state.symbols)) { - if (i->name == state.sIgnoreNulls) continue; + if (i->name == state.sIgnoreNulls) + continue; auto key = state.symbols[i->name]; vomit("processing attribute '%1%'", key); @@ -1306,13 +1409,14 @@ static void derivationStrictInternal( if (s == "recursive") { // back compat, new name is "nar" ingestionMethod = ContentAddressMethod::Raw::NixArchive; - } else try { - ingestionMethod = ContentAddressMethod::parse(s); - } catch (UsageError &) { - state.error( - "invalid value '%s' for 'outputHashMode' attribute", s - ).atPos(v).debugThrow(); - } + } else + try { + ingestionMethod = ContentAddressMethod::parse(s); + } catch (UsageError &) { + state.error("invalid value '%s' for 'outputHashMode' attribute", s) + .atPos(v) + .debugThrow(); + } if (ingestionMethod == ContentAddressMethod::Raw::Text) experimentalFeatureSettings.require(Xp::DynamicDerivations); if (ingestionMethod == ContentAddressMethod::Raw::Git) @@ -1323,24 +1427,18 @@ static void derivationStrictInternal( outputs.clear(); for (auto & j : ss) { if (outputs.find(j) != outputs.end()) - state.error("duplicate derivation output '%1%'", j) - .atPos(v) - .debugThrow(); + state.error("duplicate derivation output '%1%'", j).atPos(v).debugThrow(); /* !!! Check whether j is a valid attribute name. */ /* Derivations cannot be named ‘drvPath’, because we already have an attribute ‘drvPath’ in the resulting set (see state.sDrvPath). 
*/ if (j == "drvPath") - state.error("invalid derivation output name 'drvPath'") - .atPos(v) - .debugThrow(); + state.error("invalid derivation output name 'drvPath'").atPos(v).debugThrow(); outputs.insert(j); } if (outputs.empty()) - state.error("derivation cannot have an empty set of outputs") - .atPos(v) - .debugThrow(); + state.error("derivation cannot have an empty set of outputs").atPos(v).debugThrow(); }; try { @@ -1350,7 +1448,8 @@ static void derivationStrictInternal( if (ignoreNulls) { state.forceValue(*i->value, pos); - if (i->value->type() == nNull) continue; + if (i->value->type() == nNull) + continue; } if (i->name == state.sContentAddressed && state.forceBool(*i->value, pos, context_below)) { @@ -1368,9 +1467,10 @@ static void derivationStrictInternal( else if (i->name == state.sArgs) { state.forceList(*i->value, pos, context_below); for (auto elem : i->value->listView()) { - auto s = state.coerceToString(pos, *elem, context, - "while evaluating an element of the argument list", - true).toOwned(); + auto s = state + .coerceToString( + pos, *elem, context, "while evaluating an element of the argument list", true) + .toOwned(); drv.args.push_back(s); } } @@ -1381,7 +1481,8 @@ static void derivationStrictInternal( if (jsonObject) { - if (i->name == state.sStructuredAttrs) continue; + if (i->name == state.sStructuredAttrs) + continue; jsonObject->emplace(key, printValueAsJSON(state, true, *i->value, pos, context)); @@ -1405,38 +1506,55 @@ static void derivationStrictInternal( } if (i->name == state.sAllowedReferences) - warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'allowedReferences'; use 'outputChecks..allowedReferences' instead", drvName); + warn( + "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'allowedReferences'; use 'outputChecks..allowedReferences' instead", + drvName); if (i->name == state.sAllowedRequisites) - warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'allowedRequisites'; use 'outputChecks..allowedRequisites' instead", drvName); + warn( + "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'allowedRequisites'; use 'outputChecks..allowedRequisites' instead", + drvName); if (i->name == state.sDisallowedReferences) - warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'disallowedReferences'; use 'outputChecks..disallowedReferences' instead", drvName); + warn( + "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'disallowedReferences'; use 'outputChecks..disallowedReferences' instead", + drvName); if (i->name == state.sDisallowedRequisites) - warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'disallowedRequisites'; use 'outputChecks..disallowedRequisites' instead", drvName); + warn( + "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'disallowedRequisites'; use 'outputChecks..disallowedRequisites' instead", + drvName); if (i->name == state.sMaxSize) - warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'maxSize'; use 'outputChecks..maxSize' instead", drvName); + warn( + "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'maxSize'; use 'outputChecks..maxSize' instead", + drvName); if (i->name == 
state.sMaxClosureSize) - warn("In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'maxClosureSize'; use 'outputChecks..maxClosureSize' instead", drvName); - + warn( + "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'maxClosureSize'; use 'outputChecks..maxClosureSize' instead", + drvName); } else { auto s = state.coerceToString(pos, *i->value, context, context_below, true).toOwned(); drv.env.emplace(key, s); - if (i->name == state.sBuilder) drv.builder = std::move(s); - else if (i->name == state.sSystem) drv.platform = std::move(s); - else if (i->name == state.sOutputHash) outputHash = std::move(s); - else if (i->name == state.sOutputHashAlgo) outputHashAlgo = parseHashAlgoOpt(s); - else if (i->name == state.sOutputHashMode) handleHashMode(s); + if (i->name == state.sBuilder) + drv.builder = std::move(s); + else if (i->name == state.sSystem) + drv.platform = std::move(s); + else if (i->name == state.sOutputHash) + outputHash = std::move(s); + else if (i->name == state.sOutputHashAlgo) + outputHashAlgo = parseHashAlgoOpt(s); + else if (i->name == state.sOutputHashMode) + handleHashMode(s); else if (i->name == state.sOutputs) handleOutputs(tokenizeString(s)); else if (i->name == state.sJson) - warn("In derivation '%s': setting structured attributes via '__json' is deprecated, and may be disallowed in future versions of Nix. Set '__structuredAttrs = true' instead.", drvName); + warn( + "In derivation '%s': setting structured attributes via '__json' is deprecated, and may be disallowed in future versions of Nix. Set '__structuredAttrs = true' instead.", + drvName); } - } } catch (Error & e) { - e.addTrace(state.positions[i->pos], - HintFmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName)); + e.addTrace( + state.positions[i->pos], HintFmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName)); throw; } } @@ -1450,53 +1568,49 @@ static void derivationStrictInternal( attributes should be added as dependencies of the resulting derivation. */ for (auto & c : context) { - std::visit(overloaded { - /* Since this allows the builder to gain access to every - path in the dependency graph of the derivation (including - all outputs), all paths in the graph must be added to - this derivation's list of inputs to ensure that they are - available when the builder runs. */ - [&](const NixStringContextElem::DrvDeep & d) { - /* !!! This doesn't work if readOnlyMode is set. */ - StorePathSet refs; - state.store->computeFSClosure(d.drvPath, refs); - for (auto & j : refs) { - drv.inputSrcs.insert(j); - if (j.isDerivation()) { - drv.inputDrvs.map[j].value = state.store->readDerivation(j).outputNames(); + std::visit( + overloaded{ + /* Since this allows the builder to gain access to every + path in the dependency graph of the derivation (including + all outputs), all paths in the graph must be added to + this derivation's list of inputs to ensure that they are + available when the builder runs. */ + [&](const NixStringContextElem::DrvDeep & d) { + /* !!! This doesn't work if readOnlyMode is set. 
*/ + StorePathSet refs; + state.store->computeFSClosure(d.drvPath, refs); + for (auto & j : refs) { + drv.inputSrcs.insert(j); + if (j.isDerivation()) { + drv.inputDrvs.map[j].value = state.store->readDerivation(j).outputNames(); + } } - } + }, + [&](const NixStringContextElem::Built & b) { + drv.inputDrvs.ensureSlot(*b.drvPath).value.insert(b.output); + }, + [&](const NixStringContextElem::Opaque & o) { drv.inputSrcs.insert(o.path); }, }, - [&](const NixStringContextElem::Built & b) { - drv.inputDrvs.ensureSlot(*b.drvPath).value.insert(b.output); - }, - [&](const NixStringContextElem::Opaque & o) { - drv.inputSrcs.insert(o.path); - }, - }, c.raw); + c.raw); } /* Do we have all required attributes? */ if (drv.builder == "") - state.error("required attribute 'builder' missing") - .atPos(v) - .debugThrow(); + state.error("required attribute 'builder' missing").atPos(v).debugThrow(); if (drv.platform == "") - state.error("required attribute 'system' missing") - .atPos(v) - .debugThrow(); + state.error("required attribute 'system' missing").atPos(v).debugThrow(); /* Check whether the derivation name is valid. */ - if (isDerivation(drvName) && - !(ingestionMethod == ContentAddressMethod::Raw::Text && - outputs.size() == 1 && - *(outputs.begin()) == "out")) - { - state.error( - "derivation names are allowed to end in '%s' only if they produce a single derivation file", - drvExtension - ).atPos(v).debugThrow(); + if (isDerivation(drvName) + && !( + ingestionMethod == ContentAddressMethod::Raw::Text && outputs.size() == 1 && *(outputs.begin()) == "out")) { + state + .error( + "derivation names are allowed to end in '%s' only if they produce a single derivation file", + drvExtension) + .atPos(v) + .debugThrow(); } if (outputHash) { @@ -1505,19 +1619,20 @@ static void derivationStrictInternal( Ignore `__contentAddressed` because fixed output derivations are already content addressed. 
*/ if (outputs.size() != 1 || *(outputs.begin()) != "out") - state.error( - "multiple outputs are not supported in fixed-output derivations" - ).atPos(v).debugThrow(); + state.error("multiple outputs are not supported in fixed-output derivations") + .atPos(v) + .debugThrow(); auto h = newHashAllowEmpty(*outputHash, outputHashAlgo); auto method = ingestionMethod.value_or(ContentAddressMethod::Raw::Flat); - DerivationOutput::CAFixed dof { - .ca = ContentAddress { - .method = std::move(method), - .hash = std::move(h), - }, + DerivationOutput::CAFixed dof{ + .ca = + ContentAddress{ + .method = std::move(method), + .hash = std::move(h), + }, }; drv.env["out"] = state.store->printStorePath(dof.path(*state.store, drvName, "out")); @@ -1526,8 +1641,7 @@ static void derivationStrictInternal( else if (contentAddressed || isImpure) { if (contentAddressed && isImpure) - state.error("derivation cannot be both content-addressed and impure") - .atPos(v).debugThrow(); + state.error("derivation cannot be both content-addressed and impure").atPos(v).debugThrow(); auto ha = outputHashAlgo.value_or(HashAlgorithm::SHA256); auto method = ingestionMethod.value_or(ContentAddressMethod::Raw::NixArchive); @@ -1535,14 +1649,16 @@ static void derivationStrictInternal( for (auto & i : outputs) { drv.env[i] = hashPlaceholder(i); if (isImpure) - drv.outputs.insert_or_assign(i, - DerivationOutput::Impure { + drv.outputs.insert_or_assign( + i, + DerivationOutput::Impure{ .method = method, .hashAlgo = ha, }); else - drv.outputs.insert_or_assign(i, - DerivationOutput::CAFloating { + drv.outputs.insert_or_assign( + i, + DerivationOutput::CAFloating{ .method = method, .hashAlgo = ha, }); @@ -1558,8 +1674,7 @@ static void derivationStrictInternal( the hash. */ for (auto & i : outputs) { drv.env[i] = ""; - drv.outputs.insert_or_assign(i, - DerivationOutput::Deferred { }); + drv.outputs.insert_or_assign(i, DerivationOutput::Deferred{}); } auto hashModulo = hashDerivationModulo(*state.store, Derivation(drv), true); @@ -1568,15 +1683,12 @@ static void derivationStrictInternal( for (auto & i : outputs) { auto h = get(hashModulo.hashes, i); if (!h) - state.error( - "derivation produced no hash for output '%s'", - i - ).atPos(v).debugThrow(); + state.error("derivation produced no hash for output '%s'", i).atPos(v).debugThrow(); auto outPath = state.store->makeOutputPath(i, *h, drvName); drv.env[i] = state.store->printStorePath(outPath); drv.outputs.insert_or_assign( i, - DerivationOutput::InputAddressed { + DerivationOutput::InputAddressed{ .path = std::move(outPath), }); } @@ -1584,7 +1696,7 @@ static void derivationStrictInternal( ; case DrvHash::Kind::Deferred: for (auto & i : outputs) { - drv.outputs.insert_or_assign(i, DerivationOutput::Deferred {}); + drv.outputs.insert_or_assign(i, DerivationOutput::Deferred{}); } } } @@ -1604,20 +1716,24 @@ static void derivationStrictInternal( } auto result = state.buildBindings(1 + drv.outputs.size()); - result.alloc(state.sDrvPath).mkString(drvPathS, { - NixStringContextElem::DrvDeep { .drvPath = drvPath }, - }); + result.alloc(state.sDrvPath) + .mkString( + drvPathS, + { + NixStringContextElem::DrvDeep{.drvPath = drvPath}, + }); for (auto & i : drv.outputs) mkOutputString(state, result, drvPath, i); v.mkAttrs(result); } -static RegisterPrimOp primop_derivationStrict(PrimOp { - .name = "derivationStrict", - .arity = 1, - .fun = prim_derivationStrict, -}); +static RegisterPrimOp primop_derivationStrict( + PrimOp{ + .name = "derivationStrict", + .arity = 1, + .fun = prim_derivationStrict, + 
}); /* Return a placeholder string for the specified output that will be substituted by the corresponding output path at build time. For @@ -1626,9 +1742,10 @@ static RegisterPrimOp primop_derivationStrict(PrimOp { time, any occurrence of this string in an derivation attribute will be replaced with the concrete path in the Nix store of the output ‘out’. */ -static void prim_placeholder(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_placeholder(EvalState & state, const PosIdx pos, Value ** args, Value & v) { - v.mkString(hashPlaceholder(state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.placeholder"))); + v.mkString(hashPlaceholder( + state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.placeholder"))); } static RegisterPrimOp primop_placeholder({ @@ -1646,18 +1763,17 @@ static RegisterPrimOp primop_placeholder({ .fun = prim_placeholder, }); - /************************************************************* * Paths *************************************************************/ - /* Convert the argument to a path and then to a string (confusing, eh?). !!! obsolete? */ -static void prim_toPath(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_toPath(EvalState & state, const PosIdx pos, Value ** args, Value & v) { NixStringContext context; - auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to builtins.toPath"); + auto path = + state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to builtins.toPath"); v.mkString(path.path.abs(), context); } @@ -1679,28 +1795,28 @@ static RegisterPrimOp primop_toPath({ /nix/store/newhash-oldhash-oldname. In the past, `toPath' had special case behaviour for store paths, but that created weird corner cases. */ -static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_storePath(EvalState & state, const PosIdx pos, Value ** args, Value & v) { if (state.settings.pureEval) - state.error( - "'%s' is not allowed in pure evaluation mode", - "builtins.storePath" - ).atPos(pos).debugThrow(); + state.error("'%s' is not allowed in pure evaluation mode", "builtins.storePath") + .atPos(pos) + .debugThrow(); NixStringContext context; - auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'").path; + auto path = + state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'") + .path; /* Resolve symlinks in ‘path’, unless ‘path’ itself is a symlink directly in the store. The latter condition is necessary so e.g. nix-push does the right thing. 
*/ if (!state.store->isStorePath(path.abs())) path = CanonPath(canonPath(path.abs(), true)); if (!state.store->isInStore(path.abs())) - state.error("path '%1%' is not in the Nix store", path) - .atPos(pos).debugThrow(); + state.error("path '%1%' is not in the Nix store", path).atPos(pos).debugThrow(); auto path2 = state.store->toStorePath(path.abs()).first; if (!settings.readOnlyMode) state.store->ensurePath(path2); - context.insert(NixStringContextElem::Opaque { .path = path2 }); + context.insert(NixStringContextElem::Opaque{.path = path2}); v.mkString(path.abs(), context); } @@ -1724,19 +1840,17 @@ static RegisterPrimOp primop_storePath({ .fun = prim_storePath, }); -static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_pathExists(EvalState & state, const PosIdx pos, Value ** args, Value & v) { try { auto & arg = *args[0]; /* SourcePath doesn't know about trailing slash. */ state.forceValue(arg, pos); - auto mustBeDir = arg.type() == nString - && (arg.string_view().ends_with("/") - || arg.string_view().ends_with("/.")); + auto mustBeDir = + arg.type() == nString && (arg.string_view().ends_with("/") || arg.string_view().ends_with("/.")); - auto symlinkResolution = - mustBeDir ? SymlinkResolution::Full : SymlinkResolution::Ancestors; + auto symlinkResolution = mustBeDir ? SymlinkResolution::Full : SymlinkResolution::Ancestors; auto path = realisePath(state, pos, arg, symlinkResolution); auto st = path.maybeLstat(); @@ -1779,12 +1893,13 @@ static std::string_view legacyBaseNameOf(std::string_view path) /* Return the base name of the given string, i.e., everything following the last slash. */ -static void prim_baseNameOf(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_baseNameOf(EvalState & state, const PosIdx pos, Value ** args, Value & v) { NixStringContext context; - v.mkString(legacyBaseNameOf(*state.coerceToString(pos, *args[0], context, - "while evaluating the first argument passed to builtins.baseNameOf", - false, false)), context); + v.mkString( + legacyBaseNameOf(*state.coerceToString( + pos, *args[0], context, "while evaluating the first argument passed to builtins.baseNameOf", false, false)), + context); } static RegisterPrimOp primop_baseNameOf({ @@ -1808,7 +1923,7 @@ static RegisterPrimOp primop_baseNameOf({ /* Return the directory of the given path, i.e., everything before the last slash. Return either a path or a string depending on the type of the argument. */ -static void prim_dirOf(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_dirOf(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); if (args[0]->type() == nPath) { @@ -1816,9 +1931,8 @@ static void prim_dirOf(EvalState & state, const PosIdx pos, Value * * args, Valu v.mkPath(path.path.isRoot() ? path : path.parent()); } else { NixStringContext context; - auto path = state.coerceToString(pos, *args[0], context, - "while evaluating the first argument passed to 'builtins.dirOf'", - false, false); + auto path = state.coerceToString( + pos, *args[0], context, "while evaluating the first argument passed to 'builtins.dirOf'", false, false); auto dir = dirOf(*path); v.mkString(dir, context); } @@ -1836,15 +1950,14 @@ static RegisterPrimOp primop_dirOf({ }); /* Return the contents of a file as a string. 
*/ -static void prim_readFile(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_readFile(EvalState & state, const PosIdx pos, Value ** args, Value & v) { auto path = realisePath(state, pos, *args[0]); auto s = path.readFile(); if (s.find((char) 0) != std::string::npos) - state.error( - "the contents of the file '%1%' cannot be represented as a Nix string", - path - ).atPos(pos).debugThrow(); + state.error("the contents of the file '%1%' cannot be represented as a Nix string", path) + .atPos(pos) + .debugThrow(); StorePathSet refs; if (state.store->isInStore(path.path.abs())) { try { @@ -1858,9 +1971,10 @@ static void prim_readFile(EvalState & state, const PosIdx pos, Value * * args, V } NixStringContext context; for (auto && p : std::move(refs)) { - context.insert(NixStringContextElem::Opaque { - .path = std::move((StorePath &&)p), - }); + context.insert( + NixStringContextElem::Opaque{ + .path = std::move((StorePath &&) p), + }); } v.mkString(s, context); } @@ -1876,7 +1990,7 @@ static RegisterPrimOp primop_readFile({ /* Find a file in the Nix search path. Used to implement paths, which are desugared to 'findFile __nixPath "x"'. */ -static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_findFile(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.findFile"); @@ -1888,41 +2002,52 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V std::string prefix; auto i = v2->attrs()->find(state.sPrefix); if (i != v2->attrs()->end()) - prefix = state.forceStringNoCtx(*i->value, pos, "while evaluating the `prefix` attribute of an element of the list passed to builtins.findFile"); + prefix = state.forceStringNoCtx( + *i->value, + pos, + "while evaluating the `prefix` attribute of an element of the list passed to builtins.findFile"); i = state.getAttr(state.sPath, v2->attrs(), "in an element of the __nixPath"); NixStringContext context; - auto path = state.coerceToString(pos, *i->value, context, - "while evaluating the `path` attribute of an element of the list passed to builtins.findFile", - false, false).toOwned(); + auto path = + state + .coerceToString( + pos, + *i->value, + context, + "while evaluating the `path` attribute of an element of the list passed to builtins.findFile", + false, + false) + .toOwned(); try { auto rewrites = state.realiseContext(context); path = rewriteStrings(std::move(path), rewrites); } catch (InvalidPathError & e) { - state.error( - "cannot find '%1%', since path '%2%' is not valid", - path, - e.path - ).atPos(pos).debugThrow(); + state.error("cannot find '%1%', since path '%2%' is not valid", path, e.path) + .atPos(pos) + .debugThrow(); } - lookupPath.elements.emplace_back(LookupPath::Elem { - .prefix = LookupPath::Prefix { .s = std::move(prefix) }, - .path = LookupPath::Path { .s = std::move(path) }, - }); + lookupPath.elements.emplace_back( + LookupPath::Elem{ + .prefix = LookupPath::Prefix{.s = std::move(prefix)}, + .path = LookupPath::Path{.s = std::move(path)}, + }); } - auto path = state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.findFile"); + auto path = + state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.findFile"); v.mkPath(state.findFile(lookupPath, path, pos)); } -static RegisterPrimOp primop_findFile(PrimOp { - .name = "__findFile", - .args = {"search-path", 
"lookup-path"}, - .doc = R"( +static RegisterPrimOp primop_findFile( + PrimOp{ + .name = "__findFile", + .args = {"search-path", "lookup-path"}, + .doc = R"( Find *lookup-path* in *search-path*. [Lookup path](@docroot@/language/constructs/lookup-path.md) expressions are [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and [`builtins.nixPath`](#builtins-nixPath): @@ -2050,13 +2175,14 @@ static RegisterPrimOp primop_findFile(PrimOp { > > makes `` refer to a particular branch of the `NixOS/nixpkgs` repository on GitHub. )", - .fun = prim_findFile, -}); + .fun = prim_findFile, + }); /* Return the cryptographic hash of a file in base-16. */ -static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_hashFile(EvalState & state, const PosIdx pos, Value ** args, Value & v) { - auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile"); + auto algo = + state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile"); std::optional ha = parseHashAlgo(algo); if (!ha) state.error("unknown hash algorithm '%1%'", algo).atPos(pos).debugThrow(); @@ -2079,14 +2205,13 @@ static RegisterPrimOp primop_hashFile({ static Value * fileTypeToString(EvalState & state, SourceAccessor::Type type) { - return - type == SourceAccessor::Type::tRegular ? &state.vStringRegular : - type == SourceAccessor::Type::tDirectory ? &state.vStringDirectory : - type == SourceAccessor::Type::tSymlink ? &state.vStringSymlink : - &state.vStringUnknown; + return type == SourceAccessor::Type::tRegular ? &state.vStringRegular + : type == SourceAccessor::Type::tDirectory ? &state.vStringDirectory + : type == SourceAccessor::Type::tSymlink ? &state.vStringSymlink + : &state.vStringUnknown; } -static void prim_readFileType(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_readFileType(EvalState & state, const PosIdx pos, Value ** args, Value & v) { auto path = realisePath(state, pos, *args[0], std::nullopt); /* Retrieve the directory entry type and stringize it. */ @@ -2104,7 +2229,7 @@ static RegisterPrimOp primop_readFileType({ }); /* Read a directory (without . or ..) */ -static void prim_readDir(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_readDir(EvalState & state, const PosIdx pos, Value ** args, Value & v) { auto path = realisePath(state, pos, *args[0]); @@ -2161,16 +2286,18 @@ static RegisterPrimOp primop_readDir({ }); /* Extend single element string context with another output. 
*/ -static void prim_outputOf(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_outputOf(EvalState & state, const PosIdx pos, Value ** args, Value & v) { - SingleDerivedPath drvPath = state.coerceToSingleDerivedPath(pos, *args[0], "while evaluating the first argument to builtins.outputOf"); + SingleDerivedPath drvPath = + state.coerceToSingleDerivedPath(pos, *args[0], "while evaluating the first argument to builtins.outputOf"); - OutputNameView outputName = state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument to builtins.outputOf"); + OutputNameView outputName = + state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument to builtins.outputOf"); state.mkSingleDerivedPathString( - SingleDerivedPath::Built { + SingleDerivedPath::Built{ .drvPath = make_ref(drvPath), - .output = std::string { outputName }, + .output = std::string{outputName}, }, v); } @@ -2209,11 +2336,10 @@ static RegisterPrimOp primop_outputOf({ * Creating files *************************************************************/ - /* Convert the argument (which can be any Nix expression) to an XML representation returned in a string. Not all Nix expressions can be sensibly or completely represented (e.g., functions). */ -static void prim_toXML(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_toXML(EvalState & state, const PosIdx pos, Value ** args, Value & v) { std::ostringstream out; NixStringContext context; @@ -2321,7 +2447,7 @@ static RegisterPrimOp primop_toXML({ /* Convert the argument (which can be any Nix expression) to a JSON string. Not all Nix expressions can be sensibly or completely represented (e.g., functions). */ -static void prim_toJSON(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_toJSON(EvalState & state, const PosIdx pos, Value ** args, Value & v) { std::ostringstream out; NixStringContext context; @@ -2344,12 +2470,12 @@ static RegisterPrimOp primop_toJSON({ }); /* Parse a JSON string to a value. */ -static void prim_fromJSON(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_fromJSON(EvalState & state, const PosIdx pos, Value ** args, Value & v) { auto s = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.fromJSON"); try { parseJSON(state, s, v); - } catch (JSONParseError &e) { + } catch (JSONParseError & e) { e.addTrace(state.positions[pos], "while decoding a JSON string"); throw; } @@ -2372,11 +2498,12 @@ static RegisterPrimOp primop_fromJSON({ /* Store a string in the Nix store as a source file that can be used as an input by derivations. 
*/ -static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_toFile(EvalState & state, const PosIdx pos, Value ** args, Value & v) { NixStringContext context; auto name = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.toFile"); - auto contents = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.toFile"); + auto contents = + state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.toFile"); StorePathSet refs; @@ -2384,23 +2511,33 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val if (auto p = std::get_if(&c.raw)) refs.insert(p->path); else - state.error( - "files created by %1% may not reference derivations, but %2% references %3%", - "builtins.toFile", - name, - c.to_string() - ).atPos(pos).debugThrow(); + state + .error( + "files created by %1% may not reference derivations, but %2% references %3%", + "builtins.toFile", + name, + c.to_string()) + .atPos(pos) + .debugThrow(); } - auto storePath = settings.readOnlyMode - ? state.store->makeFixedOutputPathFromCA(name, TextInfo { - .hash = hashString(HashAlgorithm::SHA256, contents), - .references = std::move(refs), - }) - : ({ - StringSource s { contents }; - state.store->addToStoreFromDump(s, name, FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256, refs, state.repair); - }); + auto storePath = settings.readOnlyMode ? state.store->makeFixedOutputPathFromCA( + name, + TextInfo{ + .hash = hashString(HashAlgorithm::SHA256, contents), + .references = std::move(refs), + }) + : ({ + StringSource s{contents}; + state.store->addToStoreFromDump( + s, + name, + FileSerialisationMethod::Flat, + ContentAddressMethod::Raw::Text, + HashAlgorithm::SHA256, + refs, + state.repair); + }); /* Note: we don't need to add `context' to the context of the result, since `storePath' itself has references to the paths @@ -2487,10 +2624,7 @@ static RegisterPrimOp primop_toFile({ .fun = prim_toFile, }); -bool EvalState::callPathFilter( - Value * filterFun, - const SourcePath & path, - PosIdx pos) +bool EvalState::callPathFilter(Value * filterFun, const SourcePath & path, PosIdx pos) { auto st = path.lstat(); @@ -2500,7 +2634,7 @@ bool EvalState::callPathFilter( arg1.mkString(path.path.abs()); // assert that type is not "unknown" - Value * args []{&arg1, fileTypeToString(*this, st.type)}; + Value * args[]{&arg1, fileTypeToString(*this, st.type)}; Value res; callFunction(*filterFun, args, res, pos); @@ -2535,10 +2669,8 @@ static void addPath( std::optional expectedStorePath; if (expectedHash) - expectedStorePath = state.store->makeFixedOutputPathFromCA(name, ContentAddressWithReferences::fromParts( - method, - *expectedHash, - {})); + expectedStorePath = state.store->makeFixedOutputPathFromCA( + name, ContentAddressWithReferences::fromParts(method, *expectedHash, {})); if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) { auto dstPath = fetchToStore( @@ -2551,10 +2683,9 @@ static void addPath( filter.get(), state.repair); if (expectedHash && expectedStorePath != dstPath) - state.error( - "store path mismatch in (possibly filtered) path added from '%s'", - path - ).atPos(pos).debugThrow(); + state.error("store path mismatch in (possibly filtered) path added from '%s'", path) + .atPos(pos) + .debugThrow(); state.allowAndSetStorePathString(dstPath, v); } else 
state.allowAndSetStorePathString(*expectedStorePath, v); @@ -2564,15 +2695,18 @@ static void addPath( } } - -static void prim_filterSource(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_filterSource(EvalState & state, const PosIdx pos, Value ** args, Value & v) { NixStringContext context; - auto path = state.coerceToPath(pos, *args[1], context, + auto path = state.coerceToPath( + pos, + *args[1], + context, "while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'"); state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filterSource"); - addPath(state, pos, path.baseName(), path, args[0], ContentAddressMethod::Raw::NixArchive, std::nullopt, v, context); + addPath( + state, pos, path.baseName(), path, args[0], ContentAddressMethod::Raw::NixArchive, std::nullopt, v, context); } static RegisterPrimOp primop_filterSource({ @@ -2630,7 +2764,7 @@ static RegisterPrimOp primop_filterSource({ .fun = prim_filterSource, }); -static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_path(EvalState & state, const PosIdx pos, Value ** args, Value & v) { std::optional path; std::string_view name; @@ -2644,27 +2778,33 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value for (auto & attr : *args[0]->attrs()) { auto n = state.symbols[attr.name]; if (n == "path") - path.emplace(state.coerceToPath(attr.pos, *attr.value, context, "while evaluating the 'path' attribute passed to 'builtins.path'")); + path.emplace(state.coerceToPath( + attr.pos, *attr.value, context, "while evaluating the 'path' attribute passed to 'builtins.path'")); else if (attr.name == state.sName) - name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `name` attribute passed to builtins.path"); + name = state.forceStringNoCtx( + *attr.value, attr.pos, "while evaluating the `name` attribute passed to builtins.path"); else if (n == "filter") - state.forceFunction(*(filterFun = attr.value), attr.pos, "while evaluating the `filter` parameter passed to builtins.path"); + state.forceFunction( + *(filterFun = attr.value), attr.pos, "while evaluating the `filter` parameter passed to builtins.path"); else if (n == "recursive") - method = state.forceBool(*attr.value, attr.pos, "while evaluating the `recursive` attribute passed to builtins.path") - ? ContentAddressMethod::Raw::NixArchive - : ContentAddressMethod::Raw::Flat; + method = state.forceBool( + *attr.value, attr.pos, "while evaluating the `recursive` attribute passed to builtins.path") + ? 
ContentAddressMethod::Raw::NixArchive + : ContentAddressMethod::Raw::Flat; else if (n == "sha256") - expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256); + expectedHash = newHashAllowEmpty( + state.forceStringNoCtx( + *attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), + HashAlgorithm::SHA256); else - state.error( - "unsupported argument '%1%' to 'builtins.path'", - state.symbols[attr.name] - ).atPos(attr.pos).debugThrow(); + state.error("unsupported argument '%1%' to 'builtins.path'", state.symbols[attr.name]) + .atPos(attr.pos) + .debugThrow(); } if (!path) - state.error( - "missing required 'path' attribute in the first argument to 'builtins.path'" - ).atPos(pos).debugThrow(); + state.error("missing required 'path' attribute in the first argument to 'builtins.path'") + .atPos(pos) + .debugThrow(); if (name.empty()) name = path->baseName(); @@ -2706,15 +2846,13 @@ static RegisterPrimOp primop_path({ .fun = prim_path, }); - /************************************************************* * Sets *************************************************************/ - /* Return the names of the attributes in a set as a sorted list of strings. */ -static void prim_attrNames(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_attrNames(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.attrNames"); @@ -2723,8 +2861,7 @@ static void prim_attrNames(EvalState & state, const PosIdx pos, Value * * args, for (const auto & [n, i] : enumerate(*args[0]->attrs())) list[n] = Value::toPtr(state.symbols[i.name]); - std::sort(list.begin(), list.end(), - [](Value * v1, Value * v2) { return strcmp(v1->c_str(), v2->c_str()) < 0; }); + std::sort(list.begin(), list.end(), [](Value * v1, Value * v2) { return strcmp(v1->c_str(), v2->c_str()) < 0; }); v.mkList(list); } @@ -2742,7 +2879,7 @@ static RegisterPrimOp primop_attrNames({ /* Return the values of the attributes in a set as a list, in the same order as attrNames. */ -static void prim_attrValues(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_attrValues(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.attrValues"); @@ -2751,12 +2888,10 @@ static void prim_attrValues(EvalState & state, const PosIdx pos, Value * * args, for (const auto & [n, i] : enumerate(*args[0]->attrs())) list[n] = (Value *) &i; - std::sort(list.begin(), list.end(), - [&](Value * v1, Value * v2) { - std::string_view s1 = state.symbols[((Attr *) v1)->name], - s2 = state.symbols[((Attr *) v2)->name]; - return s1 < s2; - }); + std::sort(list.begin(), list.end(), [&](Value * v1, Value * v2) { + std::string_view s1 = state.symbols[((Attr *) v1)->name], s2 = state.symbols[((Attr *) v2)->name]; + return s1 < s2; + }); for (auto & v : list) v = ((Attr *) v)->value; @@ -2775,17 +2910,14 @@ static RegisterPrimOp primop_attrValues({ }); /* Dynamic version of the `.' operator. 
*/ -void prim_getAttr(EvalState & state, const PosIdx pos, Value * * args, Value & v) +void prim_getAttr(EvalState & state, const PosIdx pos, Value ** args, Value & v) { auto attr = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.getAttr"); state.forceAttrs(*args[1], pos, "while evaluating the second argument passed to builtins.getAttr"); - auto i = state.getAttr( - state.symbols.create(attr), - args[1]->attrs(), - "in the attribute set under consideration" - ); + auto i = state.getAttr(state.symbols.create(attr), args[1]->attrs(), "in the attribute set under consideration"); // !!! add to stack trace? - if (state.countCalls && i->pos) state.attrSelects[i->pos]++; + if (state.countCalls && i->pos) + state.attrSelects[i->pos]++; state.forceValue(*i->value, pos); v = *i->value; } @@ -2803,9 +2935,10 @@ static RegisterPrimOp primop_getAttr({ }); /* Return position information of the specified attribute. */ -static void prim_unsafeGetAttrPos(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_unsafeGetAttrPos(EvalState & state, const PosIdx pos, Value ** args, Value & v) { - auto attr = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.unsafeGetAttrPos"); + auto attr = state.forceStringNoCtx( + *args[0], pos, "while evaluating the first argument passed to builtins.unsafeGetAttrPos"); state.forceAttrs(*args[1], pos, "while evaluating the second argument passed to builtins.unsafeGetAttrPos"); auto i = args[1]->attrs()->find(state.symbols.create(attr)); if (i == args[1]->attrs()->end()) @@ -2814,17 +2947,18 @@ static void prim_unsafeGetAttrPos(EvalState & state, const PosIdx pos, Value * * state.mkPos(v, i->pos); } -static RegisterPrimOp primop_unsafeGetAttrPos(PrimOp { - .name = "__unsafeGetAttrPos", - .args = {"s", "set"}, - .arity = 2, - .doc = R"( +static RegisterPrimOp primop_unsafeGetAttrPos( + PrimOp{ + .name = "__unsafeGetAttrPos", + .args = {"s", "set"}, + .arity = 2, + .doc = R"( `unsafeGetAttrPos` returns the position of the attribute named *s* from *set*. This is used by Nixpkgs to provide location information in error messages. )", - .fun = prim_unsafeGetAttrPos, -}); + .fun = prim_unsafeGetAttrPos, + }); // access to exact position information (ie, line and column numbers) is deferred // due to the cost associated with calculating that information and how rarely @@ -2838,19 +2972,14 @@ static RegisterPrimOp primop_unsafeGetAttrPos(PrimOp { // but each type of thunk has an associated runtime cost in the current evaluator. // as with black holes this cost is too high to justify another thunk type to check // for in the very hot path that is forceValue. 
-static struct LazyPosAccessors { - PrimOp primop_lineOfPos{ - .arity = 1, - .fun = [] (EvalState & state, PosIdx pos, Value * * args, Value & v) { - v.mkInt(state.positions[PosIdx(args[0]->integer().value)].line); - } - }; - PrimOp primop_columnOfPos{ - .arity = 1, - .fun = [] (EvalState & state, PosIdx pos, Value * * args, Value & v) { - v.mkInt(state.positions[PosIdx(args[0]->integer().value)].column); - } - }; +static struct LazyPosAccessors +{ + PrimOp primop_lineOfPos{.arity = 1, .fun = [](EvalState & state, PosIdx pos, Value ** args, Value & v) { + v.mkInt(state.positions[PosIdx(args[0]->integer().value)].line); + }}; + PrimOp primop_columnOfPos{.arity = 1, .fun = [](EvalState & state, PosIdx pos, Value ** args, Value & v) { + v.mkInt(state.positions[PosIdx(args[0]->integer().value)].column); + }}; Value lineOfPos, columnOfPos; @@ -2875,7 +3004,7 @@ void makePositionThunks(EvalState & state, const PosIdx pos, Value & line, Value } /* Dynamic version of the `?' operator. */ -static void prim_hasAttr(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_hasAttr(EvalState & state, const PosIdx pos, Value ** args, Value & v) { auto attr = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hasAttr"); state.forceAttrs(*args[1], pos, "while evaluating the second argument passed to builtins.hasAttr"); @@ -2894,7 +3023,7 @@ static RegisterPrimOp primop_hasAttr({ }); /* Determine whether the argument is a set. */ -static void prim_isAttrs(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_isAttrs(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nAttrs); @@ -2909,7 +3038,7 @@ static RegisterPrimOp primop_isAttrs({ .fun = prim_isAttrs, }); -static void prim_removeAttrs(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_removeAttrs(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceAttrs(*args[0], pos, "while evaluating the first argument passed to builtins.removeAttrs"); state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.removeAttrs"); @@ -2921,7 +3050,8 @@ static void prim_removeAttrs(EvalState & state, const PosIdx pos, Value * * args boost::container::small_vector names; names.reserve(args[1]->listSize()); for (auto elem : args[1]->listView()) { - state.forceStringNoCtx(*elem, pos, "while evaluating the values of the second argument passed to builtins.removeAttrs"); + state.forceStringNoCtx( + *elem, pos, "while evaluating the values of the second argument passed to builtins.removeAttrs"); names.emplace_back(state.symbols.create(elem->string_view()), nullptr); } std::sort(names.begin(), names.end()); @@ -2931,9 +3061,7 @@ static void prim_removeAttrs(EvalState & state, const PosIdx pos, Value * * args vector. */ auto attrs = state.buildBindings(args[0]->attrs()->size()); std::set_difference( - args[0]->attrs()->begin(), args[0]->attrs()->end(), - names.begin(), names.end(), - std::back_inserter(attrs)); + args[0]->attrs()->begin(), args[0]->attrs()->end(), names.begin(), names.end(), std::back_inserter(attrs)); v.mkAttrs(attrs.alreadySorted()); } @@ -2958,7 +3086,7 @@ static RegisterPrimOp primop_removeAttrs({ "nameN"; value = valueN;}] is transformed to {name1 = value1; ... nameN = valueN;}. In case of duplicate occurrences of the same name, the first takes precedence. 
*/ -static void prim_listToAttrs(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_listToAttrs(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceList(*args[0], pos, "while evaluating the argument passed to builtins.listToAttrs"); @@ -2973,7 +3101,10 @@ static void prim_listToAttrs(EvalState & state, const PosIdx pos, Value * * args auto j = state.getAttr(state.sName, v2->attrs(), "in a {name=...; value=...;} pair"); - auto name = state.forceStringNoCtx(*j->value, j->pos, "while evaluating the `name` attribute of an element of the list passed to builtins.listToAttrs"); + auto name = state.forceStringNoCtx( + *j->value, + j->pos, + "while evaluating the `name` attribute of an element of the list passed to builtins.listToAttrs"); auto sym = state.symbols.create(name); // (ab)use Attr to store a Value * * instead of a Value *, so that we can stabilize the sort using the Value * * @@ -3037,7 +3168,7 @@ static RegisterPrimOp primop_listToAttrs({ .fun = prim_listToAttrs, }); -static void prim_intersectAttrs(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_intersectAttrs(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceAttrs(*args[0], pos, "while evaluating the first argument passed to builtins.intersectAttrs"); state.forceAttrs(*args[1], pos, "while evaluating the second argument passed to builtins.intersectAttrs"); @@ -3091,8 +3222,7 @@ static void prim_intersectAttrs(EvalState & state, const PosIdx pos, Value * * a if (r != right.end()) attrs.insert(*r); } - } - else { + } else { for (auto & r : right) { auto l = left.find(r.name); if (l != left.end()) @@ -3115,16 +3245,18 @@ static RegisterPrimOp primop_intersectAttrs({ .fun = prim_intersectAttrs, }); -static void prim_catAttrs(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_catAttrs(EvalState & state, const PosIdx pos, Value ** args, Value & v) { - auto attrName = state.symbols.create(state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.catAttrs")); + auto attrName = state.symbols.create( + state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.catAttrs")); state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.catAttrs"); SmallValueVector res(args[1]->listSize()); size_t found = 0; for (auto v2 : args[1]->listView()) { - state.forceAttrs(*v2, pos, "while evaluating an element in the list passed as second argument to builtins.catAttrs"); + state.forceAttrs( + *v2, pos, "while evaluating an element in the list passed as second argument to builtins.catAttrs"); if (auto i = v2->attrs()->get(attrName)) res[found++] = i->value; } @@ -3152,7 +3284,7 @@ static RegisterPrimOp primop_catAttrs({ .fun = prim_catAttrs, }); -static void prim_functionArgs(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_functionArgs(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); if (args[0]->isPrimOpApp() || args[0]->isPrimOp()) { @@ -3167,7 +3299,7 @@ static void prim_functionArgs(EvalState & state, const PosIdx pos, Value * * arg return; } - const auto &formals = args[0]->lambda().fun->formals->formals; + const auto & formals = args[0]->lambda().fun->formals->formals; auto attrs = state.buildBindings(formals.size()); for (auto & i : formals) attrs.insert(i.name, state.getBool(i.def), i.pos); @@ -3197,7 +3329,7 @@ static 
RegisterPrimOp primop_functionArgs({ }); /* */ -static void prim_mapAttrs(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_mapAttrs(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceAttrs(*args[1], pos, "while evaluating the second argument passed to builtins.mapAttrs"); @@ -3228,7 +3360,7 @@ static RegisterPrimOp primop_mapAttrs({ .fun = prim_mapAttrs, }); -static void prim_zipAttrsWith(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_zipAttrsWith(EvalState & state, const PosIdx pos, Value ** args, Value & v) { // we will first count how many values are present for each given key. // we then allocate a single attrset and pre-populate it with lists of @@ -3251,7 +3383,8 @@ static void prim_zipAttrsWith(EvalState & state, const PosIdx pos, Value * * arg const auto listItems = args[1]->listView(); for (auto & vElem : listItems) { - state.forceAttrs(*vElem, noPos, "while evaluating a value of the list passed as second argument to builtins.zipAttrsWith"); + state.forceAttrs( + *vElem, noPos, "while evaluating a value of the list passed as second argument to builtins.zipAttrsWith"); for (auto & attr : *vElem->attrs()) attrsSeen.try_emplace(attr.name).first->second.size++; } @@ -3314,14 +3447,12 @@ static RegisterPrimOp primop_zipAttrsWith({ .fun = prim_zipAttrsWith, }); - /************************************************************* * Lists *************************************************************/ - /* Determine whether the argument is a list. */ -static void prim_isList(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_isList(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nList); @@ -3337,16 +3468,15 @@ static RegisterPrimOp primop_isList({ }); /* Return the n-1'th element of a list. */ -static void prim_elemAt(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_elemAt(EvalState & state, const PosIdx pos, Value ** args, Value & v) { - NixInt::Inner n = state.forceInt(*args[1], pos, "while evaluating the second argument passed to 'builtins.elemAt'").value; + NixInt::Inner n = + state.forceInt(*args[1], pos, "while evaluating the second argument passed to 'builtins.elemAt'").value; state.forceList(*args[0], pos, "while evaluating the first argument passed to 'builtins.elemAt'"); if (n < 0 || std::make_unsigned_t(n) >= args[0]->listSize()) - state.error( - "'builtins.elemAt' called with index %d on a list of size %d", - n, - args[0]->listSize() - ).atPos(pos).debugThrow(); + state.error("'builtins.elemAt' called with index %d on a list of size %d", n, args[0]->listSize()) + .atPos(pos) + .debugThrow(); state.forceValue(*args[0]->listView()[n], pos); v = *args[0]->listView()[n]; } @@ -3362,13 +3492,11 @@ static RegisterPrimOp primop_elemAt({ }); /* Return the first element of a list. 
*/ -static void prim_head(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_head(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceList(*args[0], pos, "while evaluating the first argument passed to 'builtins.head'"); if (args[0]->listSize() == 0) - state.error( - "'builtins.head' called on an empty list" - ).atPos(pos).debugThrow(); + state.error("'builtins.head' called on an empty list").atPos(pos).debugThrow(); state.forceValue(*args[0]->listView()[0], pos); v = *args[0]->listView()[0]; } @@ -3387,7 +3515,7 @@ static RegisterPrimOp primop_head({ /* Return a list consisting of everything but the first element of a list. Warning: this function takes O(n) time, so you probably don't want to use it! */ -static void prim_tail(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_tail(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceList(*args[0], pos, "while evaluating the first argument passed to 'builtins.tail'"); if (args[0]->listSize() == 0) @@ -3416,7 +3544,7 @@ static RegisterPrimOp primop_tail({ }); /* Apply a function to every element of a list. */ -static void prim_map(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_map(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.map"); @@ -3429,8 +3557,7 @@ static void prim_map(EvalState & state, const PosIdx pos, Value * * args, Value auto list = state.buildList(args[1]->listSize()); for (const auto & [n, v] : enumerate(list)) - (v = state.allocValue())->mkApp( - args[0], args[1]->listView()[n]); + (v = state.allocValue())->mkApp(args[0], args[1]->listView()[n]); v.mkList(list); } @@ -3453,7 +3580,7 @@ static RegisterPrimOp primop_map({ /* Filter a list using a predicate; that is, return a list containing every element from the list for which the predicate function returns true. */ -static void prim_filter(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_filter(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.filter"); @@ -3472,7 +3599,8 @@ static void prim_filter(EvalState & state, const PosIdx pos, Value * * args, Val for (size_t n = 0; n < len; ++n) { Value res; state.callFunction(*args[0], *args[1]->listView()[n], res, noPos); - if (state.forceBool(res, pos, "while evaluating the return value of the filtering function passed to builtins.filter")) + if (state.forceBool( + res, pos, "while evaluating the return value of the filtering function passed to builtins.filter")) vs[k++] = args[1]->listView()[n]; else same = false; @@ -3482,7 +3610,8 @@ static void prim_filter(EvalState & state, const PosIdx pos, Value * * args, Val v = *args[1]; else { auto list = state.buildList(k); - for (const auto & [n, v] : enumerate(list)) v = vs[n]; + for (const auto & [n, v] : enumerate(list)) + v = vs[n]; v.mkList(list); } } @@ -3498,7 +3627,7 @@ static RegisterPrimOp primop_filter({ }); /* Return true if a list contains a given element. 
*/ -static void prim_elem(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_elem(EvalState & state, const PosIdx pos, Value ** args, Value & v) { bool res = false; state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.elem"); @@ -3521,11 +3650,16 @@ static RegisterPrimOp primop_elem({ }); /* Concatenate a list of lists. */ -static void prim_concatLists(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_concatLists(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.concatLists"); auto listView = args[0]->listView(); - state.concatLists(v, args[0]->listSize(), listView.data(), pos, "while evaluating a value of the list passed to builtins.concatLists"); + state.concatLists( + v, + args[0]->listSize(), + listView.data(), + pos, + "while evaluating a value of the list passed to builtins.concatLists"); } static RegisterPrimOp primop_concatLists({ @@ -3538,7 +3672,7 @@ static RegisterPrimOp primop_concatLists({ }); /* Return the length of a list. This is an O(1) time operation. */ -static void prim_length(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_length(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.length"); v.mkInt(args[0]->listSize()); @@ -3555,7 +3689,7 @@ static RegisterPrimOp primop_length({ /* Reduce a list by applying a binary operator, from left to right. The operator is applied strictly. */ -static void prim_foldlStrict(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_foldlStrict(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.foldlStrict"); state.forceList(*args[2], pos, "while evaluating the third argument passed to builtins.foldlStrict"); @@ -3565,7 +3699,7 @@ static void prim_foldlStrict(EvalState & state, const PosIdx pos, Value * * args auto listView = args[2]->listView(); for (auto [n, elem] : enumerate(listView)) { - Value * vs []{vCur, elem}; + Value * vs[]{vCur, elem}; vCur = n == args[2]->listSize() - 1 ? &v : state.allocValue(); state.callFunction(*args[0], vs, *vCur, pos); } @@ -3596,14 +3730,15 @@ static RegisterPrimOp primop_foldlStrict({ .fun = prim_foldlStrict, }); -static void anyOrAll(bool any, EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void anyOrAll(bool any, EvalState & state, const PosIdx pos, Value ** args, Value & v) { - state.forceFunction(*args[0], pos, std::string("while evaluating the first argument passed to builtins.") + (any ? "any" : "all")); - state.forceList(*args[1], pos, std::string("while evaluating the second argument passed to builtins.") + (any ? "any" : "all")); + state.forceFunction( + *args[0], pos, std::string("while evaluating the first argument passed to builtins.") + (any ? "any" : "all")); + state.forceList( + *args[1], pos, std::string("while evaluating the second argument passed to builtins.") + (any ? "any" : "all")); - std::string_view errorCtx = any - ? "while evaluating the return value of the function passed to builtins.any" - : "while evaluating the return value of the function passed to builtins.all"; + std::string_view errorCtx = any ? 
"while evaluating the return value of the function passed to builtins.any" + : "while evaluating the return value of the function passed to builtins.all"; Value vTmp; for (auto elem : args[1]->listView()) { @@ -3618,8 +3753,7 @@ static void anyOrAll(bool any, EvalState & state, const PosIdx pos, Value * * ar v.mkBool(!any); } - -static void prim_any(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_any(EvalState & state, const PosIdx pos, Value ** args, Value & v) { anyOrAll(true, state, pos, args, v); } @@ -3634,7 +3768,7 @@ static RegisterPrimOp primop_any({ .fun = prim_any, }); -static void prim_all(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_all(EvalState & state, const PosIdx pos, Value ** args, Value & v) { anyOrAll(false, state, pos, args, v); } @@ -3649,7 +3783,7 @@ static RegisterPrimOp primop_all({ .fun = prim_all, }); -static void prim_genList(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_genList(EvalState & state, const PosIdx pos, Value ** args, Value & v) { auto len_ = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.genList").value; @@ -3687,10 +3821,9 @@ static RegisterPrimOp primop_genList({ .fun = prim_genList, }); -static void prim_lessThan(EvalState & state, const PosIdx pos, Value * * args, Value & v); +static void prim_lessThan(EvalState & state, const PosIdx pos, Value ** args, Value & v); - -static void prim_sort(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_sort(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.sort"); @@ -3712,13 +3845,15 @@ static void prim_sort(EvalState & state, const PosIdx pos, Value * * args, Value if (args[0]->isPrimOp()) { auto ptr = args[0]->primOp()->fun.target(); if (ptr && *ptr == prim_lessThan) - return CompareValues(state, noPos, "while evaluating the ordering function passed to builtins.sort")(a, b); + return CompareValues(state, noPos, "while evaluating the ordering function passed to builtins.sort")( + a, b); } Value * vs[] = {a, b}; Value vBool; state.callFunction(*args[0], vs, vBool, noPos); - return state.forceBool(vBool, pos, "while evaluating the return value of the sorting function passed to builtins.sort"); + return state.forceBool( + vBool, pos, "while evaluating the return value of the sorting function passed to builtins.sort"); }; /* NOTE: Using custom implementation because std::sort and std::stable_sort @@ -3780,7 +3915,7 @@ static RegisterPrimOp primop_sort({ .fun = prim_sort, }); -static void prim_partition(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_partition(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.partition"); state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.partition"); @@ -3794,7 +3929,8 @@ static void prim_partition(EvalState & state, const PosIdx pos, Value * * args, state.forceValue(*vElem, pos); Value res; state.callFunction(*args[0], *vElem, res, pos); - if (state.forceBool(res, pos, "while evaluating the return value of the partition function passed to builtins.partition")) + if (state.forceBool( + res, pos, "while evaluating the return value of the partition function passed to builtins.partition")) right.push_back(vElem); else 
wrong.push_back(vElem); @@ -3840,7 +3976,7 @@ static RegisterPrimOp primop_partition({ .fun = prim_partition, }); -static void prim_groupBy(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_groupBy(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.groupBy"); state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.groupBy"); @@ -3850,7 +3986,8 @@ static void prim_groupBy(EvalState & state, const PosIdx pos, Value * * args, Va for (auto vElem : args[1]->listView()) { Value res; state.callFunction(*args[0], *vElem, res, pos); - auto name = state.forceStringNoCtx(res, pos, "while evaluating the return value of the grouping function passed to builtins.groupBy"); + auto name = state.forceStringNoCtx( + res, pos, "while evaluating the return value of the grouping function passed to builtins.groupBy"); auto sym = state.symbols.create(name); auto vector = attrs.try_emplace(sym, ValueVector()).first; vector->second.push_back(vElem); @@ -3892,7 +4029,7 @@ static RegisterPrimOp primop_groupBy({ .fun = prim_groupBy, }); -static void prim_concatMap(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_concatMap(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.concatMap"); state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.concatMap"); @@ -3905,7 +4042,10 @@ static void prim_concatMap(EvalState & state, const PosIdx pos, Value * * args, for (size_t n = 0; n < nrLists; ++n) { Value * vElem = args[1]->listView()[n]; state.callFunction(*args[0], *vElem, lists[n], pos); - state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)), "while evaluating the return value of the function passed to builtins.concatMap"); + state.forceList( + lists[n], + lists[n].determinePos(args[0]->determinePos(pos)), + "while evaluating the return value of the function passed to builtins.concatMap"); len += lists[n].listSize(); } @@ -3931,19 +4071,18 @@ static RegisterPrimOp primop_concatMap({ .fun = prim_concatMap, }); - /************************************************************* * Integer arithmetic *************************************************************/ - -static void prim_add(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_add(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); state.forceValue(*args[1], pos); if (args[0]->type() == nFloat || args[1]->type() == nFloat) - v.mkFloat(state.forceFloat(*args[0], pos, "while evaluating the first argument of the addition") - + state.forceFloat(*args[1], pos, "while evaluating the second argument of the addition")); + v.mkFloat( + state.forceFloat(*args[0], pos, "while evaluating the first argument of the addition") + + state.forceFloat(*args[1], pos, "while evaluating the second argument of the addition")); else { auto i1 = state.forceInt(*args[0], pos, "while evaluating the first argument of the addition"); auto i2 = state.forceInt(*args[1], pos, "while evaluating the second argument of the addition"); @@ -3966,13 +4105,14 @@ static RegisterPrimOp primop_add({ .fun = prim_add, }); -static void prim_sub(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_sub(EvalState & state, const PosIdx pos, Value ** args, 
Value & v) { state.forceValue(*args[0], pos); state.forceValue(*args[1], pos); if (args[0]->type() == nFloat || args[1]->type() == nFloat) - v.mkFloat(state.forceFloat(*args[0], pos, "while evaluating the first argument of the subtraction") - - state.forceFloat(*args[1], pos, "while evaluating the second argument of the subtraction")); + v.mkFloat( + state.forceFloat(*args[0], pos, "while evaluating the first argument of the subtraction") + - state.forceFloat(*args[1], pos, "while evaluating the second argument of the subtraction")); else { auto i1 = state.forceInt(*args[0], pos, "while evaluating the first argument of the subtraction"); auto i2 = state.forceInt(*args[1], pos, "while evaluating the second argument of the subtraction"); @@ -3996,13 +4136,14 @@ static RegisterPrimOp primop_sub({ .fun = prim_sub, }); -static void prim_mul(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_mul(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); state.forceValue(*args[1], pos); if (args[0]->type() == nFloat || args[1]->type() == nFloat) - v.mkFloat(state.forceFloat(*args[0], pos, "while evaluating the first of the multiplication") - * state.forceFloat(*args[1], pos, "while evaluating the second argument of the multiplication")); + v.mkFloat( + state.forceFloat(*args[0], pos, "while evaluating the first of the multiplication") + * state.forceFloat(*args[1], pos, "while evaluating the second argument of the multiplication")); else { auto i1 = state.forceInt(*args[0], pos, "while evaluating the first argument of the multiplication"); auto i2 = state.forceInt(*args[1], pos, "while evaluating the second argument of the multiplication"); @@ -4026,7 +4167,7 @@ static RegisterPrimOp primop_mul({ .fun = prim_mul, }); -static void prim_div(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_div(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); state.forceValue(*args[1], pos); @@ -4059,7 +4200,7 @@ static RegisterPrimOp primop_div({ .fun = prim_div, }); -static void prim_bitAnd(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_bitAnd(EvalState & state, const PosIdx pos, Value ** args, Value & v) { auto i1 = state.forceInt(*args[0], pos, "while evaluating the first argument passed to builtins.bitAnd"); auto i2 = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.bitAnd"); @@ -4075,7 +4216,7 @@ static RegisterPrimOp primop_bitAnd({ .fun = prim_bitAnd, }); -static void prim_bitOr(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_bitOr(EvalState & state, const PosIdx pos, Value ** args, Value & v) { auto i1 = state.forceInt(*args[0], pos, "while evaluating the first argument passed to builtins.bitOr"); auto i2 = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.bitOr"); @@ -4092,7 +4233,7 @@ static RegisterPrimOp primop_bitOr({ .fun = prim_bitOr, }); -static void prim_bitXor(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_bitXor(EvalState & state, const PosIdx pos, Value ** args, Value & v) { auto i1 = state.forceInt(*args[0], pos, "while evaluating the first argument passed to builtins.bitXor"); auto i2 = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.bitXor"); @@ -4109,7 +4250,7 @@ static RegisterPrimOp primop_bitXor({ .fun = prim_bitXor, }); 
-static void prim_lessThan(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_lessThan(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); state.forceValue(*args[1], pos); @@ -4129,21 +4270,18 @@ static RegisterPrimOp primop_lessThan({ .fun = prim_lessThan, }); - /************************************************************* * String manipulation *************************************************************/ - /* Convert the argument to a string. Paths are *not* copied to the store, so `toString /foo/bar' yields `"/foo/bar"', not `"/nix/store/whatever..."'. */ -static void prim_toString(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_toString(EvalState & state, const PosIdx pos, Value ** args, Value & v) { NixStringContext context; - auto s = state.coerceToString(pos, *args[0], context, - "while evaluating the first argument passed to builtins.toString", - true, false); + auto s = state.coerceToString( + pos, *args[0], context, "while evaluating the first argument passed to builtins.toString", true, false); v.mkString(*s, context); } @@ -4175,15 +4313,25 @@ static RegisterPrimOp primop_toString({ at byte position `min(start, stringLength str)' inclusive and ending at `min(start + len, stringLength str)'. `start' must be non-negative. */ -static void prim_substring(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_substring(EvalState & state, const PosIdx pos, Value ** args, Value & v) { using NixUInt = std::make_unsigned_t; - NixInt::Inner start = state.forceInt(*args[0], pos, "while evaluating the first argument (the start offset) passed to builtins.substring").value; + NixInt::Inner start = + state + .forceInt( + *args[0], pos, "while evaluating the first argument (the start offset) passed to builtins.substring") + .value; if (start < 0) state.error("negative start position in 'substring'").atPos(pos).debugThrow(); - NixInt::Inner len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring").value; + NixInt::Inner len = + state + .forceInt( + *args[1], + pos, + "while evaluating the second argument (the substring length) passed to builtins.substring") + .value; // Negative length may be idiomatically passed to builtins.substring to get // the tail of the string. @@ -4204,7 +4352,8 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value * * args, } NixStringContext context; - auto s = state.coerceToString(pos, *args[2], context, "while evaluating the third argument (the string) passed to builtins.substring"); + auto s = state.coerceToString( + pos, *args[2], context, "while evaluating the third argument (the string) passed to builtins.substring"); v.mkString(NixUInt(start) >= s->size() ? 
"" : s->substr(start, _len), context); } @@ -4230,10 +4379,11 @@ static RegisterPrimOp primop_substring({ .fun = prim_substring, }); -static void prim_stringLength(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_stringLength(EvalState & state, const PosIdx pos, Value ** args, Value & v) { NixStringContext context; - auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.stringLength"); + auto s = + state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.stringLength"); v.mkInt(NixInt::Inner(s->size())); } @@ -4248,15 +4398,17 @@ static RegisterPrimOp primop_stringLength({ }); /* Return the cryptographic hash of a string in base-16. */ -static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_hashString(EvalState & state, const PosIdx pos, Value ** args, Value & v) { - auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString"); + auto algo = + state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString"); std::optional ha = parseHashAlgo(algo); if (!ha) state.error("unknown hash algorithm '%1%'", algo).atPos(pos).debugThrow(); NixStringContext context; // discarded - auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString"); + auto s = + state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString"); v.mkString(hashString(*ha, s).to_string(HashFormat::Base16, false)); } @@ -4272,7 +4424,7 @@ static RegisterPrimOp primop_hashString({ .fun = prim_hashString, }); -static void prim_convertHash(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_convertHash(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceAttrs(*args[0], pos, "while evaluating the first argument passed to builtins.convertHash"); auto inputAttrs = args[0]->attrs(); @@ -4283,10 +4435,13 @@ static void prim_convertHash(EvalState & state, const PosIdx pos, Value * * args auto iteratorHashAlgo = inputAttrs->get(state.symbols.create("hashAlgo")); std::optional ha = std::nullopt; if (iteratorHashAlgo) - ha = parseHashAlgo(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'")); + ha = parseHashAlgo( + state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'")); - auto iteratorToHashFormat = state.getAttr(state.symbols.create("toHashFormat"), args[0]->attrs(), "while locating the attribute 'toHashFormat'"); - HashFormat hf = parseHashFormat(state.forceStringNoCtx(*iteratorToHashFormat->value, pos, "while evaluating the attribute 'toHashFormat'")); + auto iteratorToHashFormat = state.getAttr( + state.symbols.create("toHashFormat"), args[0]->attrs(), "while locating the attribute 'toHashFormat'"); + HashFormat hf = parseHashFormat( + state.forceStringNoCtx(*iteratorToHashFormat->value, pos, "while evaluating the attribute 'toHashFormat'")); v.mkString(Hash::parseAny(hash, ha).to_string(hf, hf == HashFormat::SRI)); } @@ -4398,7 +4553,7 @@ std::shared_ptr makeRegexCache() return std::make_shared(); } -void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v) +void prim_match(EvalState & state, const PosIdx pos, Value ** args, Value & v) { auto re = state.forceStringNoCtx(*args[0], pos, "while 
evaluating the first argument passed to builtins.match"); @@ -4407,7 +4562,8 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v) auto regex = state.regexCache->get(re); NixStringContext context; - const auto str = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.match"); + const auto str = + state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.match"); std::cmatch match; if (!std::regex_match(str.begin(), str.end(), match, regex)) { @@ -4427,13 +4583,9 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v) } catch (std::regex_error & e) { if (e.code() == std::regex_constants::error_space) { // limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++ - state.error("memory limit exceeded by regular expression '%s'", re) - .atPos(pos) - .debugThrow(); + state.error("memory limit exceeded by regular expression '%s'", re).atPos(pos).debugThrow(); } else - state.error("invalid regular expression '%s'", re) - .atPos(pos) - .debugThrow(); + state.error("invalid regular expression '%s'", re).atPos(pos).debugThrow(); } } @@ -4475,7 +4627,7 @@ static RegisterPrimOp primop_match({ /* Split a string with a regular expression, and return a list of the non-matching parts interleaved by the lists of the matching groups. */ -void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v) +void prim_split(EvalState & state, const PosIdx pos, Value ** args, Value & v) { auto re = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.split"); @@ -4484,7 +4636,8 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v) auto regex = state.regexCache->get(re); NixStringContext context; - const auto str = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.split"); + const auto str = + state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.split"); auto begin = std::cregex_iterator(str.begin(), str.end(), regex); auto end = std::cregex_iterator(); @@ -4533,13 +4686,9 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v) } catch (std::regex_error & e) { if (e.code() == std::regex_constants::error_space) { // limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++ - state.error("memory limit exceeded by regular expression '%s'", re) - .atPos(pos) - .debugThrow(); + state.error("memory limit exceeded by regular expression '%s'", re).atPos(pos).debugThrow(); } else - state.error("invalid regular expression '%s'", re) - .atPos(pos) - .debugThrow(); + state.error("invalid regular expression '%s'", re).atPos(pos).debugThrow(); } } @@ -4580,20 +4729,34 @@ static RegisterPrimOp primop_split({ .fun = prim_split, }); -static void prim_concatStringsSep(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_concatStringsSep(EvalState & state, const PosIdx pos, Value ** args, Value & v) { NixStringContext context; - auto sep = state.forceString(*args[0], context, pos, "while evaluating the first argument (the separator string) passed to builtins.concatStringsSep"); - state.forceList(*args[1], pos, "while evaluating the second argument (the list of strings to concat) passed to builtins.concatStringsSep"); + auto sep = state.forceString( + *args[0], + context, + pos, + "while evaluating the first argument (the separator string) passed to builtins.concatStringsSep"); 
+ state.forceList( + *args[1], + pos, + "while evaluating the second argument (the list of strings to concat) passed to builtins.concatStringsSep"); std::string res; res.reserve((args[1]->listSize() + 32) * sep.size()); bool first = true; for (auto elem : args[1]->listView()) { - if (first) first = false; else res += sep; - res += *state.coerceToString(pos, *elem, context, "while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep"); + if (first) + first = false; + else + res += sep; + res += *state.coerceToString( + pos, + *elem, + context, + "while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep"); } v.mkString(res, context); @@ -4610,29 +4773,31 @@ static RegisterPrimOp primop_concatStringsSep({ .fun = prim_concatStringsSep, }); -static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.replaceStrings"); state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.replaceStrings"); if (args[0]->listSize() != args[1]->listSize()) - state.error( - "'from' and 'to' arguments passed to builtins.replaceStrings have different lengths" - ).atPos(pos).debugThrow(); + state.error("'from' and 'to' arguments passed to builtins.replaceStrings have different lengths") + .atPos(pos) + .debugThrow(); std::vector from; from.reserve(args[0]->listSize()); for (auto elem : args[0]->listView()) - from.emplace_back(state.forceString(*elem, pos, "while evaluating one of the strings to replace passed to builtins.replaceStrings")); + from.emplace_back(state.forceString( + *elem, pos, "while evaluating one of the strings to replace passed to builtins.replaceStrings")); std::unordered_map cache; auto to = args[1]->listView(); NixStringContext context; - auto s = state.forceString(*args[2], context, pos, "while evaluating the third argument passed to builtins.replaceStrings"); + auto s = state.forceString( + *args[2], context, pos, "while evaluating the third argument passed to builtins.replaceStrings"); std::string res; // Loops one past last character to handle the case where 'from' contains an empty string. 
- for (size_t p = 0; p <= s.size(); ) { + for (size_t p = 0; p <= s.size();) { bool found = false; auto i = from.begin(); auto j = to.begin(); @@ -4643,9 +4808,13 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * a auto v = cache.find(j_index); if (v == cache.end()) { NixStringContext ctx; - auto ts = state.forceString(**j, ctx, pos, "while evaluating one of the replacement strings passed to builtins.replaceStrings"); + auto ts = state.forceString( + **j, + ctx, + pos, + "while evaluating one of the replacement strings passed to builtins.replaceStrings"); v = (cache.emplace(j_index, ts)).first; - for (auto& path : ctx) + for (auto & path : ctx) context.insert(path); } res += v->second; @@ -4688,15 +4857,14 @@ static RegisterPrimOp primop_replaceStrings({ .fun = prim_replaceStrings, }); - /************************************************************* * Versions *************************************************************/ - -static void prim_parseDrvName(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_parseDrvName(EvalState & state, const PosIdx pos, Value ** args, Value & v) { - auto name = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.parseDrvName"); + auto name = + state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.parseDrvName"); DrvName parsed(name); auto attrs = state.buildBindings(2); attrs.alloc(state.sName).mkString(parsed.name); @@ -4718,10 +4886,12 @@ static RegisterPrimOp primop_parseDrvName({ .fun = prim_parseDrvName, }); -static void prim_compareVersions(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_compareVersions(EvalState & state, const PosIdx pos, Value ** args, Value & v) { - auto version1 = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.compareVersions"); - auto version2 = state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.compareVersions"); + auto version1 = + state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.compareVersions"); + auto version2 = state.forceStringNoCtx( + *args[1], pos, "while evaluating the second argument passed to builtins.compareVersions"); auto result = compareVersions(version1, version2); v.mkInt(result < 0 ? -1 : result > 0 ? 
1 : 0); } @@ -4739,9 +4909,10 @@ static RegisterPrimOp primop_compareVersions({ .fun = prim_compareVersions, }); -static void prim_splitVersion(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_splitVersion(EvalState & state, const PosIdx pos, Value ** args, Value & v) { - auto version = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.splitVersion"); + auto version = + state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.splitVersion"); auto iter = version.cbegin(); Strings components; while (iter != version.cend()) { @@ -4767,18 +4938,15 @@ static RegisterPrimOp primop_splitVersion({ .fun = prim_splitVersion, }); - /************************************************************* * Primop registration *************************************************************/ - RegisterPrimOp::RegisterPrimOp(PrimOp && primOp) { primOps().push_back(std::move(primOp)); } - void EvalState::createBaseEnv(const EvalSettings & evalSettings) { baseEnv.up = 0; @@ -4788,9 +4956,12 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) /* `builtins' must be first! */ v.mkAttrs(buildBindings(128).finish()); - addConstant("builtins", v, { - .type = nAttrs, - .doc = R"( + addConstant( + "builtins", + v, + { + .type = nAttrs, + .doc = R"( Contains all the built-in functions and values. Since built-in functions were added over time, [testing for attributes](./operators.md#has-attribute) in `builtins` can be used for graceful fallback on older Nix installations: @@ -4800,12 +4971,15 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) if builtins ? hasContext then builtins.hasContext s else true ``` )", - }); + }); v.mkBool(true); - addConstant("true", v, { - .type = nBool, - .doc = R"( + addConstant( + "true", + v, + { + .type = nBool, + .doc = R"( Primitive value. It can be returned by @@ -4820,12 +4994,15 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) 1 ``` )", - }); + }); v.mkBool(false); - addConstant("false", v, { - .type = nBool, - .doc = R"( + addConstant( + "false", + v, + { + .type = nBool, + .doc = R"( Primitive value. It can be returned by @@ -4840,11 +5017,14 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) 1 ``` )", - }); + }); - addConstant("null", &vNull, { - .type = nNull, - .doc = R"( + addConstant( + "null", + &vNull, + { + .type = nNull, + .doc = R"( Primitive value. The name `null` is not special, and can be shadowed: @@ -4854,14 +5034,17 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) 1 ``` )", - }); + }); if (!settings.pureEval) { v.mkInt(time(0)); } - addConstant("__currentTime", v, { - .type = nInt, - .doc = R"( + addConstant( + "__currentTime", + v, + { + .type = nInt, + .doc = R"( Return the [Unix time](https://en.wikipedia.org/wiki/Unix_time) at first evaluation. Repeated references to that name re-use the initially obtained value. @@ -4880,14 +5063,17 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) The [store path](@docroot@/store/store-path.md) of a derivation depending on `currentTime` differs for each evaluation, unless both evaluate `builtins.currentTime` in the same second. 
)", - .impureOnly = true, - }); + .impureOnly = true, + }); if (!settings.pureEval) v.mkString(settings.getCurrentSystem()); - addConstant("__currentSystem", v, { - .type = nString, - .doc = R"( + addConstant( + "__currentSystem", + v, + { + .type = nString, + .doc = R"( The value of the [`eval-system`](@docroot@/command-ref/conf-file.md#conf-eval-system) or else @@ -4910,13 +5096,16 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) "mips64-linux" ``` )", - .impureOnly = true, - }); + .impureOnly = true, + }); v.mkString(nixVersion); - addConstant("__nixVersion", v, { - .type = nString, - .doc = R"( + addConstant( + "__nixVersion", + v, + { + .type = nString, + .doc = R"( The version of Nix. For example, where the command line returns the current Nix version, @@ -4933,12 +5122,15 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) "2.16.0" ``` )", - }); + }); v.mkString(store->storeDir); - addConstant("__storeDir", v, { - .type = nString, - .doc = R"( + addConstant( + "__storeDir", + v, + { + .type = nString, + .doc = R"( Logical file system location of the [Nix store](@docroot@/glossary.md#gloss-store) currently in use. This value is determined by the `store` parameter in [Store URLs](@docroot@/store/types/index.md#store-url-format): @@ -4948,19 +5140,22 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) "/blah" ``` )", - }); + }); /* Language version. This should be increased every time a new language feature gets added. It's not necessary to increase it when primops get added, because you can just use `builtins ? primOp' to check. */ v.mkInt(6); - addConstant("__langVersion", v, { - .type = nInt, - .doc = R"( + addConstant( + "__langVersion", + v, + { + .type = nInt, + .doc = R"( The current version of the Nix language. )", - }); + }); #ifndef _WIN32 // TODO implement on Windows // Miscellaneous @@ -4980,7 +5175,7 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) addPrimOp({ .name = "__traceVerbose", - .args = { "e1", "e2" }, + .args = {"e1", "e2"}, .arity = 2, .doc = R"( Evaluate *e1* and print its abstract syntax representation on standard @@ -4999,9 +5194,12 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) (list[n] = allocValue())->mkAttrs(attrs); } v.mkList(list); - addConstant("__nixPath", v, { - .type = nList, - .doc = R"( + addConstant( + "__nixPath", + v, + { + .type = nList, + .doc = R"( A list of search path entries used to resolve [lookup paths](@docroot@/language/constructs/lookup-path.md). Its value is primarily determined by the [`nix-path` configuration setting](@docroot@/command-ref/conf-file.md#conf-nix-path), which are - Overridden by the [`NIX_PATH`](@docroot@/command-ref/env-common.md#env-NIX_PATH) environment variable or the `--nix-path` option @@ -5027,7 +5225,7 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) builtins.findFile builtins.nixPath "nixpkgs" ``` )", - }); + }); for (auto & primOp : RegisterPrimOp::primOps()) if (experimentalFeatureSettings.isEnabled(primOp.experimentalFeature)) { @@ -5048,9 +5246,12 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) Null docs because it is documented separately. */ auto vDerivation = allocValue(); - addConstant("derivation", vDerivation, { - .type = nFunction, - }); + addConstant( + "derivation", + vDerivation, + { + .type = nFunction, + }); /* Now that we've added all primops, sort the `builtins' set, because attribute lookups expect it to be sorted. 
*/ @@ -5063,5 +5264,4 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) evalFile(derivationInternal, *vDerivation); } - -} +} // namespace nix diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc index 56962d6a8..11b59efcd 100644 --- a/src/libexpr/primops/context.cc +++ b/src/libexpr/primops/context.cc @@ -5,10 +5,11 @@ namespace nix { -static void prim_unsafeDiscardStringContext(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_unsafeDiscardStringContext(EvalState & state, const PosIdx pos, Value ** args, Value & v) { NixStringContext context; - auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardStringContext"); + auto s = state.coerceToString( + pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardStringContext"); v.mkString(*s); } @@ -21,18 +22,17 @@ static RegisterPrimOp primop_unsafeDiscardStringContext({ .fun = prim_unsafeDiscardStringContext, }); - -static void prim_hasContext(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_hasContext(EvalState & state, const PosIdx pos, Value ** args, Value & v) { NixStringContext context; state.forceString(*args[0], context, pos, "while evaluating the argument passed to builtins.hasContext"); v.mkBool(!context.empty()); } -static RegisterPrimOp primop_hasContext({ - .name = "__hasContext", - .args = {"s"}, - .doc = R"( +static RegisterPrimOp primop_hasContext( + {.name = "__hasContext", + .args = {"s"}, + .doc = R"( Return `true` if string *s* has a non-empty context. The context can be obtained with [`getContext`](#builtins-getContext). @@ -50,21 +50,18 @@ static RegisterPrimOp primop_hasContext({ > else { ${name} = meta; } > ``` )", - .fun = prim_hasContext -}); + .fun = prim_hasContext}); - -static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx pos, Value ** args, Value & v) { NixStringContext context; - auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardOutputDependency"); + auto s = state.coerceToString( + pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardOutputDependency"); NixStringContext context2; for (auto && c : context) { if (auto * ptr = std::get_if(&c.raw)) { - context2.emplace(NixStringContextElem::Opaque { - .path = ptr->drvPath - }); + context2.emplace(NixStringContextElem::Opaque{.path = ptr->drvPath}); } else { /* Can reuse original item */ context2.emplace(std::move(c).raw); @@ -74,10 +71,10 @@ static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx p v.mkString(*s, context2); } -static RegisterPrimOp primop_unsafeDiscardOutputDependency({ - .name = "__unsafeDiscardOutputDependency", - .args = {"s"}, - .doc = R"( +static RegisterPrimOp primop_unsafeDiscardOutputDependency( + {.name = "__unsafeDiscardOutputDependency", + .args = {"s"}, + .doc = R"( Create a copy of the given string where every [derivation deep](@docroot@/language/string-context.md#string-context-element-derivation-deep) string context element is turned into a @@ -94,58 +91,58 @@ static RegisterPrimOp primop_unsafeDiscardOutputDependency({ [`builtins.addDrvOutputDependencies`]: #builtins-addDrvOutputDependencies )", - .fun = prim_unsafeDiscardOutputDependency -}); + .fun = 
prim_unsafeDiscardOutputDependency}); - -static void prim_addDrvOutputDependencies(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_addDrvOutputDependencies(EvalState & state, const PosIdx pos, Value ** args, Value & v) { NixStringContext context; - auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.addDrvOutputDependencies"); + auto s = state.coerceToString( + pos, *args[0], context, "while evaluating the argument passed to builtins.addDrvOutputDependencies"); - auto contextSize = context.size(); + auto contextSize = context.size(); if (contextSize != 1) { - state.error( - "context of string '%s' must have exactly one element, but has %d", - *s, - contextSize - ).atPos(pos).debugThrow(); + state.error("context of string '%s' must have exactly one element, but has %d", *s, contextSize) + .atPos(pos) + .debugThrow(); } - NixStringContext context2 { - (NixStringContextElem { std::visit(overloaded { - [&](const NixStringContextElem::Opaque & c) -> NixStringContextElem::DrvDeep { - if (!c.path.isDerivation()) { - state.error( - "path '%s' is not a derivation", - state.store->printStorePath(c.path) - ).atPos(pos).debugThrow(); - } - return NixStringContextElem::DrvDeep { - .drvPath = c.path, - }; + NixStringContext context2{ + (NixStringContextElem{std::visit( + overloaded{ + [&](const NixStringContextElem::Opaque & c) -> NixStringContextElem::DrvDeep { + if (!c.path.isDerivation()) { + state.error("path '%s' is not a derivation", state.store->printStorePath(c.path)) + .atPos(pos) + .debugThrow(); + } + return NixStringContextElem::DrvDeep{ + .drvPath = c.path, + }; + }, + [&](const NixStringContextElem::Built & c) -> NixStringContextElem::DrvDeep { + state + .error( + "`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'", + c.output) + .atPos(pos) + .debugThrow(); + }, + [&](const NixStringContextElem::DrvDeep & c) -> NixStringContextElem::DrvDeep { + /* Reuse original item because we want this to be idempotent. */ + /* FIXME: Suspicious move out of const. This is actually a copy, so the comment + above does not make much sense. */ + return std::move(c); + }, }, - [&](const NixStringContextElem::Built & c) -> NixStringContextElem::DrvDeep { - state.error( - "`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'", - c.output - ).atPos(pos).debugThrow(); - }, - [&](const NixStringContextElem::DrvDeep & c) -> NixStringContextElem::DrvDeep { - /* Reuse original item because we want this to be idempotent. */ - /* FIXME: Suspicious move out of const. This is actually a copy, so the comment - above does not make much sense. */ - return std::move(c); - }, - }, context.begin()->raw) }), + context.begin()->raw)}), }; v.mkString(*s, context2); } -static RegisterPrimOp primop_addDrvOutputDependencies({ - .name = "__addDrvOutputDependencies", - .args = {"s"}, - .doc = R"( +static RegisterPrimOp primop_addDrvOutputDependencies( + {.name = "__addDrvOutputDependencies", + .args = {"s"}, + .doc = R"( Create a copy of the given string where a single [constant](@docroot@/language/string-context.md#string-context-element-constant) string context element is turned into a @@ -159,9 +156,7 @@ static RegisterPrimOp primop_addDrvOutputDependencies({ This is the opposite of [`builtins.unsafeDiscardOutputDependency`](#builtins-unsafeDiscardOutputDependency). 
)", - .fun = prim_addDrvOutputDependencies -}); - + .fun = prim_addDrvOutputDependencies}); /* Extract the context of a string as a structured Nix value. @@ -182,31 +177,31 @@ static RegisterPrimOp primop_addDrvOutputDependencies({ Note that for a given path any combination of the above attributes may be present. */ -static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_getContext(EvalState & state, const PosIdx pos, Value ** args, Value & v) { - struct ContextInfo { + struct ContextInfo + { bool path = false; bool allOutputs = false; Strings outputs; }; + NixStringContext context; state.forceString(*args[0], context, pos, "while evaluating the argument passed to builtins.getContext"); auto contextInfos = std::map(); for (auto && i : context) { - std::visit(overloaded { - [&](NixStringContextElem::DrvDeep && d) { - contextInfos[std::move(d.drvPath)].allOutputs = true; + std::visit( + overloaded{ + [&](NixStringContextElem::DrvDeep && d) { contextInfos[std::move(d.drvPath)].allOutputs = true; }, + [&](NixStringContextElem::Built && b) { + // FIXME should eventually show string context as is, no + // resolving here. + auto drvPath = resolveDerivedPath(*state.store, *b.drvPath); + contextInfos[std::move(drvPath)].outputs.emplace_back(std::move(b.output)); + }, + [&](NixStringContextElem::Opaque && o) { contextInfos[std::move(o.path)].path = true; }, }, - [&](NixStringContextElem::Built && b) { - // FIXME should eventually show string context as is, no - // resolving here. - auto drvPath = resolveDerivedPath(*state.store, *b.drvPath); - contextInfos[std::move(drvPath)].outputs.emplace_back(std::move(b.output)); - }, - [&](NixStringContextElem::Opaque && o) { - contextInfos[std::move(o.path)].path = true; - }, - }, ((NixStringContextElem &&) i).raw); + ((NixStringContextElem &&) i).raw); } auto attrs = state.buildBindings(contextInfos.size()); @@ -231,10 +226,10 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args, v.mkAttrs(attrs); } -static RegisterPrimOp primop_getContext({ - .name = "__getContext", - .args = {"s"}, - .doc = R"( +static RegisterPrimOp primop_getContext( + {.name = "__getContext", + .args = {"s"}, + .doc = R"( Return the string context of *s*. The string context tracks references to derivations within a string. @@ -253,19 +248,18 @@ static RegisterPrimOp primop_getContext({ { "/nix/store/arhvjaf6zmlyn8vh8fgn55rpwnxq0n7l-a.drv" = { outputs = [ "out" ]; }; } ``` )", - .fun = prim_getContext -}); - + .fun = prim_getContext}); /* Append the given context to a given string. See the commentary above getContext for details of the context representation. 
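As an aside for readers following the `getContext`/`appendContext` pair being reformatted here: `__appendContext` is registered without a doc string, so a minimal round-trip sketch may help. This is illustrative only and assumes nothing beyond the builtins themselves; the derivation attributes and shell command are placeholders, not part of the patch.

```nix
# Illustrative sketch only: strip a string's context, then re-attach it with appendContext.
let
  drv = builtins.derivation {
    name = "example";
    system = builtins.currentSystem;
    builder = "/bin/sh";
    args = [ "-c" "echo hi > $out" ];
  };
  withCtx = "${drv}";                                   # carries drv's output path in its context
  plain = builtins.unsafeDiscardStringContext withCtx;  # same text, empty context
in
  # getContext returns something like
  #   { "/nix/store/<hash>-example.drv" = { outputs = [ "out" ]; }; }
  # and appendContext rebuilds context elements from the `path`,
  # `allOutputs` and `outputs` attributes, as the code below shows.
  builtins.appendContext plain (builtins.getContext withCtx)
```

The result is a string equal to `plain` whose context again refers to `drv`'s output, which is exactly the representation `prim_appendContext` reconstructs.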
*/ -static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_appendContext(EvalState & state, const PosIdx pos, Value ** args, Value & v) { NixStringContext context; - auto orig = state.forceString(*args[0], context, noPos, "while evaluating the first argument passed to builtins.appendContext"); + auto orig = state.forceString( + *args[0], context, noPos, "while evaluating the first argument passed to builtins.appendContext"); state.forceAttrs(*args[1], pos, "while evaluating the second argument passed to builtins.appendContext"); @@ -274,10 +268,7 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar for (auto & i : *args[1]->attrs()) { const auto & name = state.symbols[i.name]; if (!state.store->isStorePath(name)) - state.error( - "context key '%s' is not a store path", - name - ).atPos(i.pos).debugThrow(); + state.error("context key '%s' is not a store path", name).atPos(i.pos).debugThrow(); auto namePath = state.store->parseStorePath(name); if (!settings.readOnlyMode) state.store->ensurePath(namePath); @@ -285,39 +276,46 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar if (auto attr = i.value->attrs()->get(sPath)) { if (state.forceBool(*attr->value, attr->pos, "while evaluating the `path` attribute of a string context")) - context.emplace(NixStringContextElem::Opaque { - .path = namePath, - }); + context.emplace( + NixStringContextElem::Opaque{ + .path = namePath, + }); } if (auto attr = i.value->attrs()->get(sAllOutputs)) { - if (state.forceBool(*attr->value, attr->pos, "while evaluating the `allOutputs` attribute of a string context")) { + if (state.forceBool( + *attr->value, attr->pos, "while evaluating the `allOutputs` attribute of a string context")) { if (!isDerivation(name)) { - state.error( - "tried to add all-outputs context of %s, which is not a derivation, to a string", - name - ).atPos(i.pos).debugThrow(); + state + .error( + "tried to add all-outputs context of %s, which is not a derivation, to a string", name) + .atPos(i.pos) + .debugThrow(); } - context.emplace(NixStringContextElem::DrvDeep { - .drvPath = namePath, - }); + context.emplace( + NixStringContextElem::DrvDeep{ + .drvPath = namePath, + }); } } if (auto attr = i.value->attrs()->get(state.sOutputs)) { state.forceList(*attr->value, attr->pos, "while evaluating the `outputs` attribute of a string context"); if (attr->value->listSize() && !isDerivation(name)) { - state.error( - "tried to add derivation output context of %s, which is not a derivation, to a string", - name - ).atPos(i.pos).debugThrow(); + state + .error( + "tried to add derivation output context of %s, which is not a derivation, to a string", name) + .atPos(i.pos) + .debugThrow(); } for (auto elem : attr->value->listView()) { - auto outputName = state.forceStringNoCtx(*elem, attr->pos, "while evaluating an output name within a string context"); - context.emplace(NixStringContextElem::Built { - .drvPath = makeConstantStorePathRef(namePath), - .output = std::string { outputName }, - }); + auto outputName = + state.forceStringNoCtx(*elem, attr->pos, "while evaluating an output name within a string context"); + context.emplace( + NixStringContextElem::Built{ + .drvPath = makeConstantStorePathRef(namePath), + .output = std::string{outputName}, + }); } } } @@ -325,10 +323,6 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar v.mkString(orig, context); } -static RegisterPrimOp primop_appendContext({ - .name 
= "__appendContext", - .arity = 2, - .fun = prim_appendContext -}); +static RegisterPrimOp primop_appendContext({.name = "__appendContext", .arity = 2, .fun = prim_appendContext}); -} +} // namespace nix diff --git a/src/libexpr/primops/fetchClosure.cc b/src/libexpr/primops/fetchClosure.cc index 4be4dac8f..d3b38e5a3 100644 --- a/src/libexpr/primops/fetchClosure.cc +++ b/src/libexpr/primops/fetchClosure.cc @@ -15,29 +15,35 @@ namespace nix { * @param toPathMaybe Path to write the rewritten path to. If empty, the error shows the actual path. * @param v Return `Value` */ -static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Store & fromStore, const StorePath & fromPath, const std::optional & toPathMaybe, Value &v) { +static void runFetchClosureWithRewrite( + EvalState & state, + const PosIdx pos, + Store & fromStore, + const StorePath & fromPath, + const std::optional & toPathMaybe, + Value & v) +{ // establish toPath or throw if (!toPathMaybe || !state.store->isValidPath(*toPathMaybe)) { auto rewrittenPath = makeContentAddressed(fromStore, *state.store, fromPath); if (toPathMaybe && *toPathMaybe != rewrittenPath) - throw Error({ - .msg = HintFmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected", - state.store->printStorePath(fromPath), - state.store->printStorePath(rewrittenPath), - state.store->printStorePath(*toPathMaybe)), - .pos = state.positions[pos] - }); + throw Error( + {.msg = HintFmt( + "rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected", + state.store->printStorePath(fromPath), + state.store->printStorePath(rewrittenPath), + state.store->printStorePath(*toPathMaybe)), + .pos = state.positions[pos]}); if (!toPathMaybe) - throw Error({ - .msg = HintFmt( - "rewriting '%s' to content-addressed form yielded '%s'\n" - "Use this value for the 'toPath' attribute passed to 'fetchClosure'", - state.store->printStorePath(fromPath), - state.store->printStorePath(rewrittenPath)), - .pos = state.positions[pos] - }); + throw Error( + {.msg = HintFmt( + "rewriting '%s' to content-addressed form yielded '%s'\n" + "Use this value for the 'toPath' attribute passed to 'fetchClosure'", + state.store->printStorePath(fromPath), + state.store->printStorePath(rewrittenPath)), + .pos = state.positions[pos]}); } const auto & toPath = *toPathMaybe; @@ -49,13 +55,12 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor if (!resultInfo->isContentAddressed(*state.store)) { // We don't perform the rewriting when outPath already exists, as an optimisation. // However, we can quickly detect a mistake if the toPath is input addressed. - throw Error({ - .msg = HintFmt( - "The 'toPath' value '%s' is input-addressed, so it can't possibly be the result of rewriting to a content-addressed path.\n\n" - "Set 'toPath' to an empty string to make Nix report the correct content-addressed path.", - state.store->printStorePath(toPath)), - .pos = state.positions[pos] - }); + throw Error( + {.msg = HintFmt( + "The 'toPath' value '%s' is input-addressed, so it can't possibly be the result of rewriting to a content-addressed path.\n\n" + "Set 'toPath' to an empty string to make Nix report the correct content-addressed path.", + state.store->printStorePath(toPath)), + .pos = state.positions[pos]}); } state.mkStorePathString(toPath, v); @@ -64,24 +69,25 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor /** * Fetch the closure and make sure it's content addressed. 
*/ -static void runFetchClosureWithContentAddressedPath(EvalState & state, const PosIdx pos, Store & fromStore, const StorePath & fromPath, Value & v) { +static void runFetchClosureWithContentAddressedPath( + EvalState & state, const PosIdx pos, Store & fromStore, const StorePath & fromPath, Value & v) +{ if (!state.store->isValidPath(fromPath)) - copyClosure(fromStore, *state.store, RealisedPath::Set { fromPath }); + copyClosure(fromStore, *state.store, RealisedPath::Set{fromPath}); auto info = state.store->queryPathInfo(fromPath); if (!info->isContentAddressed(*state.store)) { - throw Error({ - .msg = HintFmt( - "The 'fromPath' value '%s' is input-addressed, but 'inputAddressed' is set to 'false' (default).\n\n" - "If you do intend to fetch an input-addressed store path, add\n\n" - " inputAddressed = true;\n\n" - "to the 'fetchClosure' arguments.\n\n" - "Note that to ensure authenticity input-addressed store paths, users must configure a trusted binary cache public key on their systems. This is not needed for content-addressed paths.", - state.store->printStorePath(fromPath)), - .pos = state.positions[pos] - }); + throw Error( + {.msg = HintFmt( + "The 'fromPath' value '%s' is input-addressed, but 'inputAddressed' is set to 'false' (default).\n\n" + "If you do intend to fetch an input-addressed store path, add\n\n" + " inputAddressed = true;\n\n" + "to the 'fetchClosure' arguments.\n\n" + "Note that to ensure authenticity input-addressed store paths, users must configure a trusted binary cache public key on their systems. This is not needed for content-addressed paths.", + state.store->printStorePath(fromPath)), + .pos = state.positions[pos]}); } state.mkStorePathString(fromPath, v); @@ -90,21 +96,22 @@ static void runFetchClosureWithContentAddressedPath(EvalState & state, const Pos /** * Fetch the closure and make sure it's input addressed. 
*/ -static void runFetchClosureWithInputAddressedPath(EvalState & state, const PosIdx pos, Store & fromStore, const StorePath & fromPath, Value & v) { +static void runFetchClosureWithInputAddressedPath( + EvalState & state, const PosIdx pos, Store & fromStore, const StorePath & fromPath, Value & v) +{ if (!state.store->isValidPath(fromPath)) - copyClosure(fromStore, *state.store, RealisedPath::Set { fromPath }); + copyClosure(fromStore, *state.store, RealisedPath::Set{fromPath}); auto info = state.store->queryPathInfo(fromPath); if (info->isContentAddressed(*state.store)) { - throw Error({ - .msg = HintFmt( - "The store object referred to by 'fromPath' at '%s' is not input-addressed, but 'inputAddressed' is set to 'true'.\n\n" - "Remove the 'inputAddressed' attribute (it defaults to 'false') to expect 'fromPath' to be content-addressed", - state.store->printStorePath(fromPath)), - .pos = state.positions[pos] - }); + throw Error( + {.msg = HintFmt( + "The store object referred to by 'fromPath' at '%s' is not input-addressed, but 'inputAddressed' is set to 'true'.\n\n" + "Remove the 'inputAddressed' attribute (it defaults to 'false') to expect 'fromPath' to be content-addressed", + state.store->printStorePath(fromPath)), + .pos = state.positions[pos]}); } state.mkStorePathString(fromPath, v); @@ -112,7 +119,7 @@ static void runFetchClosureWithInputAddressedPath(EvalState & state, const PosId typedef std::optional StorePathOrGap; -static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.fetchClosure"); @@ -136,67 +143,58 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg state.forceValue(*attr.value, attr.pos); bool isEmptyString = attr.value->type() == nString && attr.value->string_view() == ""; if (isEmptyString) { - toPath = StorePathOrGap {}; - } - else { + toPath = StorePathOrGap{}; + } else { NixStringContext context; toPath = state.coerceToStorePath(attr.pos, *attr.value, context, attrHint()); } } else if (attrName == "fromStore") - fromStoreUrl = state.forceStringNoCtx(*attr.value, attr.pos, - attrHint()); + fromStoreUrl = state.forceStringNoCtx(*attr.value, attr.pos, attrHint()); else if (attrName == "inputAddressed") inputAddressedMaybe = state.forceBool(*attr.value, attr.pos, attrHint()); else - throw Error({ - .msg = HintFmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName), - .pos = state.positions[pos] - }); + throw Error( + {.msg = HintFmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName), + .pos = state.positions[pos]}); } if (!fromPath) - throw Error({ - .msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"), - .pos = state.positions[pos] - }); + throw Error( + {.msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"), + .pos = state.positions[pos]}); bool inputAddressed = inputAddressedMaybe.value_or(false); if (inputAddressed) { if (toPath) - throw Error({ - .msg = HintFmt("attribute '%s' is set to true, but '%s' is also set. Please remove one of them", - "inputAddressed", - "toPath"), - .pos = state.positions[pos] - }); + throw Error( + {.msg = HintFmt( + "attribute '%s' is set to true, but '%s' is also set. 
Please remove one of them", + "inputAddressed", + "toPath"), + .pos = state.positions[pos]}); } if (!fromStoreUrl) - throw Error({ - .msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"), - .pos = state.positions[pos] - }); + throw Error( + {.msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"), + .pos = state.positions[pos]}); auto parsedURL = parseURL(*fromStoreUrl); - if (parsedURL.scheme != "http" && - parsedURL.scheme != "https" && - !(getEnv("_NIX_IN_TEST").has_value() && parsedURL.scheme == "file")) - throw Error({ - .msg = HintFmt("'fetchClosure' only supports http:// and https:// stores"), - .pos = state.positions[pos] - }); + if (parsedURL.scheme != "http" && parsedURL.scheme != "https" + && !(getEnv("_NIX_IN_TEST").has_value() && parsedURL.scheme == "file")) + throw Error( + {.msg = HintFmt("'fetchClosure' only supports http:// and https:// stores"), .pos = state.positions[pos]}); if (!parsedURL.query.empty()) - throw Error({ - .msg = HintFmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl), - .pos = state.positions[pos] - }); + throw Error( + {.msg = HintFmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl), + .pos = state.positions[pos]}); auto fromStore = openStore(parsedURL.to_string()); @@ -284,4 +282,4 @@ static RegisterPrimOp primop_fetchClosure({ .experimentalFeature = Xp::FetchClosure, }); -} +} // namespace nix diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index 189bd1f73..9fc8e6c83 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -8,7 +8,7 @@ namespace nix { -static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value ** args, Value & v) { std::string url; std::optional rev; @@ -23,31 +23,46 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a for (auto & attr : *args[0]->attrs()) { std::string_view n(state.symbols[attr.name]); if (n == "url") - url = state.coerceToString(attr.pos, *attr.value, context, - "while evaluating the `url` attribute passed to builtins.fetchMercurial", - false, false).toOwned(); + url = state + .coerceToString( + attr.pos, + *attr.value, + context, + "while evaluating the `url` attribute passed to builtins.fetchMercurial", + false, + false) + .toOwned(); else if (n == "rev") { // Ugly: unlike fetchGit, here the "rev" attribute can // be both a revision or a branch/tag name. 
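To illustrate the `rev`/branch disambiguation handled just below, a hedged usage sketch (the repository URL is a placeholder; only the shape of the call is the point):

```nix
# Illustrative sketch only: fetchMercurial accepts either form in `rev`.
{
  byRevision = builtins.fetchMercurial {
    url = "https://example.org/some-repo";
    # a 40-character hex string is parsed as a SHA-1 revision hash
    rev = "0123456789abcdef0123456789abcdef01234567";
  };
  byBranch = builtins.fetchMercurial {
    url = "https://example.org/some-repo";
    # anything else is treated as a branch or tag name (a "ref")
    rev = "default";
  };
}
```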
- auto value = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `rev` attribute passed to builtins.fetchMercurial"); + auto value = state.forceStringNoCtx( + *attr.value, attr.pos, "while evaluating the `rev` attribute passed to builtins.fetchMercurial"); if (std::regex_match(value.begin(), value.end(), revRegex)) rev = Hash::parseAny(value, HashAlgorithm::SHA1); else ref = value; - } - else if (n == "name") - name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `name` attribute passed to builtins.fetchMercurial"); + } else if (n == "name") + name = state.forceStringNoCtx( + *attr.value, attr.pos, "while evaluating the `name` attribute passed to builtins.fetchMercurial"); else - state.error("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]).atPos(attr.pos).debugThrow(); + state.error("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]) + .atPos(attr.pos) + .debugThrow(); } if (url.empty()) state.error("'url' argument required").atPos(pos).debugThrow(); } else - url = state.coerceToString(pos, *args[0], context, - "while evaluating the first argument passed to builtins.fetchMercurial", - false, false).toOwned(); + url = state + .coerceToString( + pos, + *args[0], + context, + "while evaluating the first argument passed to builtins.fetchMercurial", + false, + false) + .toOwned(); // FIXME: git externals probably can be used to bypass the URI // whitelist. Ah well. @@ -60,8 +75,10 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a attrs.insert_or_assign("type", "hg"); attrs.insert_or_assign("url", url.find("://") != std::string::npos ? url : "file://" + url); attrs.insert_or_assign("name", std::string(name)); - if (ref) attrs.insert_or_assign("ref", *ref); - if (rev) attrs.insert_or_assign("rev", rev->gitRev()); + if (ref) + attrs.insert_or_assign("ref", *ref); + if (rev) + attrs.insert_or_assign("rev", rev->gitRev()); auto input = fetchers::Input::fromAttrs(state.fetchSettings, std::move(attrs)); auto [storePath, input2] = input.fetchToStore(state.store); @@ -82,10 +99,6 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a state.allowPath(storePath); } -static RegisterPrimOp r_fetchMercurial({ - .name = "fetchMercurial", - .arity = 1, - .fun = prim_fetchMercurial -}); +static RegisterPrimOp r_fetchMercurial({.name = "fetchMercurial", .arity = 1, .fun = prim_fetchMercurial}); -} +} // namespace nix diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 5b6dd6531..274f758a7 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -37,8 +37,7 @@ void emitTreeAttrs( attrs.alloc("narHash").mkString(narHash->to_string(HashFormat::SRI, true)); if (input.getType() == "git") - attrs.alloc("submodules").mkBool( - fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false)); + attrs.alloc("submodules").mkBool(fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false)); if (!forceDirty) { @@ -56,7 +55,6 @@ void emitTreeAttrs( attrs.alloc("revCount").mkInt(*revCount); else if (emptyRevFallback) attrs.alloc("revCount").mkInt(0); - } if (auto dirtyRev = fetchers::maybeGetStrAttr(input.attrs, "dirtyRev")) { @@ -66,14 +64,14 @@ void emitTreeAttrs( if (auto lastModified = input.getLastModified()) { attrs.alloc("lastModified").mkInt(*lastModified); - attrs.alloc("lastModifiedDate").mkString( - fmt("%s", std::put_time(std::gmtime(&*lastModified), "%Y%m%d%H%M%S"))); + 
attrs.alloc("lastModifiedDate").mkString(fmt("%s", std::put_time(std::gmtime(&*lastModified), "%Y%m%d%H%M%S"))); } v.mkAttrs(attrs); } -struct FetchTreeParams { +struct FetchTreeParams +{ bool emptyRevFallback = false; bool allowNameArgument = false; bool isFetchGit = false; @@ -81,17 +79,14 @@ struct FetchTreeParams { }; static void fetchTree( - EvalState & state, - const PosIdx pos, - Value * * args, - Value & v, - const FetchTreeParams & params = FetchTreeParams{} -) { - fetchers::Input input { state.fetchSettings }; + EvalState & state, const PosIdx pos, Value ** args, Value & v, const FetchTreeParams & params = FetchTreeParams{}) +{ + fetchers::Input input{state.fetchSettings}; NixStringContext context; std::optional type; auto fetcher = params.isFetchGit ? "fetchGit" : "fetchTree"; - if (params.isFetchGit) type = "git"; + if (params.isFetchGit) + type = "git"; state.forceValue(*args[0], pos); @@ -102,47 +97,55 @@ static void fetchTree( if (auto aType = args[0]->attrs()->get(state.sType)) { if (type) - state.error( - "unexpected argument 'type'" - ).atPos(pos).debugThrow(); - type = state.forceStringNoCtx(*aType->value, aType->pos, - fmt("while evaluating the `type` argument passed to '%s'", fetcher)); + state.error("unexpected argument 'type'").atPos(pos).debugThrow(); + type = state.forceStringNoCtx( + *aType->value, aType->pos, fmt("while evaluating the `type` argument passed to '%s'", fetcher)); } else if (!type) - state.error( - "argument 'type' is missing in call to '%s'", fetcher - ).atPos(pos).debugThrow(); + state.error("argument 'type' is missing in call to '%s'", fetcher).atPos(pos).debugThrow(); attrs.emplace("type", type.value()); for (auto & attr : *args[0]->attrs()) { - if (attr.name == state.sType) continue; + if (attr.name == state.sType) + continue; state.forceValue(*attr.value, attr.pos); if (attr.value->type() == nPath || attr.value->type() == nString) { auto s = state.coerceToString(attr.pos, *attr.value, context, "", false, false).toOwned(); - attrs.emplace(state.symbols[attr.name], - params.isFetchGit && state.symbols[attr.name] == "url" - ? fixGitURL(s) - : s); - } - else if (attr.value->type() == nBool) + attrs.emplace( + state.symbols[attr.name], + params.isFetchGit && state.symbols[attr.name] == "url" ? 
fixGitURL(s) : s); + } else if (attr.value->type() == nBool) attrs.emplace(state.symbols[attr.name], Explicit{attr.value->boolean()}); else if (attr.value->type() == nInt) { auto intValue = attr.value->integer().value; if (intValue < 0) - state.error("negative value given for '%s' argument '%s': %d", fetcher, state.symbols[attr.name], intValue).atPos(pos).debugThrow(); + state + .error( + "negative value given for '%s' argument '%s': %d", + fetcher, + state.symbols[attr.name], + intValue) + .atPos(pos) + .debugThrow(); attrs.emplace(state.symbols[attr.name], uint64_t(intValue)); } else if (state.symbols[attr.name] == "publicKeys") { experimentalFeatureSettings.require(Xp::VerifiedFetches); - attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, context).dump()); - } - else - state.error("argument '%s' to '%s' is %s while a string, Boolean or integer is expected", - state.symbols[attr.name], fetcher, showType(*attr.value)).debugThrow(); + attrs.emplace( + state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, context).dump()); + } else + state + .error( + "argument '%s' to '%s' is %s while a string, Boolean or integer is expected", + state.symbols[attr.name], + fetcher, + showType(*attr.value)) + .debugThrow(); } - if (params.isFetchGit && !attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) { + if (params.isFetchGit && !attrs.contains("exportIgnore") + && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) { attrs.emplace("exportIgnore", Explicit{true}); } @@ -153,29 +156,38 @@ static void fetchTree( if (!params.allowNameArgument) if (auto nameIter = attrs.find("name"); nameIter != attrs.end()) - state.error( - "argument 'name' isn’t supported in call to '%s'", fetcher - ).atPos(pos).debugThrow(); + state.error("argument 'name' isn’t supported in call to '%s'", fetcher) + .atPos(pos) + .debugThrow(); input = fetchers::Input::fromAttrs(state.fetchSettings, std::move(attrs)); } else { - auto url = state.coerceToString(pos, *args[0], context, - fmt("while evaluating the first argument passed to '%s'", fetcher), - false, false).toOwned(); + auto url = state + .coerceToString( + pos, + *args[0], + context, + fmt("while evaluating the first argument passed to '%s'", fetcher), + false, + false) + .toOwned(); if (params.isFetchGit) { fetchers::Attrs attrs; attrs.emplace("type", "git"); attrs.emplace("url", fixGitURL(url)); - if (!attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) { + if (!attrs.contains("exportIgnore") + && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) { attrs.emplace("exportIgnore", Explicit{true}); } input = fetchers::Input::fromAttrs(state.fetchSettings, std::move(attrs)); } else { if (!experimentalFeatureSettings.isEnabled(Xp::Flakes)) - state.error( - "passing a string argument to '%s' requires the 'flakes' experimental feature", fetcher - ).atPos(pos).debugThrow(); + state + .error( + "passing a string argument to '%s' requires the 'flakes' experimental feature", fetcher) + .atPos(pos) + .debugThrow(); input = fetchers::Input::fromURL(state.fetchSettings, url); } } @@ -190,9 +202,11 @@ static void fetchTree( "This is deprecated since such inputs are verifiable but may not be reproducible.", input.to_string()); else - state.error( - "in pure evaluation mode, '%s' doesn't fetch unlocked input '%s'", - fetcher, 
input.to_string()).atPos(pos).debugThrow(); + state + .error( + "in pure evaluation mode, '%s' doesn't fetch unlocked input '%s'", fetcher, input.to_string()) + .atPos(pos) + .debugThrow(); } state.checkURI(input.toURLString()); @@ -211,9 +225,9 @@ static void fetchTree( emitTreeAttrs(state, storePath, input2, v, params.emptyRevFallback, false); } -static void prim_fetchTree(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_fetchTree(EvalState & state, const PosIdx pos, Value ** args, Value & v) { - fetchTree(state, pos, args, v, { }); + fetchTree(state, pos, args, v, {}); } static RegisterPrimOp primop_fetchTree({ @@ -446,7 +460,7 @@ static RegisterPrimOp primop_fetchTree({ .experimentalFeature = Xp::FetchTree, }); -void prim_fetchFinalTree(EvalState & state, const PosIdx pos, Value * * args, Value & v) +void prim_fetchFinalTree(EvalState & state, const PosIdx pos, Value ** args, Value & v) { fetchTree(state, pos, args, v, {.isFinal = true}); } @@ -458,8 +472,14 @@ static RegisterPrimOp primop_fetchFinalTree({ .internal = true, }); -static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v, - const std::string & who, bool unpack, std::string name) +static void fetch( + EvalState & state, + const PosIdx pos, + Value ** args, + Value & v, + const std::string & who, + bool unpack, + std::string name) { std::optional url; std::optional expectedHash; @@ -476,19 +496,20 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v if (n == "url") url = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the url we should fetch"); else if (n == "sha256") - expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), HashAlgorithm::SHA256); + expectedHash = newHashAllowEmpty( + state.forceStringNoCtx( + *attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), + HashAlgorithm::SHA256); else if (n == "name") { nameAttrPassed = true; - name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch"); - } - else - state.error("unsupported argument '%s' to '%s'", n, who) - .atPos(pos).debugThrow(); + name = state.forceStringNoCtx( + *attr.value, attr.pos, "while evaluating the name of the content we should fetch"); + } else + state.error("unsupported argument '%s' to '%s'", n, who).atPos(pos).debugThrow(); } if (!url) - state.error( - "'url' argument required").atPos(pos).debugThrow(); + state.error("'url' argument required").atPos(pos).debugThrow(); } else url = state.forceStringNoCtx(*args[0], pos, "while evaluating the url we should fetch"); @@ -504,27 +525,41 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v checkName(name); } catch (BadStorePathName & e) { auto resolution = - nameAttrPassed ? HintFmt("Please change the value for the 'name' attribute passed to '%s', so that it can create a valid store path.", who) : - isArgAttrs ? HintFmt("Please add a valid 'name' attribute to the argument for '%s', so that it can create a valid store path.", who) : - HintFmt("Please pass an attribute set with 'url' and 'name' attributes to '%s', so that it can create a valid store path.", who); + nameAttrPassed + ? HintFmt( + "Please change the value for the 'name' attribute passed to '%s', so that it can create a valid store path.", + who) + : isArgAttrs + ? 
HintFmt( + "Please add a valid 'name' attribute to the argument for '%s', so that it can create a valid store path.", + who) + : HintFmt( + "Please pass an attribute set with 'url' and 'name' attributes to '%s', so that it can create a valid store path.", + who); - state.error( - std::string("invalid store path name when fetching URL '%s': %s. %s"), *url, Uncolored(e.message()), Uncolored(resolution.str())) - .atPos(pos).debugThrow(); + state + .error( + std::string("invalid store path name when fetching URL '%s': %s. %s"), + *url, + Uncolored(e.message()), + Uncolored(resolution.str())) + .atPos(pos) + .debugThrow(); } if (state.settings.pureEval && !expectedHash) - state.error("in pure evaluation mode, '%s' requires a 'sha256' argument", who).atPos(pos).debugThrow(); + state.error("in pure evaluation mode, '%s' requires a 'sha256' argument", who) + .atPos(pos) + .debugThrow(); // early exit if pinned and already in the store if (expectedHash && expectedHash->algo == HashAlgorithm::SHA256) { auto expectedPath = state.store->makeFixedOutputPath( name, - FixedOutputInfo { + FixedOutputInfo{ .method = unpack ? FileIngestionMethod::NixArchive : FileIngestionMethod::Flat, .hash = *expectedHash, - .references = {} - }); + .references = {}}); if (state.store->isValidPath(expectedPath)) { state.allowAndSetStorePathString(expectedPath, v); @@ -534,35 +569,33 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v // TODO: fetching may fail, yet the path may be substitutable. // https://github.com/NixOS/nix/issues/4313 - auto storePath = - unpack - ? fetchToStore( - state.fetchSettings, - *state.store, - fetchers::downloadTarball(state.store, state.fetchSettings, *url), - FetchMode::Copy, - name) - : fetchers::downloadFile(state.store, state.fetchSettings, *url, name).storePath; + auto storePath = unpack ? fetchToStore( + state.fetchSettings, + *state.store, + fetchers::downloadTarball(state.store, state.fetchSettings, *url), + FetchMode::Copy, + name) + : fetchers::downloadFile(state.store, state.fetchSettings, *url, name).storePath; if (expectedHash) { - auto hash = unpack - ? state.store->queryPathInfo(storePath)->narHash - : hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath)); + auto hash = unpack ? 
state.store->queryPathInfo(storePath)->narHash + : hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath)); if (hash != *expectedHash) { - state.error( - "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s", - *url, - expectedHash->to_string(HashFormat::Nix32, true), - hash.to_string(HashFormat::Nix32, true) - ).withExitStatus(102) - .debugThrow(); + state + .error( + "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s", + *url, + expectedHash->to_string(HashFormat::Nix32, true), + hash.to_string(HashFormat::Nix32, true)) + .withExitStatus(102) + .debugThrow(); } } state.allowAndSetStorePathString(storePath, v); } -static void prim_fetchurl(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_fetchurl(EvalState & state, const PosIdx pos, Value ** args, Value & v) { fetch(state, pos, args, v, "fetchurl", false, ""); } @@ -588,7 +621,7 @@ static RegisterPrimOp primop_fetchurl({ .fun = prim_fetchurl, }); -static void prim_fetchTarball(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_fetchTarball(EvalState & state, const PosIdx pos, Value ** args, Value & v) { fetch(state, pos, args, v, "fetchTarball", true, "source"); } @@ -638,14 +671,10 @@ static RegisterPrimOp primop_fetchTarball({ .fun = prim_fetchTarball, }); -static void prim_fetchGit(EvalState & state, const PosIdx pos, Value * * args, Value & v) +static void prim_fetchGit(EvalState & state, const PosIdx pos, Value ** args, Value & v) { - fetchTree(state, pos, args, v, - FetchTreeParams { - .emptyRevFallback = true, - .allowNameArgument = true, - .isFetchGit = true - }); + fetchTree( + state, pos, args, v, FetchTreeParams{.emptyRevFallback = true, .allowNameArgument = true, .isFetchGit = true}); } static RegisterPrimOp primop_fetchGit({ @@ -858,4 +887,4 @@ static RegisterPrimOp primop_fetchGit({ .fun = prim_fetchGit, }); -} +} // namespace nix diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc index 2a29e0424..533739592 100644 --- a/src/libexpr/primops/fromTOML.cc +++ b/src/libexpr/primops/fromTOML.cc @@ -7,7 +7,7 @@ namespace nix { -static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, Value & val) +static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Value & val) { auto toml = state.forceStringNoCtx(*args[0], pos, "while evaluating the argument passed to builtins.fromTOML"); @@ -16,75 +16,75 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V std::function visit; visit = [&](Value & v, toml::value t) { + switch (t.type()) { + case toml::value_t::table: { + auto table = toml::get(t); - switch(t.type()) - { - case toml::value_t::table: - { - auto table = toml::get(t); + size_t size = 0; + for (auto & i : table) { + (void) i; + size++; + } - size_t size = 0; - for (auto & i : table) { (void) i; size++; } + auto attrs = state.buildBindings(size); - auto attrs = state.buildBindings(size); + for (auto & elem : table) { + forceNoNullByte(elem.first); + visit(attrs.alloc(elem.first), elem.second); + } - for(auto & elem : table) { - forceNoNullByte(elem.first); - visit(attrs.alloc(elem.first), elem.second); - } - - v.mkAttrs(attrs); - } - break;; - case toml::value_t::array: - { - auto array = toml::get>(t); - - auto list = state.buildList(array.size()); - for (const auto & [n, v] : enumerate(list)) - visit(*(v = state.allocValue()), array[n]); - v.mkList(list); - } - break;; - case toml::value_t::boolean: - 
v.mkBool(toml::get(t)); - break;; - case toml::value_t::integer: - v.mkInt(toml::get(t)); - break;; - case toml::value_t::floating: - v.mkFloat(toml::get(t)); - break;; - case toml::value_t::string: - { - auto s = toml::get(t); - forceNoNullByte(s); - v.mkString(s); - } - break;; - case toml::value_t::local_datetime: - case toml::value_t::offset_datetime: - case toml::value_t::local_date: - case toml::value_t::local_time: - { - if (experimentalFeatureSettings.isEnabled(Xp::ParseTomlTimestamps)) { - auto attrs = state.buildBindings(2); - attrs.alloc("_type").mkString("timestamp"); - std::ostringstream s; - s << t; - auto str = toView(s); - forceNoNullByte(str); - attrs.alloc("value").mkString(str); - v.mkAttrs(attrs); - } else { - throw std::runtime_error("Dates and times are not supported"); - } - } - break;; - case toml::value_t::empty: - v.mkNull(); - break;; + v.mkAttrs(attrs); + } break; + ; + case toml::value_t::array: { + auto array = toml::get>(t); + auto list = state.buildList(array.size()); + for (const auto & [n, v] : enumerate(list)) + visit(*(v = state.allocValue()), array[n]); + v.mkList(list); + } break; + ; + case toml::value_t::boolean: + v.mkBool(toml::get(t)); + break; + ; + case toml::value_t::integer: + v.mkInt(toml::get(t)); + break; + ; + case toml::value_t::floating: + v.mkFloat(toml::get(t)); + break; + ; + case toml::value_t::string: { + auto s = toml::get(t); + forceNoNullByte(s); + v.mkString(s); + } break; + ; + case toml::value_t::local_datetime: + case toml::value_t::offset_datetime: + case toml::value_t::local_date: + case toml::value_t::local_time: { + if (experimentalFeatureSettings.isEnabled(Xp::ParseTomlTimestamps)) { + auto attrs = state.buildBindings(2); + attrs.alloc("_type").mkString("timestamp"); + std::ostringstream s; + s << t; + auto str = toView(s); + forceNoNullByte(str); + attrs.alloc("value").mkString(str); + v.mkAttrs(attrs); + } else { + throw std::runtime_error("Dates and times are not supported"); + } + } break; + ; + case toml::value_t::empty: + v.mkNull(); + break; + ; } }; @@ -95,10 +95,10 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V } } -static RegisterPrimOp primop_fromTOML({ - .name = "fromTOML", - .args = {"e"}, - .doc = R"( +static RegisterPrimOp primop_fromTOML( + {.name = "fromTOML", + .args = {"e"}, + .doc = R"( Convert a TOML string to a Nix value. For example, ```nix @@ -112,7 +112,6 @@ static RegisterPrimOp primop_fromTOML({ returns the value `{ s = "a"; table = { y = 2; }; x = 1; }`. 
)", - .fun = prim_fromTOML -}); + .fun = prim_fromTOML}); -} +} // namespace nix diff --git a/src/libexpr/print-ambiguous.cc b/src/libexpr/print-ambiguous.cc index 2a0b009eb..8b80e2a66 100644 --- a/src/libexpr/print-ambiguous.cc +++ b/src/libexpr/print-ambiguous.cc @@ -7,11 +7,7 @@ namespace nix { // See: https://github.com/NixOS/nix/issues/9730 void printAmbiguous( - Value &v, - const SymbolTable &symbols, - std::ostream &str, - std::set *seen, - int depth) + Value & v, const SymbolTable & symbols, std::ostream & str, std::set * seen, int depth) { checkInterrupt(); @@ -100,4 +96,4 @@ void printAmbiguous( } } -} +} // namespace nix diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 1f0c592c1..502f32ea1 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -28,9 +28,7 @@ void printElided( output << ANSI_NORMAL; } - -std::ostream & -printLiteralString(std::ostream & str, const std::string_view string, size_t maxLength, bool ansiColors) +std::ostream & printLiteralString(std::ostream & str, const std::string_view string, size_t maxLength, bool ansiColors) { size_t charsPrinted = 0; if (ansiColors) @@ -43,12 +41,18 @@ printLiteralString(std::ostream & str, const std::string_view string, size_t max return str; } - if (*i == '\"' || *i == '\\') str << "\\" << *i; - else if (*i == '\n') str << "\\n"; - else if (*i == '\r') str << "\\r"; - else if (*i == '\t') str << "\\t"; - else if (*i == '$' && *(i+1) == '{') str << "\\" << *i; - else str << *i; + if (*i == '\"' || *i == '\\') + str << "\\" << *i; + else if (*i == '\n') + str << "\\n"; + else if (*i == '\r') + str << "\\r"; + else if (*i == '\t') + str << "\\t"; + else if (*i == '$' && *(i + 1) == '{') + str << "\\" << *i; + else + str << *i; charsPrinted++; } str << "\""; @@ -57,14 +61,12 @@ printLiteralString(std::ostream & str, const std::string_view string, size_t max return str; } -std::ostream & -printLiteralString(std::ostream & str, const std::string_view string) +std::ostream & printLiteralString(std::ostream & str, const std::string_view string) { return printLiteralString(str, string, std::numeric_limits::max(), false); } -std::ostream & -printLiteralBool(std::ostream & str, bool boolean) +std::ostream & printLiteralBool(std::ostream & str, bool boolean) { str << (boolean ? 
"true" : "false"); return str; @@ -80,13 +82,12 @@ printLiteralBool(std::ostream & str, bool boolean) bool isReservedKeyword(const std::string_view str) { static const std::unordered_set reservedKeywords = { - "if", "then", "else", "assert", "with", "let", "in", "rec", "inherit" - }; + "if", "then", "else", "assert", "with", "let", "in", "rec", "inherit"}; return reservedKeywords.contains(str); } -std::ostream & -printIdentifier(std::ostream & str, std::string_view s) { +std::ostream & printIdentifier(std::ostream & str, std::string_view s) +{ if (s.empty()) str << "\"\""; else if (isReservedKeyword(s)) @@ -98,10 +99,8 @@ printIdentifier(std::ostream & str, std::string_view s) { return str; } for (auto c : s) - if (!((c >= 'a' && c <= 'z') || - (c >= 'A' && c <= 'Z') || - (c >= '0' && c <= '9') || - c == '_' || c == '\'' || c == '-')) { + if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_' || c == '\'' + || c == '-')) { printLiteralString(str, s); return str; } @@ -112,21 +111,22 @@ printIdentifier(std::ostream & str, std::string_view s) { static bool isVarName(std::string_view s) { - if (s.size() == 0) return false; - if (isReservedKeyword(s)) return false; + if (s.size() == 0) + return false; + if (isReservedKeyword(s)) + return false; char c = s[0]; - if ((c >= '0' && c <= '9') || c == '-' || c == '\'') return false; + if ((c >= '0' && c <= '9') || c == '-' || c == '\'') + return false; for (auto & i : s) - if (!((i >= 'a' && i <= 'z') || - (i >= 'A' && i <= 'Z') || - (i >= '0' && i <= '9') || - i == '_' || i == '-' || i == '\'')) + if (!((i >= 'a' && i <= 'z') || (i >= 'A' && i <= 'Z') || (i >= '0' && i <= '9') || i == '_' || i == '-' + || i == '\'')) return false; return true; } -std::ostream & -printAttributeName(std::ostream & str, std::string_view name) { +std::ostream & printAttributeName(std::ostream & str, std::string_view name) +{ if (isVarName(name)) str << name; else @@ -134,7 +134,7 @@ printAttributeName(std::ostream & str, std::string_view name) { return str; } -bool isImportantAttrName(const std::string& attrName) +bool isImportantAttrName(const std::string & attrName) { return attrName == "type" || attrName == "_type"; } @@ -144,12 +144,11 @@ typedef std::pair AttrPair; struct ImportantFirstAttrNameCmp { - bool operator()(const AttrPair& lhs, const AttrPair& rhs) const + bool operator()(const AttrPair & lhs, const AttrPair & rhs) const { auto lhsIsImportant = isImportantAttrName(lhs.first); auto rhsIsImportant = isImportantAttrName(rhs.first); - return std::forward_as_tuple(!lhsIsImportant, lhs.first) - < std::forward_as_tuple(!rhsIsImportant, rhs.first); + return std::forward_as_tuple(!lhsIsImportant, lhs.first) < std::forward_as_tuple(!rhsIsImportant, rhs.first); } }; @@ -275,7 +274,8 @@ private: std::optional storePath; if (auto i = v.attrs()->get(state.sDrvPath)) { NixStringContext context; - storePath = state.coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation"); + storePath = + state.coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation"); } /* This unfortunately breaks printing nested values because of @@ -499,10 +499,10 @@ private: output << ANSI_NORMAL; } else if (v.isThunk() || v.isApp()) { if (options.ansiColors) - output << ANSI_MAGENTA; + output << ANSI_MAGENTA; output << "«thunk»"; if (options.ansiColors) - output << ANSI_NORMAL; + output << ANSI_NORMAL; } else { unreachable(); } @@ -593,8 +593,7 @@ private: } } catch (Error & e) { if 
(options.errors == ErrorPrintBehavior::Throw - || (options.errors == ErrorPrintBehavior::ThrowTopLevel - && depth == 0)) { + || (options.errors == ErrorPrintBehavior::ThrowTopLevel && depth == 0)) { throw; } printError_(e); @@ -603,7 +602,11 @@ private: public: Printer(std::ostream & output, EvalState & state, PrintOptions options) - : output(output), state(state), options(options) { } + : output(output) + , state(state) + , options(options) + { + } void print(Value & v) { @@ -636,8 +639,8 @@ std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer) template<> HintFmt & HintFmt::operator%(const ValuePrinter & value) { - fmt % value; - return *this; + fmt % value; + return *this; } -} +} // namespace nix diff --git a/src/libexpr/search-path.cc b/src/libexpr/search-path.cc index 76aecd4e5..5912c6129 100644 --- a/src/libexpr/search-path.cc +++ b/src/libexpr/search-path.cc @@ -2,8 +2,7 @@ namespace nix { -std::optional LookupPath::Prefix::suffixIfPotentialMatch( - std::string_view path) const +std::optional LookupPath::Prefix::suffixIfPotentialMatch(std::string_view path) const { auto n = s.size(); @@ -21,29 +20,25 @@ std::optional LookupPath::Prefix::suffixIfPotentialMatch( } /* Skip next path separator. */ - return { - path.substr(needSeparator ? n + 1 : n) - }; + return {path.substr(needSeparator ? n + 1 : n)}; } - LookupPath::Elem LookupPath::Elem::parse(std::string_view rawElem) { size_t pos = rawElem.find('='); - return LookupPath::Elem { - .prefix = Prefix { - .s = pos == std::string::npos - ? std::string { "" } - : std::string { rawElem.substr(0, pos) }, - }, - .path = Path { - .s = std::string { rawElem.substr(pos + 1) }, - }, + return LookupPath::Elem{ + .prefix = + Prefix{ + .s = pos == std::string::npos ? std::string{""} : std::string{rawElem.substr(0, pos)}, + }, + .path = + Path{ + .s = std::string{rawElem.substr(pos + 1)}, + }, }; } - LookupPath LookupPath::parse(const Strings & rawElems) { LookupPath res; @@ -52,4 +47,4 @@ LookupPath LookupPath::parse(const Strings & rawElems) return res; } -} +} // namespace nix diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc index a9b51afa0..2578620f3 100644 --- a/src/libexpr/value-to-json.cc +++ b/src/libexpr/value-to-json.cc @@ -7,107 +7,109 @@ #include #include - namespace nix { using json = nlohmann::json; + // TODO: rename. It doesn't print. 
-json printValueAsJSON(EvalState & state, bool strict, - Value & v, const PosIdx pos, NixStringContext & context, bool copyToStore) +json printValueAsJSON( + EvalState & state, bool strict, Value & v, const PosIdx pos, NixStringContext & context, bool copyToStore) { checkInterrupt(); - if (strict) state.forceValue(v, pos); + if (strict) + state.forceValue(v, pos); json out; switch (v.type()) { - case nInt: - out = v.integer().value; - break; + case nInt: + out = v.integer().value; + break; - case nBool: - out = v.boolean(); - break; + case nBool: + out = v.boolean(); + break; - case nString: - copyContext(v, context); - out = v.c_str(); - break; + case nString: + copyContext(v, context); + out = v.c_str(); + break; - case nPath: - if (copyToStore) - out = state.store->printStorePath( - state.copyPathToStore(context, v.path())); - else - out = v.path().path.abs(); - break; + case nPath: + if (copyToStore) + out = state.store->printStorePath(state.copyPathToStore(context, v.path())); + else + out = v.path().path.abs(); + break; - case nNull: - // already initialized as null - break; + case nNull: + // already initialized as null + break; - case nAttrs: { - auto maybeString = state.tryAttrsToString(pos, v, context, false, false); - if (maybeString) { - out = *maybeString; - break; - } - if (auto i = v.attrs()->get(state.sOutPath)) - return printValueAsJSON(state, strict, *i->value, i->pos, context, copyToStore); - else { - out = json::object(); - for (auto & a : v.attrs()->lexicographicOrder(state.symbols)) { - try { - out.emplace(state.symbols[a->name], printValueAsJSON(state, strict, *a->value, a->pos, context, copyToStore)); - } catch (Error & e) { - e.addTrace(state.positions[a->pos], - HintFmt("while evaluating attribute '%1%'", state.symbols[a->name])); - throw; - } - } - } + case nAttrs: { + auto maybeString = state.tryAttrsToString(pos, v, context, false, false); + if (maybeString) { + out = *maybeString; break; } - - case nList: { - out = json::array(); - int i = 0; - for (auto elem : v.listView()) { + if (auto i = v.attrs()->get(state.sOutPath)) + return printValueAsJSON(state, strict, *i->value, i->pos, context, copyToStore); + else { + out = json::object(); + for (auto & a : v.attrs()->lexicographicOrder(state.symbols)) { try { - out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore)); + out.emplace( + state.symbols[a->name], + printValueAsJSON(state, strict, *a->value, a->pos, context, copyToStore)); } catch (Error & e) { - e.addTrace(state.positions[pos], - HintFmt("while evaluating list element at index %1%", i)); + e.addTrace( + state.positions[a->pos], HintFmt("while evaluating attribute '%1%'", state.symbols[a->name])); throw; } - i++; } - break; } + break; + } - case nExternal: - return v.external()->printValueAsJSON(state, strict, context, copyToStore); - break; + case nList: { + out = json::array(); + int i = 0; + for (auto elem : v.listView()) { + try { + out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore)); + } catch (Error & e) { + e.addTrace(state.positions[pos], HintFmt("while evaluating list element at index %1%", i)); + throw; + } + i++; + } + break; + } - case nFloat: - out = v.fpoint(); - break; + case nExternal: + return v.external()->printValueAsJSON(state, strict, context, copyToStore); + break; - case nThunk: - case nFunction: - state.error( - "cannot convert %1% to JSON", - showType(v) - ) - .atPos(v.determinePos(pos)) - .debugThrow(); + case nFloat: + out = v.fpoint(); + break; + + case nThunk: + case 
nFunction: + state.error("cannot convert %1% to JSON", showType(v)).atPos(v.determinePos(pos)).debugThrow(); } return out; } -void printValueAsJSON(EvalState & state, bool strict, - Value & v, const PosIdx pos, std::ostream & str, NixStringContext & context, bool copyToStore) +void printValueAsJSON( + EvalState & state, + bool strict, + Value & v, + const PosIdx pos, + std::ostream & str, + NixStringContext & context, + bool copyToStore) { try { str << printValueAsJSON(state, strict, v, pos, context, copyToStore); @@ -116,12 +118,10 @@ void printValueAsJSON(EvalState & state, bool strict, } } -json ExternalValueBase::printValueAsJSON(EvalState & state, bool strict, - NixStringContext & context, bool copyToStore) const +json ExternalValueBase::printValueAsJSON( + EvalState & state, bool strict, NixStringContext & context, bool copyToStore) const { - state.error("cannot convert %1% to JSON", showType()) - .debugThrow(); + state.error("cannot convert %1% to JSON", showType()).debugThrow(); } - -} +} // namespace nix diff --git a/src/libexpr/value-to-xml.cc b/src/libexpr/value-to-xml.cc index 235ef2627..b3b986dae 100644 --- a/src/libexpr/value-to-xml.cc +++ b/src/libexpr/value-to-xml.cc @@ -5,10 +5,8 @@ #include - namespace nix { - static XMLAttrs singletonAttrs(const std::string & name, std::string_view value) { XMLAttrs attrs; @@ -16,12 +14,16 @@ static XMLAttrs singletonAttrs(const std::string & name, std::string_view value) return attrs; } - -static void printValueAsXML(EvalState & state, bool strict, bool location, - Value & v, XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen, +static void printValueAsXML( + EvalState & state, + bool strict, + bool location, + Value & v, + XMLWriter & doc, + NixStringContext & context, + PathSet & drvsSeen, const PosIdx pos); - static void posToXML(EvalState & state, XMLAttrs & xmlAttrs, const Pos & pos) { if (auto path = std::get_if(&pos.origin)) @@ -30,142 +32,167 @@ static void posToXML(EvalState & state, XMLAttrs & xmlAttrs, const Pos & pos) xmlAttrs["column"] = fmt("%1%", pos.column); } - -static void showAttrs(EvalState & state, bool strict, bool location, - const Bindings & attrs, XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen) +static void showAttrs( + EvalState & state, + bool strict, + bool location, + const Bindings & attrs, + XMLWriter & doc, + NixStringContext & context, + PathSet & drvsSeen) { StringSet names; for (auto & a : attrs.lexicographicOrder(state.symbols)) { XMLAttrs xmlAttrs; xmlAttrs["name"] = state.symbols[a->name]; - if (location && a->pos) posToXML(state, xmlAttrs, state.positions[a->pos]); + if (location && a->pos) + posToXML(state, xmlAttrs, state.positions[a->pos]); XMLOpenElement _(doc, "attr", xmlAttrs); - printValueAsXML(state, strict, location, - *a->value, doc, context, drvsSeen, a->pos); + printValueAsXML(state, strict, location, *a->value, doc, context, drvsSeen, a->pos); } } - -static void printValueAsXML(EvalState & state, bool strict, bool location, - Value & v, XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen, +static void printValueAsXML( + EvalState & state, + bool strict, + bool location, + Value & v, + XMLWriter & doc, + NixStringContext & context, + PathSet & drvsSeen, const PosIdx pos) { checkInterrupt(); - if (strict) state.forceValue(v, pos); + if (strict) + state.forceValue(v, pos); switch (v.type()) { - case nInt: - doc.writeEmptyElement("int", singletonAttrs("value", fmt("%1%", v.integer()))); - break; + case nInt: + doc.writeEmptyElement("int", 
singletonAttrs("value", fmt("%1%", v.integer()))); + break; - case nBool: - doc.writeEmptyElement("bool", singletonAttrs("value", v.boolean() ? "true" : "false")); - break; + case nBool: + doc.writeEmptyElement("bool", singletonAttrs("value", v.boolean() ? "true" : "false")); + break; - case nString: - /* !!! show the context? */ - copyContext(v, context); - doc.writeEmptyElement("string", singletonAttrs("value", v.c_str())); - break; + case nString: + /* !!! show the context? */ + copyContext(v, context); + doc.writeEmptyElement("string", singletonAttrs("value", v.c_str())); + break; - case nPath: - doc.writeEmptyElement("path", singletonAttrs("value", v.path().to_string())); - break; + case nPath: + doc.writeEmptyElement("path", singletonAttrs("value", v.path().to_string())); + break; - case nNull: - doc.writeEmptyElement("null"); - break; + case nNull: + doc.writeEmptyElement("null"); + break; - case nAttrs: - if (state.isDerivation(v)) { - XMLAttrs xmlAttrs; - - Path drvPath; - if (auto a = v.attrs()->get(state.sDrvPath)) { - if (strict) state.forceValue(*a->value, a->pos); - if (a->value->type() == nString) - xmlAttrs["drvPath"] = drvPath = a->value->c_str(); - } - - if (auto a = v.attrs()->get(state.sOutPath)) { - if (strict) state.forceValue(*a->value, a->pos); - if (a->value->type() == nString) - xmlAttrs["outPath"] = a->value->c_str(); - } - - XMLOpenElement _(doc, "derivation", xmlAttrs); - - if (drvPath != "" && drvsSeen.insert(drvPath).second) - showAttrs(state, strict, location, *v.attrs(), doc, context, drvsSeen); - else - doc.writeEmptyElement("repeated"); - } - - else { - XMLOpenElement _(doc, "attrs"); - showAttrs(state, strict, location, *v.attrs(), doc, context, drvsSeen); - } - - break; - - case nList: { - XMLOpenElement _(doc, "list"); - for (auto v2 : v.listView()) - printValueAsXML(state, strict, location, *v2, doc, context, drvsSeen, pos); - break; - } - - case nFunction: { - if (!v.isLambda()) { - // FIXME: Serialize primops and primopapps - doc.writeEmptyElement("unevaluated"); - break; - } + case nAttrs: + if (state.isDerivation(v)) { XMLAttrs xmlAttrs; - if (location) posToXML(state, xmlAttrs, state.positions[v.lambda().fun->pos]); - XMLOpenElement _(doc, "function", xmlAttrs); - if (v.lambda().fun->hasFormals()) { - XMLAttrs attrs; - if (v.lambda().fun->arg) attrs["name"] = state.symbols[v.lambda().fun->arg]; - if (v.lambda().fun->formals->ellipsis) attrs["ellipsis"] = "1"; - XMLOpenElement _(doc, "attrspat", attrs); - for (auto & i : v.lambda().fun->formals->lexicographicOrder(state.symbols)) - doc.writeEmptyElement("attr", singletonAttrs("name", state.symbols[i.name])); - } else - doc.writeEmptyElement("varpat", singletonAttrs("name", state.symbols[v.lambda().fun->arg])); + Path drvPath; + if (auto a = v.attrs()->get(state.sDrvPath)) { + if (strict) + state.forceValue(*a->value, a->pos); + if (a->value->type() == nString) + xmlAttrs["drvPath"] = drvPath = a->value->c_str(); + } - break; + if (auto a = v.attrs()->get(state.sOutPath)) { + if (strict) + state.forceValue(*a->value, a->pos); + if (a->value->type() == nString) + xmlAttrs["outPath"] = a->value->c_str(); + } + + XMLOpenElement _(doc, "derivation", xmlAttrs); + + if (drvPath != "" && drvsSeen.insert(drvPath).second) + showAttrs(state, strict, location, *v.attrs(), doc, context, drvsSeen); + else + doc.writeEmptyElement("repeated"); } - case nExternal: - v.external()->printValueAsXML(state, strict, location, doc, context, drvsSeen, pos); - break; + else { + XMLOpenElement _(doc, "attrs"); + 
showAttrs(state, strict, location, *v.attrs(), doc, context, drvsSeen); + } - case nFloat: - doc.writeEmptyElement("float", singletonAttrs("value", fmt("%1%", v.fpoint()))); - break; + break; - case nThunk: + case nList: { + XMLOpenElement _(doc, "list"); + for (auto v2 : v.listView()) + printValueAsXML(state, strict, location, *v2, doc, context, drvsSeen, pos); + break; + } + + case nFunction: { + if (!v.isLambda()) { + // FIXME: Serialize primops and primopapps doc.writeEmptyElement("unevaluated"); + break; + } + XMLAttrs xmlAttrs; + if (location) + posToXML(state, xmlAttrs, state.positions[v.lambda().fun->pos]); + XMLOpenElement _(doc, "function", xmlAttrs); + + if (v.lambda().fun->hasFormals()) { + XMLAttrs attrs; + if (v.lambda().fun->arg) + attrs["name"] = state.symbols[v.lambda().fun->arg]; + if (v.lambda().fun->formals->ellipsis) + attrs["ellipsis"] = "1"; + XMLOpenElement _(doc, "attrspat", attrs); + for (auto & i : v.lambda().fun->formals->lexicographicOrder(state.symbols)) + doc.writeEmptyElement("attr", singletonAttrs("name", state.symbols[i.name])); + } else + doc.writeEmptyElement("varpat", singletonAttrs("name", state.symbols[v.lambda().fun->arg])); + + break; + } + + case nExternal: + v.external()->printValueAsXML(state, strict, location, doc, context, drvsSeen, pos); + break; + + case nFloat: + doc.writeEmptyElement("float", singletonAttrs("value", fmt("%1%", v.fpoint()))); + break; + + case nThunk: + doc.writeEmptyElement("unevaluated"); } } - -void ExternalValueBase::printValueAsXML(EvalState & state, bool strict, - bool location, XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen, +void ExternalValueBase::printValueAsXML( + EvalState & state, + bool strict, + bool location, + XMLWriter & doc, + NixStringContext & context, + PathSet & drvsSeen, const PosIdx pos) const { doc.writeEmptyElement("unevaluated"); } - -void printValueAsXML(EvalState & state, bool strict, bool location, - Value & v, std::ostream & out, NixStringContext & context, const PosIdx pos) +void printValueAsXML( + EvalState & state, + bool strict, + bool location, + Value & v, + std::ostream & out, + NixStringContext & context, + const PosIdx pos) { XMLWriter doc(true, out); XMLOpenElement root(doc, "expr"); @@ -173,5 +200,4 @@ void printValueAsXML(EvalState & state, bool strict, bool location, printValueAsXML(state, strict, location, v, doc, context, drvsSeen, pos); } - -} +} // namespace nix diff --git a/src/libexpr/value/context.cc b/src/libexpr/value/context.cc index 40d08da59..6eb313211 100644 --- a/src/libexpr/value/context.cc +++ b/src/libexpr/value/context.cc @@ -5,9 +5,7 @@ namespace nix { -NixStringContextElem NixStringContextElem::parse( - std::string_view s0, - const ExperimentalFeatureSettings & xpSettings) +NixStringContextElem NixStringContextElem::parse(std::string_view s0, const ExperimentalFeatureSettings & xpSettings) { std::string_view s = s0; @@ -16,16 +14,16 @@ NixStringContextElem NixStringContextElem::parse( // Case on whether there is a '!' size_t index = s.find("!"); if (index == std::string_view::npos) { - return SingleDerivedPath::Opaque { - .path = StorePath { s }, + return SingleDerivedPath::Opaque{ + .path = StorePath{s}, }; } else { - std::string output { s.substr(0, index) }; + std::string output{s.substr(0, index)}; // Advance string to parse after the '!' 
s = s.substr(index + 1); auto drv = make_ref(parseRest()); drvRequireExperiment(*drv, xpSettings); - return SingleDerivedPath::Built { + return SingleDerivedPath::Built{ .drvPath = std::move(drv), .output = std::move(output), }; @@ -33,8 +31,7 @@ NixStringContextElem NixStringContextElem::parse( }; if (s.size() == 0) { - throw BadNixStringContextElem(s0, - "String context element should never be an empty string"); + throw BadNixStringContextElem(s0, "String context element should never be an empty string"); } switch (s.at(0)) { @@ -44,28 +41,23 @@ NixStringContextElem NixStringContextElem::parse( // Find *second* '!' if (s.find("!") == std::string_view::npos) { - throw BadNixStringContextElem(s0, - "String content element beginning with '!' should have a second '!'"); + throw BadNixStringContextElem(s0, "String content element beginning with '!' should have a second '!'"); } - return std::visit( - [&](auto x) -> NixStringContextElem { return std::move(x); }, - parseRest()); + return std::visit([&](auto x) -> NixStringContextElem { return std::move(x); }, parseRest()); } case '=': { - return NixStringContextElem::DrvDeep { - .drvPath = StorePath { s.substr(1) }, + return NixStringContextElem::DrvDeep{ + .drvPath = StorePath{s.substr(1)}, }; } default: { // Ensure no '!' if (s.find("!") != std::string_view::npos) { - throw BadNixStringContextElem(s0, - "String content element not beginning with '!' should not have a second '!'"); + throw BadNixStringContextElem( + s0, "String content element not beginning with '!' should not have a second '!'"); } - return std::visit( - [&](auto x) -> NixStringContextElem { return std::move(x); }, - parseRest()); + return std::visit([&](auto x) -> NixStringContextElem { return std::move(x); }, parseRest()); } } } @@ -76,33 +68,33 @@ std::string NixStringContextElem::to_string() const std::function toStringRest; toStringRest = [&](auto & p) { - std::visit(overloaded { - [&](const SingleDerivedPath::Opaque & o) { - res += o.path.to_string(); + std::visit( + overloaded{ + [&](const SingleDerivedPath::Opaque & o) { res += o.path.to_string(); }, + [&](const SingleDerivedPath::Built & o) { + res += o.output; + res += '!'; + toStringRest(*o.drvPath); + }, }, - [&](const SingleDerivedPath::Built & o) { - res += o.output; - res += '!'; - toStringRest(*o.drvPath); - }, - }, p.raw()); + p.raw()); }; - std::visit(overloaded { - [&](const NixStringContextElem::Built & b) { - res += '!'; - toStringRest(b); + std::visit( + overloaded{ + [&](const NixStringContextElem::Built & b) { + res += '!'; + toStringRest(b); + }, + [&](const NixStringContextElem::Opaque & o) { toStringRest(o); }, + [&](const NixStringContextElem::DrvDeep & d) { + res += '='; + res += d.drvPath.to_string(); + }, }, - [&](const NixStringContextElem::Opaque & o) { - toStringRest(o); - }, - [&](const NixStringContextElem::DrvDeep & d) { - res += '='; - res += d.drvPath.to_string(); - }, - }, raw); + raw); return res; } -} +} // namespace nix diff --git a/src/libfetchers-tests/access-tokens.cc b/src/libfetchers-tests/access-tokens.cc index 93043ba3e..7127434db 100644 --- a/src/libfetchers-tests/access-tokens.cc +++ b/src/libfetchers-tests/access-tokens.cc @@ -19,6 +19,7 @@ public: { experimentalFeatureSettings.experimentalFeatures.get().insert(Xp::Flakes); } + void TearDown() override {} }; @@ -98,4 +99,4 @@ TEST_F(AccessKeysTest, multipleSourceHut) ASSERT_EQ(token, "token"); } -} +} // namespace nix::fetchers diff --git a/src/libfetchers-tests/public-key.cc b/src/libfetchers-tests/public-key.cc index 
39a7cf4bd..97a232447 100644 --- a/src/libfetchers-tests/public-key.cc +++ b/src/libfetchers-tests/public-key.cc @@ -13,42 +13,44 @@ class PublicKeyTest : public CharacterizationTest std::filesystem::path unitTestData = getUnitTestData() / "public-key"; public: - std::filesystem::path goldenMaster(std::string_view testStem) const override { + std::filesystem::path goldenMaster(std::string_view testStem) const override + { return unitTestData / testStem; } }; -#define TEST_JSON(FIXTURE, NAME, VAL) \ - TEST_F(FIXTURE, PublicKey_ ## NAME ## _from_json) { \ - readTest(#NAME ".json", [&](const auto & encoded_) { \ - fetchers::PublicKey expected { VAL }; \ - fetchers::PublicKey got = nlohmann::json::parse(encoded_); \ - ASSERT_EQ(got, expected); \ - }); \ - } \ - \ - TEST_F(FIXTURE, PublicKey_ ## NAME ## _to_json) { \ - writeTest(#NAME ".json", [&]() -> json { \ - return nlohmann::json(fetchers::PublicKey { VAL }); \ - }, [](const auto & file) { \ - return json::parse(readFile(file)); \ - }, [](const auto & file, const auto & got) { \ - return writeFile(file, got.dump(2) + "\n"); \ - }); \ +#define TEST_JSON(FIXTURE, NAME, VAL) \ + TEST_F(FIXTURE, PublicKey_##NAME##_from_json) \ + { \ + readTest(#NAME ".json", [&](const auto & encoded_) { \ + fetchers::PublicKey expected{VAL}; \ + fetchers::PublicKey got = nlohmann::json::parse(encoded_); \ + ASSERT_EQ(got, expected); \ + }); \ + } \ + \ + TEST_F(FIXTURE, PublicKey_##NAME##_to_json) \ + { \ + writeTest( \ + #NAME ".json", \ + [&]() -> json { return nlohmann::json(fetchers::PublicKey{VAL}); }, \ + [](const auto & file) { return json::parse(readFile(file)); }, \ + [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ } -TEST_JSON(PublicKeyTest, simple, (fetchers::PublicKey { .type = "ssh-rsa", .key = "ABCDE" })) +TEST_JSON(PublicKeyTest, simple, (fetchers::PublicKey{.type = "ssh-rsa", .key = "ABCDE"})) -TEST_JSON(PublicKeyTest, defaultType, fetchers::PublicKey { .key = "ABCDE" }) +TEST_JSON(PublicKeyTest, defaultType, fetchers::PublicKey{.key = "ABCDE"}) #undef TEST_JSON -TEST_F(PublicKeyTest, PublicKey_noRoundTrip_from_json) { +TEST_F(PublicKeyTest, PublicKey_noRoundTrip_from_json) +{ readTest("noRoundTrip.json", [&](const auto & encoded_) { - fetchers::PublicKey expected = { .type = "ssh-ed25519", .key = "ABCDE" }; + fetchers::PublicKey expected = {.type = "ssh-ed25519", .key = "ABCDE"}; fetchers::PublicKey got = nlohmann::json::parse(encoded_); ASSERT_EQ(got, expected); }); } -} +} // namespace nix diff --git a/src/libfetchers/attrs.cc b/src/libfetchers/attrs.cc index 6808e8af1..841808bd1 100644 --- a/src/libfetchers/attrs.cc +++ b/src/libfetchers/attrs.cc @@ -15,7 +15,7 @@ Attrs jsonToAttrs(const nlohmann::json & json) else if (i.value().is_string()) attrs.emplace(i.key(), i.value().get()); else if (i.value().is_boolean()) - attrs.emplace(i.key(), Explicit { i.value().get() }); + attrs.emplace(i.key(), Explicit{i.value().get()}); else throw Error("unsupported input attribute type in lock file"); } @@ -33,7 +33,8 @@ nlohmann::json attrsToJSON(const Attrs & attrs) json[attr.first] = *v; } else if (auto v = std::get_if>(&attr.second)) { json[attr.first] = v->t; - } else unreachable(); + } else + unreachable(); } return json; } @@ -41,7 +42,8 @@ nlohmann::json attrsToJSON(const Attrs & attrs) std::optional maybeGetStrAttr(const Attrs & attrs, const std::string & name) { auto i = attrs.find(name); - if (i == attrs.end()) return {}; + if (i == attrs.end()) + return {}; if (auto v = std::get_if(&i->second)) return 
*v; throw Error("input attribute '%s' is not a string %s", name, attrsToJSON(attrs).dump()); @@ -58,7 +60,8 @@ std::string getStrAttr(const Attrs & attrs, const std::string & name) std::optional maybeGetIntAttr(const Attrs & attrs, const std::string & name) { auto i = attrs.find(name); - if (i == attrs.end()) return {}; + if (i == attrs.end()) + return {}; if (auto v = std::get_if(&i->second)) return *v; throw Error("input attribute '%s' is not an integer", name); @@ -75,7 +78,8 @@ uint64_t getIntAttr(const Attrs & attrs, const std::string & name) std::optional maybeGetBoolAttr(const Attrs & attrs, const std::string & name) { auto i = attrs.find(name); - if (i == attrs.end()) return {}; + if (i == attrs.end()) + return {}; if (auto v = std::get_if>(&i->second)) return v->t; throw Error("input attribute '%s' is not a Boolean", name); @@ -99,7 +103,8 @@ StringMap attrsToQuery(const Attrs & attrs) query.insert_or_assign(attr.first, *v); } else if (auto v = std::get_if>(&attr.second)) { query.insert_or_assign(attr.first, v->t ? "1" : "0"); - } else unreachable(); + } else + unreachable(); } return query; } @@ -109,4 +114,4 @@ Hash getRevAttr(const Attrs & attrs, const std::string & name) return Hash::parseAny(getStrAttr(attrs, name), HashAlgorithm::SHA1); } -} +} // namespace nix::fetchers diff --git a/src/libfetchers/cache.cc b/src/libfetchers/cache.cc index 9a2531ba5..85fd94590 100644 --- a/src/libfetchers/cache.cc +++ b/src/libfetchers/cache.cc @@ -44,46 +44,37 @@ struct CacheImpl : Cache state->db.isCache(); state->db.exec(schema); - state->upsert.create(state->db, - "insert or replace into Cache(domain, key, value, timestamp) values (?, ?, ?, ?)"); + state->upsert.create( + state->db, "insert or replace into Cache(domain, key, value, timestamp) values (?, ?, ?, ?)"); - state->lookup.create(state->db, - "select value, timestamp from Cache where domain = ? and key = ?"); + state->lookup.create(state->db, "select value, timestamp from Cache where domain = ? 
and key = ?"); } - void upsert( - const Key & key, - const Attrs & value) override + void upsert(const Key & key, const Attrs & value) override { - _state.lock()->upsert.use() - (key.first) - (attrsToJSON(key.second).dump()) - (attrsToJSON(value).dump()) - (time(0)).exec(); + _state.lock() + ->upsert.use()(key.first)(attrsToJSON(key.second).dump())(attrsToJSON(value).dump())(time(0)) + .exec(); } - std::optional lookup( - const Key & key) override + std::optional lookup(const Key & key) override { if (auto res = lookupExpired(key)) return std::move(res->value); return {}; } - std::optional lookupWithTTL( - const Key & key) override + std::optional lookupWithTTL(const Key & key) override { if (auto res = lookupExpired(key)) { if (!res->expired) return std::move(res->value); - debug("ignoring expired cache entry '%s:%s'", - key.first, attrsToJSON(key.second).dump()); + debug("ignoring expired cache entry '%s:%s'", key.first, attrsToJSON(key.second).dump()); } return {}; } - std::optional lookupExpired( - const Key & key) override + std::optional lookupExpired(const Key & key) override { auto state(_state.lock()); @@ -100,17 +91,13 @@ struct CacheImpl : Cache debug("using cache entry '%s:%s' -> '%s'", key.first, keyJSON, valueJSON); - return Result { + return Result{ .expired = settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0), .value = jsonToAttrs(nlohmann::json::parse(valueJSON)), }; } - void upsert( - Key key, - Store & store, - Attrs value, - const StorePath & storePath) override + void upsert(Key key, Store & store, Attrs value, const StorePath & storePath) override { /* Add the store prefix to the cache key to handle multiple store prefixes. */ @@ -121,14 +108,13 @@ struct CacheImpl : Cache upsert(key, value); } - std::optional lookupStorePath( - Key key, - Store & store) override + std::optional lookupStorePath(Key key, Store & store) override { key.second.insert_or_assign("store", store.storeDir); auto res = lookupExpired(key); - if (!res) return std::nullopt; + if (!res) + return std::nullopt; auto storePathS = getStrAttr(res->value, "storePath"); res->value.erase("storePath"); @@ -138,14 +124,16 @@ struct CacheImpl : Cache store.addTempRoot(res2.storePath); if (!store.isValidPath(res2.storePath)) { // FIXME: we could try to substitute 'storePath'. - debug("ignoring disappeared cache entry '%s:%s' -> '%s'", + debug( + "ignoring disappeared cache entry '%s:%s' -> '%s'", key.first, attrsToJSON(key.second).dump(), store.printStorePath(res2.storePath)); return std::nullopt; } - debug("using cache entry '%s:%s' -> '%s', '%s'", + debug( + "using cache entry '%s:%s' -> '%s', '%s'", key.first, attrsToJSON(key.second).dump(), attrsToJSON(res2.value).dump(), @@ -154,9 +142,7 @@ struct CacheImpl : Cache return res2; } - std::optional lookupStorePathWithTTL( - Key key, - Store & store) override + std::optional lookupStorePathWithTTL(Key key, Store & store) override { auto res = lookupStorePath(std::move(key), store); return res && !res->expired ? 
res : std::nullopt; @@ -171,4 +157,4 @@ ref Settings::getCache() const return ref(*cache); } -} +} // namespace nix::fetchers diff --git a/src/libfetchers/fetch-settings.cc b/src/libfetchers/fetch-settings.cc index 4b4e4e29d..f92b94a0b 100644 --- a/src/libfetchers/fetch-settings.cc +++ b/src/libfetchers/fetch-settings.cc @@ -2,8 +2,6 @@ namespace nix::fetchers { -Settings::Settings() -{ -} +Settings::Settings() {} -} +} // namespace nix::fetchers diff --git a/src/libfetchers/fetch-to-store.cc b/src/libfetchers/fetch-to-store.cc index f7ab32322..6ce78e115 100644 --- a/src/libfetchers/fetch-to-store.cc +++ b/src/libfetchers/fetch-to-store.cc @@ -5,18 +5,11 @@ namespace nix { fetchers::Cache::Key makeFetchToStoreCacheKey( - const std::string & name, - const std::string & fingerprint, - ContentAddressMethod method, - const std::string & path) + const std::string & name, const std::string & fingerprint, ContentAddressMethod method, const std::string & path) { - return fetchers::Cache::Key{"fetchToStore", { - {"name", name}, - {"fingerprint", fingerprint}, - {"method", std::string{method.render()}}, - {"path", path} - }}; - + return fetchers::Cache::Key{ + "fetchToStore", + {{"name", name}, {"fingerprint", fingerprint}, {"method", std::string{method.render()}}, {"path", path}}}; } StorePath fetchToStore( @@ -43,17 +36,17 @@ StorePath fetchToStore( } else debug("source path '%s' is uncacheable", path); - Activity act(*logger, lvlChatty, actUnknown, + Activity act( + *logger, + lvlChatty, + actUnknown, fmt(mode == FetchMode::DryRun ? "hashing '%s'" : "copying '%s' to the store", path)); auto filter2 = filter ? *filter : defaultPathFilter; - auto storePath = - mode == FetchMode::DryRun - ? store.computeStorePath( - name, path, method, HashAlgorithm::SHA256, {}, filter2).first - : store.addToStore( - name, path, method, HashAlgorithm::SHA256, {}, filter2, repair); + auto storePath = mode == FetchMode::DryRun + ? store.computeStorePath(name, path, method, HashAlgorithm::SHA256, {}, filter2).first + : store.addToStore(name, path, method, HashAlgorithm::SHA256, {}, filter2, repair); debug(mode == FetchMode::DryRun ? "hashed '%s'" : "copied '%s' to '%s'", path, store.printStorePath(storePath)); @@ -63,4 +56,4 @@ StorePath fetchToStore( return storePath; } -} +} // namespace nix diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index 9cb896601..54013bf55 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -39,9 +39,7 @@ nlohmann::json dumpRegisterInputSchemeInfo() return res; } -Input Input::fromURL( - const Settings & settings, - const std::string & url, bool requireTree) +Input Input::fromURL(const Settings & settings, const std::string & url, bool requireTree) { return fromURL(settings, parseURL(url), requireTree); } @@ -55,9 +53,7 @@ static void fixupInput(Input & input) input.getLastModified(); } -Input Input::fromURL( - const Settings & settings, - const ParsedURL & url, bool requireTree) +Input Input::fromURL(const Settings & settings, const ParsedURL & url, bool requireTree) { for (auto & [_, inputScheme] : inputSchemes()) { auto res = inputScheme->inputFromURL(settings, url, requireTree); @@ -86,7 +82,7 @@ Input Input::fromAttrs(const Settings & settings, Attrs && attrs) // but not all of them. Doing this is to support those other // operations which are supposed to be robust on // unknown/uninterpretable inputs. 
- Input input { settings }; + Input input{settings}; input.attrs = attrs; fixupInput(input); return input; @@ -97,7 +93,8 @@ Input Input::fromAttrs(const Settings & settings, Attrs && attrs) i ? *i : nullptr; }); - if (!inputScheme) return raw(); + if (!inputScheme) + return raw(); experimentalFeatureSettings.require(inputScheme->experimentalFeature()); @@ -108,7 +105,8 @@ Input Input::fromAttrs(const Settings & settings, Attrs && attrs) throw Error("input attribute '%s' not supported by scheme '%s'", name, schemeName); auto res = inputScheme->inputFromAttrs(settings, attrs); - if (!res) return raw(); + if (!res) + return raw(); res->scheme = inputScheme; fixupInput(*res); return std::move(*res); @@ -116,9 +114,11 @@ Input Input::fromAttrs(const Settings & settings, Attrs && attrs) std::optional Input::getFingerprint(ref store) const { - if (!scheme) return std::nullopt; + if (!scheme) + return std::nullopt; - if (cachedFingerprint) return *cachedFingerprint; + if (cachedFingerprint) + return *cachedFingerprint; auto fingerprint = scheme->getFingerprint(store, *this); @@ -173,18 +173,20 @@ Attrs Input::toAttrs() const return attrs; } -bool Input::operator ==(const Input & other) const noexcept +bool Input::operator==(const Input & other) const noexcept { return attrs == other.attrs; } bool Input::contains(const Input & other) const { - if (*this == other) return true; + if (*this == other) + return true; auto other2(other); other2.attrs.erase("ref"); other2.attrs.erase("rev"); - if (*this == other2) return true; + if (*this == other2) + return true; return false; } @@ -198,7 +200,8 @@ std::pair Input::fetchToStore(ref store) const try { auto [accessor, result] = getAccessorUnchecked(store); - auto storePath = nix::fetchToStore(*settings, *store, SourcePath(accessor), FetchMode::Copy, result.getName()); + auto storePath = + nix::fetchToStore(*settings, *store, SourcePath(accessor), FetchMode::Copy, result.getName()); auto narHash = store->queryPathInfo(storePath)->narHash; result.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true)); @@ -237,7 +240,8 @@ void Input::checkLocks(Input specified, Input & result) for (auto & field : specified.attrs) { auto field2 = result.attrs.find(field.first); if (field2 != result.attrs.end() && field.second != field2->second) - throw Error("mismatch in field '%s' of input '%s', got '%s'", + throw Error( + "mismatch in field '%s' of input '%s', got '%s'", field.first, attrsToJSON(specified.attrs), attrsToJSON(result.attrs)); @@ -251,30 +255,38 @@ void Input::checkLocks(Input specified, Input & result) if (auto prevNarHash = specified.getNarHash()) { if (result.getNarHash() != prevNarHash) { if (result.getNarHash()) - throw Error((unsigned int) 102, "NAR hash mismatch in input '%s', expected '%s' but got '%s'", - specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true), result.getNarHash()->to_string(HashFormat::SRI, true)); + throw Error( + (unsigned int) 102, + "NAR hash mismatch in input '%s', expected '%s' but got '%s'", + specified.to_string(), + prevNarHash->to_string(HashFormat::SRI, true), + result.getNarHash()->to_string(HashFormat::SRI, true)); else - throw Error((unsigned int) 102, "NAR hash mismatch in input '%s', expected '%s' but got none", - specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true)); + throw Error( + (unsigned int) 102, + "NAR hash mismatch in input '%s', expected '%s' but got none", + specified.to_string(), + prevNarHash->to_string(HashFormat::SRI, true)); } } if (auto 
prevLastModified = specified.getLastModified()) { if (result.getLastModified() != prevLastModified) - throw Error("'lastModified' attribute mismatch in input '%s', expected %d, got %d", - result.to_string(), *prevLastModified, result.getLastModified().value_or(-1)); + throw Error( + "'lastModified' attribute mismatch in input '%s', expected %d, got %d", + result.to_string(), + *prevLastModified, + result.getLastModified().value_or(-1)); } if (auto prevRev = specified.getRev()) { if (result.getRev() != prevRev) - throw Error("'rev' attribute mismatch in input '%s', expected %s", - result.to_string(), prevRev->gitRev()); + throw Error("'rev' attribute mismatch in input '%s', expected %s", result.to_string(), prevRev->gitRev()); } if (auto prevRevCount = specified.getRevCount()) { if (result.getRevCount() != prevRevCount) - throw Error("'revCount' attribute mismatch in input '%s', expected %d", - result.to_string(), *prevRevCount); + throw Error("'revCount' attribute mismatch in input '%s', expected %d", result.to_string(), *prevRevCount); } } @@ -318,8 +330,7 @@ std::pair, Input> Input::getAccessorUnchecked(ref sto store->ensurePath(storePath); - debug("using substituted/cached input '%s' in '%s'", - to_string(), store->printStorePath(storePath)); + debug("using substituted/cached input '%s' in '%s'", to_string(), store->printStorePath(storePath)); auto accessor = makeStorePathAccessor(store, storePath); @@ -341,11 +352,10 @@ std::pair, Input> Input::getAccessorUnchecked(ref sto return {accessor, std::move(result)}; } -Input Input::applyOverrides( - std::optional ref, - std::optional rev) const +Input Input::applyOverrides(std::optional ref, std::optional rev) const { - if (!scheme) return *this; + if (!scheme) + return *this; return scheme->applyOverrides(*this, ref, rev); } @@ -361,10 +371,7 @@ std::optional Input::getSourcePath() const return scheme->getSourcePath(*this); } -void Input::putFile( - const CanonPath & path, - std::string_view contents, - std::optional commitMsg) const +void Input::putFile(const CanonPath & path, std::string_view contents, std::optional commitMsg) const { assert(scheme); return scheme->putFile(*this, path, contents, commitMsg); @@ -380,11 +387,13 @@ StorePath Input::computeStorePath(Store & store) const auto narHash = getNarHash(); if (!narHash) throw Error("cannot compute store path for unlocked input '%s'", to_string()); - return store.makeFixedOutputPath(getName(), FixedOutputInfo { - .method = FileIngestionMethod::NixArchive, - .hash = *narHash, - .references = {}, - }); + return store.makeFixedOutputPath( + getName(), + FixedOutputInfo{ + .method = FileIngestionMethod::NixArchive, + .hash = *narHash, + .references = {}, + }); } std::string Input::getType() const @@ -417,7 +426,7 @@ std::optional Input::getRev() const if (auto s = maybeGetStrAttr(attrs, "rev")) { try { hash = Hash::parseAnyPrefixed(*s); - } catch (BadHash &e) { + } catch (BadHash & e) { // Default to sha1 for backwards compatibility with existing // usages (e.g. `builtins.fetchTree` calls or flake inputs). 
hash = Hash::parseAny(*s, HashAlgorithm::SHA1); @@ -446,10 +455,7 @@ ParsedURL InputScheme::toURL(const Input & input) const throw Error("don't know how to convert input '%s' to a URL", attrsToJSON(input.attrs)); } -Input InputScheme::applyOverrides( - const Input & input, - std::optional ref, - std::optional rev) const +Input InputScheme::applyOverrides(const Input & input, std::optional ref, std::optional rev) const { if (ref) throw Error("don't know how to set branch/tag name of input '%s' to '%s'", input.to_string(), *ref); @@ -464,10 +470,7 @@ std::optional InputScheme::getSourcePath(const Input & in } void InputScheme::putFile( - const Input & input, - const CanonPath & path, - std::string_view contents, - std::optional commitMsg) const + const Input & input, const CanonPath & path, std::string_view contents, std::optional commitMsg) const { throw Error("input '%s' does not support modifying file '%s'", input.to_string(), path); } @@ -482,12 +485,12 @@ std::optional InputScheme::experimentalFeature() const return {}; } -std::string publicKeys_to_string(const std::vector& publicKeys) +std::string publicKeys_to_string(const std::vector & publicKeys) { return ((nlohmann::json) publicKeys).dump(); } -} +} // namespace nix::fetchers namespace nlohmann { @@ -497,7 +500,7 @@ using namespace nix; fetchers::PublicKey adl_serializer::from_json(const json & json) { - fetchers::PublicKey res = { }; + fetchers::PublicKey res = {}; if (auto type = optionalValueAt(json, "type")) res.type = getString(*type); @@ -514,4 +517,4 @@ void adl_serializer::to_json(json & json, fetchers::PublicK #endif -} +} // namespace nlohmann diff --git a/src/libfetchers/filtering-source-accessor.cc b/src/libfetchers/filtering-source-accessor.cc index 72a3fb4eb..17f224ad2 100644 --- a/src/libfetchers/filtering-source-accessor.cc +++ b/src/libfetchers/filtering-source-accessor.cc @@ -50,9 +50,8 @@ std::string FilteringSourceAccessor::showPath(const CanonPath & path) void FilteringSourceAccessor::checkAccess(const CanonPath & path) { if (!isAllowed(path)) - throw makeNotAllowedError - ? makeNotAllowedError(path) - : RestrictedPathError("access to path '%s' is forbidden", showPath(path)); + throw makeNotAllowedError ? 
makeNotAllowedError(path) + : RestrictedPathError("access to path '%s' is forbidden", showPath(path)); } struct AllowListSourceAccessorImpl : AllowListSourceAccessor @@ -68,13 +67,12 @@ struct AllowListSourceAccessorImpl : AllowListSourceAccessor : AllowListSourceAccessor(SourcePath(next), std::move(makeNotAllowedError)) , allowedPrefixes(std::move(allowedPrefixes)) , allowedPaths(std::move(allowedPaths)) - { } + { + } bool isAllowed(const CanonPath & path) override { - return - allowedPaths.contains(path) - || path.isAllowed(allowedPrefixes); + return allowedPaths.contains(path) || path.isAllowed(allowedPrefixes); } void allowPrefix(CanonPath prefix) override @@ -90,19 +88,17 @@ ref AllowListSourceAccessor::create( MakeNotAllowedError && makeNotAllowedError) { return make_ref( - next, - std::move(allowedPrefixes), - std::move(allowedPaths), - std::move(makeNotAllowedError)); + next, std::move(allowedPrefixes), std::move(allowedPaths), std::move(makeNotAllowedError)); } bool CachingFilteringSourceAccessor::isAllowed(const CanonPath & path) { auto i = cache.find(path); - if (i != cache.end()) return i->second; + if (i != cache.end()) + return i->second; auto res = isAllowedUncached(path); cache.emplace(path, res); return res; } -} +} // namespace nix diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index d76f6879d..f45360f71 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -37,23 +37,24 @@ namespace std { -template<> struct hash +template<> +struct hash { size_t operator()(const git_oid & oid) const { - return * (size_t *) oid.id; + return *(size_t *) oid.id; } }; -} +} // namespace std -std::ostream & operator << (std::ostream & str, const git_oid & oid) +std::ostream & operator<<(std::ostream & str, const git_oid & oid) { str << git_oid_tostr_s(&oid); return str; } -bool operator == (const git_oid & oid1, const git_oid & oid2) +bool operator==(const git_oid & oid1, const git_oid & oid2) { return git_oid_equal(&oid1, &oid2); } @@ -81,9 +82,9 @@ typedef std::unique_ptr> Indexer; Hash toHash(const git_oid & oid) { - #ifdef GIT_EXPERIMENTAL_SHA256 +#ifdef GIT_EXPERIMENTAL_SHA256 assert(oid.type == GIT_OID_SHA1); - #endif +#endif Hash hash(HashAlgorithm::SHA1); memcpy(hash.hash, oid.id, hash.hashSize); return hash; @@ -117,7 +118,7 @@ template T peelObject(git_object * obj, git_object_t type) { T obj2; - if (git_object_peel((git_object * *) (typename T::pointer *) Setter(obj2), obj, type)) { + if (git_object_peel((git_object **) (typename T::pointer *) Setter(obj2), obj, type)) { auto err = git_error_last(); throw Error("peeling Git object '%s': %s", *git_object_id(obj), err->message); } @@ -128,7 +129,7 @@ template T dupObject(typename T::pointer obj) { T obj2; - if (git_object_dup((git_object * *) (typename T::pointer *) Setter(obj2), (git_object *) obj)) + if (git_object_dup((git_object **) (typename T::pointer *) Setter(obj2), (git_object *) obj)) throw Error("duplicating object '%s': %s", *git_object_id((git_object *) obj), git_error_last()->message); return obj2; } @@ -147,21 +148,22 @@ static Object peelToTreeOrBlob(git_object * obj) return peelObject(obj, GIT_OBJECT_TREE); } -struct PackBuilderContext { +struct PackBuilderContext +{ std::exception_ptr exception; void handleException(const char * activity, int errCode) { switch (errCode) { - case GIT_OK: - break; - case GIT_EUSER: - if (!exception) - panic("PackBuilderContext::handleException: user error, but exception was not set"); + case GIT_OK: + break; + case GIT_EUSER: + if 
(!exception) + panic("PackBuilderContext::handleException: user error, but exception was not set"); - std::rethrow_exception(exception); - default: - throw Error("%s: %i, %s", Uncolored(activity), errCode, git_error_last()->message); + std::rethrow_exception(exception); + default: + throw Error("%s: %i, %s", Uncolored(activity), errCode, git_error_last()->message); } } }; @@ -171,9 +173,9 @@ extern "C" { /** * A `git_packbuilder_progress` implementation that aborts the pack building if needed. */ -static int packBuilderProgressCheckInterrupt(int stage, uint32_t current, uint32_t total, void *payload) +static int packBuilderProgressCheckInterrupt(int stage, uint32_t current, uint32_t total, void * payload) { - PackBuilderContext & args = * (PackBuilderContext *) payload; + PackBuilderContext & args = *(PackBuilderContext *) payload; try { checkInterrupt(); return GIT_OK; @@ -182,15 +184,17 @@ static int packBuilderProgressCheckInterrupt(int stage, uint32_t current, uint32 return GIT_EUSER; } }; + static git_packbuilder_progress PACKBUILDER_PROGRESS_CHECK_INTERRUPT = &packBuilderProgressCheckInterrupt; } // extern "C" -static void initRepoAtomically(std::filesystem::path &path, bool bare) +static void initRepoAtomically(std::filesystem::path & path, bool bare) { - if (pathExists(path.string())) return; + if (pathExists(path.string())) + return; - Path tmpDir = createTempDir(os_string_to_string(PathViewNG { std::filesystem::path(path).parent_path() })); + Path tmpDir = createTempDir(os_string_to_string(PathViewNG{std::filesystem::path(path).parent_path()})); AutoDelete delTmpDir(tmpDir, true); Repository tmpRepo; @@ -204,8 +208,7 @@ static void initRepoAtomically(std::filesystem::path &path, bool bare) // `path` may be attempted to be deleted by s::f::rename, in which case the code is: || e.code() == std::errc::directory_not_empty) { return; - } - else + } else throw SysError("moving temporary git repository from %s to %s", tmpDir, path); } // we successfully moved the repository, so the temporary directory no longer exists. 
@@ -249,16 +252,17 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this throw Error("adding mempack backend to Git object database: %s", git_error_last()->message); } - operator git_repository * () + operator git_repository *() { return repo.get(); } - void flush() override { + void flush() override + { checkInterrupt(); git_buf buf = GIT_BUF_INIT; - Finally _disposeBuf { [&] { git_buf_dispose(&buf); } }; + Finally _disposeBuf{[&] { git_buf_dispose(&buf); }}; PackBuilder packBuilder; PackBuilderContext packBuilderContext; git_packbuilder_new(Setter(packBuilder), *this); @@ -266,14 +270,9 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this git_packbuilder_set_threads(packBuilder.get(), 0 /* autodetect */); packBuilderContext.handleException( - "preparing packfile", - git_mempack_write_thin_pack(mempack_backend, packBuilder.get()) - ); + "preparing packfile", git_mempack_write_thin_pack(mempack_backend, packBuilder.get())); checkInterrupt(); - packBuilderContext.handleException( - "writing packfile", - git_packbuilder_write_buf(&buf, packBuilder.get()) - ); + packBuilderContext.handleException("writing packfile", git_packbuilder_write_buf(&buf, packBuilder.get())); checkInterrupt(); std::string repo_path = std::string(git_repository_path(repo.get())); @@ -318,7 +317,8 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this todo.push(peelObject(lookupObject(*this, hashToOID(rev)).get(), GIT_OBJECT_COMMIT)); while (auto commit = pop(todo)) { - if (!done.insert(*git_commit_id(commit->get())).second) continue; + if (!done.insert(*git_commit_id(commit->get())).second) + continue; for (size_t n = 0; n < git_commit_parentcount(commit->get()); ++n) { git_commit * parent; @@ -330,8 +330,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this "or add set the shallow parameter to true in builtins.fetchGit, " "or fetch the complete history for this branch.", *git_commit_id(commit->get()), - git_error_last()->message - ); + git_error_last()->message); } todo.push(Commit(parent)); } @@ -382,7 +381,8 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this while (true) { git_config_entry * entry = nullptr; if (auto err = git_config_next(&entry, it.get())) { - if (err == GIT_ITEROVER) break; + if (err == GIT_ITEROVER) + break; throw Error("iterating over .gitmodules: %s", git_error_last()->message); } entries.emplace(entry->name + 10, entry->value); @@ -391,14 +391,16 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this std::vector result; for (auto & [key, value] : entries) { - if (!hasSuffix(key, ".path")) continue; + if (!hasSuffix(key, ".path")) + continue; std::string key2(key, 0, key.size() - 5); auto path = CanonPath(value); - result.push_back(Submodule { - .path = path, - .url = entries[key2 + ".url"], - .branch = entries[key2 + ".branch"], - }); + result.push_back( + Submodule{ + .path = path, + .url = entries[key2 + ".url"], + .branch = entries[key2 + ".branch"], + }); } return result; @@ -424,11 +426,9 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this /* Get all tracked files and determine whether the working directory is dirty. 
*/ - std::function statusCallback = [&](const char * path, unsigned int statusFlags) - { - if (!(statusFlags & GIT_STATUS_INDEX_DELETED) && - !(statusFlags & GIT_STATUS_WT_DELETED)) - { + std::function statusCallback = [&](const char * path, + unsigned int statusFlags) { + if (!(statusFlags & GIT_STATUS_INDEX_DELETED) && !(statusFlags & GIT_STATUS_WT_DELETED)) { info.files.insert(CanonPath(path)); if (statusFlags != GIT_STATUS_CURRENT) info.dirtyFiles.insert(CanonPath(path)); @@ -484,7 +484,8 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this Object obj; if (auto errCode = git_object_lookup(Setter(obj), *this, &oid, GIT_OBJECT_ANY)) { - if (errCode == GIT_ENOTFOUND) return false; + if (errCode == GIT_ENOTFOUND) + return false; auto err = git_error_last(); throw Error("getting Git object '%s': %s", oid, err->message); } @@ -495,15 +496,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this /** * A 'GitSourceAccessor' with no regard for export-ignore or any other transformations. */ - ref getRawAccessor( - const Hash & rev, - bool smudgeLfs = false); + ref getRawAccessor(const Hash & rev, bool smudgeLfs = false); - ref getAccessor( - const Hash & rev, - bool exportIgnore, - std::string displayPrefix, - bool smudgeLfs = false) override; + ref + getAccessor(const Hash & rev, bool exportIgnore, std::string displayPrefix, bool smudgeLfs = false) override; ref getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError e) override; @@ -519,7 +515,8 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this static int transferProgressCallback(const git_indexer_progress * stats, void * payload) { auto act = (Activity *) payload; - act->result(resFetchStatus, + act->result( + resFetchStatus, fmt("%d/%d objects received, %d/%d deltas indexed, %.1f MiB", stats->received_objects, stats->total_objects, @@ -529,14 +526,12 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this return getInterrupted() ? -1 : 0; } - void fetch( - const std::string & url, - const std::string & refspec, - bool shallow) override + void fetch(const std::string & url, const std::string & refspec, bool shallow) override { Activity act(*logger, lvlTalkative, actFetchTree, fmt("fetching Git repository '%s'", url)); - // TODO: implement git-credential helper support (preferably via libgit2, which as of 2024-01 does not support that) + // TODO: implement git-credential helper support (preferably via libgit2, which as of 2024-01 does not support + // that) // then use code that was removed in this commit (see blame) auto dir = this->path; @@ -545,60 +540,57 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this append(gitArgs, {"--depth", "1"}); append(gitArgs, {std::string("--"), url, refspec}); - auto [status, output] = runProgram(RunOptions { - .program = "git", - .lookupPath = true, - // FIXME: git stderr messes up our progress indicator, so - // we're using --quiet for now. Should process its stderr. - .args = gitArgs, - .input = {}, - .mergeStderrToStdout = true, - .isInteractive = true - }); + auto [status, output] = runProgram( + RunOptions{ + .program = "git", + .lookupPath = true, + // FIXME: git stderr messes up our progress indicator, so + // we're using --quiet for now. Should process its stderr. 
+ .args = gitArgs, + .input = {}, + .mergeStderrToStdout = true, + .isInteractive = true}); if (status > 0) { throw Error("Failed to fetch git repository %s : %s", url, output); } } - void verifyCommit( - const Hash & rev, - const std::vector & publicKeys) override + void verifyCommit(const Hash & rev, const std::vector & publicKeys) override { // Create ad-hoc allowedSignersFile and populate it with publicKeys auto allowedSignersFile = createTempFile().second; std::string allowedSigners; for (const fetchers::PublicKey & k : publicKeys) { - if (k.type != "ssh-dsa" - && k.type != "ssh-ecdsa" - && k.type != "ssh-ecdsa-sk" - && k.type != "ssh-ed25519" - && k.type != "ssh-ed25519-sk" - && k.type != "ssh-rsa") - throw Error("Unknown key type '%s'.\n" + if (k.type != "ssh-dsa" && k.type != "ssh-ecdsa" && k.type != "ssh-ecdsa-sk" && k.type != "ssh-ed25519" + && k.type != "ssh-ed25519-sk" && k.type != "ssh-rsa") + throw Error( + "Unknown key type '%s'.\n" "Please use one of\n" "- ssh-dsa\n" " ssh-ecdsa\n" " ssh-ecdsa-sk\n" " ssh-ed25519\n" " ssh-ed25519-sk\n" - " ssh-rsa", k.type); + " ssh-rsa", + k.type); allowedSigners += "* " + k.type + " " + k.key + "\n"; } writeFile(allowedSignersFile, allowedSigners); // Run verification command - auto [status, output] = runProgram(RunOptions { + auto [status, output] = runProgram( + RunOptions{ .program = "git", - .args = { - "-c", - "gpg.ssh.allowedSignersFile=" + allowedSignersFile, - "-C", path.string(), - "verify-commit", - rev.gitRev() - }, + .args = + {"-c", + "gpg.ssh.allowedSignersFile=" + allowedSignersFile, + "-C", + path.string(), + "verify-commit", + rev.gitRev()}, .mergeStderrToStdout = true, - }); + }); /* Evaluate result through status code and checking if public key fingerprints appear on stderr. This is necessary @@ -606,7 +598,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this commit being signed by gpg keys that are present in the users key agent. */ std::string re = R"(Good "git" signature for \* with .* key SHA256:[)"; - for (const fetchers::PublicKey & k : publicKeys){ + for (const fetchers::PublicKey & k : publicKeys) { // Calculate sha256 fingerprint from public key and escape the regex symbol '+' to match the key literally std::string keyDecoded; try { @@ -614,8 +606,9 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this } catch (Error & e) { e.addTrace({}, "while decoding public key '%s' used for git signature", k.key); } - auto fingerprint = trim(hashString(HashAlgorithm::SHA256, keyDecoded).to_string(nix::HashFormat::Base64, false), "="); - auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+" ); + auto fingerprint = + trim(hashString(HashAlgorithm::SHA256, keyDecoded).to_string(nix::HashFormat::Base64, false), "="); + auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+"); re += "(" + escaped_fingerprint + ")"; } re += "]"; @@ -680,13 +673,11 @@ struct GitSourceAccessor : SourceAccessor Sync state_; GitSourceAccessor(ref repo_, const Hash & rev, bool smudgeLfs) - : state_{ - State { - .repo = repo_, - .root = peelToTreeOrBlob(lookupObject(*repo_, hashToOID(rev)).get()), - .lfsFetch = smudgeLfs ? std::make_optional(lfs::Fetch(*repo_, hashToOID(rev))) : std::nullopt, - } - } + : state_{State{ + .repo = repo_, + .root = peelToTreeOrBlob(lookupObject(*repo_, hashToOID(rev)).get()), + .lfsFetch = smudgeLfs ? 
std::make_optional(lfs::Fetch(*repo_, hashToOID(rev))) : std::nullopt, + }} { } @@ -702,8 +693,9 @@ struct GitSourceAccessor : SourceAccessor try { // FIXME: do we need to hold the state lock while // doing this? - auto contents = std::string((const char *) git_blob_rawcontent(blob.get()), git_blob_rawsize(blob.get())); - state->lfsFetch->fetch(contents, path, s, [&s](uint64_t size){ s.s.reserve(size); }); + auto contents = + std::string((const char *) git_blob_rawcontent(blob.get()), git_blob_rawsize(blob.get())); + state->lfsFetch->fetch(contents, path, s, [&s](uint64_t size) { s.s.reserve(size); }); } catch (Error & e) { e.addTrace({}, "while smudging git-lfs file '%s'", path); throw; @@ -731,7 +723,7 @@ struct GitSourceAccessor : SourceAccessor auto state(state_.lock()); if (path.isRoot()) - return Stat { .type = git_object_type(state->root.get()) == GIT_OBJECT_TREE ? tDirectory : tRegular }; + return Stat{.type = git_object_type(state->root.get()) == GIT_OBJECT_TREE ? tDirectory : tRegular}; auto entry = lookup(*state, path); if (!entry) @@ -740,20 +732,20 @@ struct GitSourceAccessor : SourceAccessor auto mode = git_tree_entry_filemode(entry); if (mode == GIT_FILEMODE_TREE) - return Stat { .type = tDirectory }; + return Stat{.type = tDirectory}; else if (mode == GIT_FILEMODE_BLOB) - return Stat { .type = tRegular }; + return Stat{.type = tRegular}; else if (mode == GIT_FILEMODE_BLOB_EXECUTABLE) - return Stat { .type = tRegular, .isExecutable = true }; + return Stat{.type = tRegular, .isExecutable = true}; else if (mode == GIT_FILEMODE_LINK) - return Stat { .type = tSymlink }; + return Stat{.type = tSymlink}; else if (mode == GIT_FILEMODE_COMMIT) // Treat submodules as an empty directory. - return Stat { .type = tDirectory }; + return Stat{.type = tDirectory}; else throw Error("file '%s' has an unsupported Git file type"); @@ -763,24 +755,23 @@ struct GitSourceAccessor : SourceAccessor { auto state(state_.lock()); - return std::visit(overloaded { - [&](Tree tree) { - DirEntries res; + return std::visit( + overloaded{ + [&](Tree tree) { + DirEntries res; - auto count = git_tree_entrycount(tree.get()); + auto count = git_tree_entrycount(tree.get()); - for (size_t n = 0; n < count; ++n) { - auto entry = git_tree_entry_byindex(tree.get(), n); - // FIXME: add to cache - res.emplace(std::string(git_tree_entry_name(entry)), DirEntry{}); - } + for (size_t n = 0; n < count; ++n) { + auto entry = git_tree_entry_byindex(tree.get(), n); + // FIXME: add to cache + res.emplace(std::string(git_tree_entry_name(entry)), DirEntry{}); + } - return res; - }, - [&](Submodule) { - return DirEntries(); - } - }, getTree(*state, path)); + return res; + }, + [&](Submodule) { return DirEntries(); }}, + getTree(*state, path)); } std::string readLink(const CanonPath & path) override @@ -810,15 +801,18 @@ struct GitSourceAccessor : SourceAccessor git_tree_entry * lookup(State & state, const CanonPath & path) { auto i = lookupCache.find(path); - if (i != lookupCache.end()) return i->second.get(); + if (i != lookupCache.end()) + return i->second.get(); auto parent = path.parent(); - if (!parent) return nullptr; + if (!parent) + return nullptr; auto name = path.baseName().value(); auto parentTree = lookupTree(state, *parent); - if (!parentTree) return nullptr; + if (!parentTree) + return nullptr; auto count = git_tree_entrycount(parentTree->get()); @@ -860,7 +854,7 @@ struct GitSourceAccessor : SourceAccessor return std::nullopt; Tree tree; - if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(tree), 
*state.repo, entry)) + if (git_tree_entry_to_object((git_object **) (git_tree **) Setter(tree), *state.repo, entry)) throw Error("looking up directory '%s': %s", showPath(path), git_error_last()->message); return tree; @@ -874,7 +868,8 @@ struct GitSourceAccessor : SourceAccessor return entry; } - struct Submodule { }; + struct Submodule + {}; std::variant getTree(State & state, const CanonPath & path) { @@ -894,7 +889,7 @@ struct GitSourceAccessor : SourceAccessor throw Error("'%s' is not a directory", showPath(path)); Tree tree; - if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(tree), *state.repo, entry)) + if (git_tree_entry_to_object((git_object **) (git_tree **) Setter(tree), *state.repo, entry)) throw Error("looking up directory '%s': %s", showPath(path), git_error_last()->message); return tree; @@ -905,16 +900,12 @@ struct GitSourceAccessor : SourceAccessor if (!expectSymlink && git_object_type(state.root.get()) == GIT_OBJECT_BLOB) return dupObject((git_blob *) &*state.root); - auto notExpected = [&]() - { - throw Error( - expectSymlink - ? "'%s' is not a symlink" - : "'%s' is not a regular file", - showPath(path)); + auto notExpected = [&]() { + throw Error(expectSymlink ? "'%s' is not a symlink" : "'%s' is not a regular file", showPath(path)); }; - if (path.isRoot()) notExpected(); + if (path.isRoot()) + notExpected(); auto entry = need(state, path); @@ -931,26 +922,31 @@ struct GitSourceAccessor : SourceAccessor } Blob blob; - if (git_tree_entry_to_object((git_object * *) (git_blob * *) Setter(blob), *state.repo, entry)) + if (git_tree_entry_to_object((git_object **) (git_blob **) Setter(blob), *state.repo, entry)) throw Error("looking up file '%s': %s", showPath(path), git_error_last()->message); return blob; } }; -struct GitExportIgnoreSourceAccessor : CachingFilteringSourceAccessor { +struct GitExportIgnoreSourceAccessor : CachingFilteringSourceAccessor +{ ref repo; std::optional rev; GitExportIgnoreSourceAccessor(ref repo, ref next, std::optional rev) - : CachingFilteringSourceAccessor(next, [&](const CanonPath & path) { - return RestrictedPathError(fmt("'%s' does not exist because it was fetched with exportIgnore enabled", path)); - }) + : CachingFilteringSourceAccessor( + next, + [&](const CanonPath & path) { + return RestrictedPathError( + fmt("'%s' does not exist because it was fetched with exportIgnore enabled", path)); + }) , repo(repo) , rev(rev) - { } + { + } - bool gitAttrGet(const CanonPath & path, const char * attrName, const char * & valueOut) + bool gitAttrGet(const CanonPath & path, const char * attrName, const char *& valueOut) { const char * pathCStr = path.rel_c_str(); @@ -960,27 +956,16 @@ struct GitExportIgnoreSourceAccessor : CachingFilteringSourceAccessor { // TODO: test that gitattributes from global and system are not used // (ie more or less: home and etc - both of them!) 
opts.flags = GIT_ATTR_CHECK_INCLUDE_COMMIT | GIT_ATTR_CHECK_NO_SYSTEM; - return git_attr_get_ext( - &valueOut, - *repo, - &opts, - pathCStr, - attrName - ); - } - else { + return git_attr_get_ext(&valueOut, *repo, &opts, pathCStr, attrName); + } else { return git_attr_get( - &valueOut, - *repo, - GIT_ATTR_CHECK_INDEX_ONLY | GIT_ATTR_CHECK_NO_SYSTEM, - pathCStr, - attrName); + &valueOut, *repo, GIT_ATTR_CHECK_INDEX_ONLY | GIT_ATTR_CHECK_NO_SYSTEM, pathCStr, attrName); } } bool isExportIgnored(const CanonPath & path) { - const char *exportIgnoreEntry = nullptr; + const char * exportIgnoreEntry = nullptr; // GIT_ATTR_CHECK_INDEX_ONLY: // > It will use index only for creating archives or for a bare repo @@ -991,8 +976,7 @@ struct GitExportIgnoreSourceAccessor : CachingFilteringSourceAccessor { return false; else throw Error("looking up '%s': %s", showPath(path), git_error_last()->message); - } - else { + } else { // Official git will silently reject export-ignore lines that have // values. We do the same. return GIT_ATTR_IS_TRUE(exportIgnoreEntry); @@ -1003,7 +987,6 @@ struct GitExportIgnoreSourceAccessor : CachingFilteringSourceAccessor { { return !isExportIgnored(path); } - }; struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink @@ -1023,26 +1006,25 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink const git_tree_entry * entry; Tree prevTree = nullptr; - if (!pendingDirs.empty() && - (entry = git_treebuilder_get(pendingDirs.back().builder.get(), name.c_str()))) - { + if (!pendingDirs.empty() && (entry = git_treebuilder_get(pendingDirs.back().builder.get(), name.c_str()))) { /* Clone a tree that we've already finished. This happens if a tarball has directory entries that are not contiguous. */ if (git_tree_entry_type(entry) != GIT_OBJECT_TREE) throw Error("parent of '%s' is not a directory", name); - if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(prevTree), *repo, entry)) + if (git_tree_entry_to_object((git_object **) (git_tree **) Setter(prevTree), *repo, entry)) throw Error("looking up parent of '%s': %s", name, git_error_last()->message); } git_treebuilder * b; if (git_treebuilder_new(&b, *repo, prevTree.get())) throw Error("creating a tree builder: %s", git_error_last()->message); - pendingDirs.push_back({ .name = std::move(name), .builder = TreeBuilder(b) }); + pendingDirs.push_back({.name = std::move(name), .builder = TreeBuilder(b)}); }; - GitFileSystemObjectSinkImpl(ref repo) : repo(repo) + GitFileSystemObjectSinkImpl(ref repo) + : repo(repo) { pushBuilder(""); } @@ -1089,53 +1071,54 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink { std::span pathComponents2{pathComponents}; - updateBuilders( - isDir - ? pathComponents2 - : pathComponents2.first(pathComponents2.size() - 1)); + updateBuilders(isDir ? 
pathComponents2 : pathComponents2.first(pathComponents2.size() - 1)); return true; } - void createRegularFile( - const CanonPath & path, - std::function func) override + void createRegularFile(const CanonPath & path, std::function func) override { auto pathComponents = tokenizeString>(path.rel(), "/"); - if (!prepareDirs(pathComponents, false)) return; + if (!prepareDirs(pathComponents, false)) + return; git_writestream * stream = nullptr; if (git_blob_create_from_stream(&stream, *repo, nullptr)) throw Error("creating a blob stream object: %s", git_error_last()->message); - struct CRF : CreateRegularFileSink { + struct CRF : CreateRegularFileSink + { const CanonPath & path; GitFileSystemObjectSinkImpl & back; git_writestream * stream; bool executable = false; + CRF(const CanonPath & path, GitFileSystemObjectSinkImpl & back, git_writestream * stream) - : path(path), back(back), stream(stream) - {} - void operator () (std::string_view data) override + : path(path) + , back(back) + , stream(stream) + { + } + + void operator()(std::string_view data) override { if (stream->write(stream, data.data(), data.size())) throw Error("writing a blob for tarball member '%s': %s", path, git_error_last()->message); } + void isExecutable() override { executable = true; } - } crf { path, *this, stream }; + } crf{path, *this, stream}; + func(crf); git_oid oid; if (git_blob_create_from_stream_commit(&oid, stream)) throw Error("creating a blob object for tarball member '%s': %s", path, git_error_last()->message); - addToTree(*pathComponents.rbegin(), oid, - crf.executable - ? GIT_FILEMODE_BLOB_EXECUTABLE - : GIT_FILEMODE_BLOB); + addToTree(*pathComponents.rbegin(), oid, crf.executable ? GIT_FILEMODE_BLOB_EXECUTABLE : GIT_FILEMODE_BLOB); } void createDirectory(const CanonPath & path) override @@ -1147,7 +1130,8 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink void createSymlink(const CanonPath & path, const std::string & target) override { auto pathComponents = tokenizeString>(path.rel(), "/"); - if (!prepareDirs(pathComponents, false)) return; + if (!prepareDirs(pathComponents, false)) + return; git_oid oid; if (git_blob_create_from_buffer(&oid, *repo, target.c_str(), target.size())) @@ -1162,7 +1146,8 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink for (auto & c : path) pathComponents.emplace_back(c); - if (!prepareDirs(pathComponents, false)) return; + if (!prepareDirs(pathComponents, false)) + return; // We can't just look up the path from the start of the root, since // some parent directories may not have finished yet, so we compute @@ -1206,9 +1191,7 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink assert(entry); - addToTree(*pathComponents.rbegin(), - *git_tree_entry_id(entry), - git_tree_entry_filemode(entry)); + addToTree(*pathComponents.rbegin(), *git_tree_entry_id(entry), git_tree_entry_filemode(entry)); } Hash flush() override @@ -1223,19 +1206,14 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink } }; -ref GitRepoImpl::getRawAccessor( - const Hash & rev, - bool smudgeLfs) +ref GitRepoImpl::getRawAccessor(const Hash & rev, bool smudgeLfs) { auto self = ref(shared_from_this()); return make_ref(self, rev, smudgeLfs); } -ref GitRepoImpl::getAccessor( - const Hash & rev, - bool exportIgnore, - std::string displayPrefix, - bool smudgeLfs) +ref +GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore, std::string displayPrefix, bool smudgeLfs) { auto self = ref(shared_from_this()); ref rawGitAccessor = getRawAccessor(rev, smudgeLfs); @@ 
-1246,16 +1224,17 @@ ref GitRepoImpl::getAccessor( return rawGitAccessor; } -ref GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) +ref +GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) { auto self = ref(shared_from_this()); - ref fileAccessor = - AllowListSourceAccessor::create( - makeFSSourceAccessor(path), - std::set{ wd.files }, - // Always allow access to the root, but not its children. - std::unordered_set{CanonPath::root}, - std::move(makeNotAllowedError)).cast(); + ref fileAccessor = AllowListSourceAccessor::create( + makeFSSourceAccessor(path), + std::set{wd.files}, + // Always allow access to the root, but not its children. + std::unordered_set{CanonPath::root}, + std::move(makeNotAllowedError)) + .cast(); if (exportIgnore) return make_ref(self, fileAccessor, std::nullopt); else @@ -1273,7 +1252,8 @@ std::vector> GitRepoImpl::getSubmodules CanonPath modulesFile(".gitmodules"); auto accessor = getAccessor(rev, exportIgnore, ""); - if (!accessor->pathExists(modulesFile)) return {}; + if (!accessor->pathExists(modulesFile)) + return {}; /* Parse it and get the revision of each submodule. */ auto configS = accessor->readFile(modulesFile); @@ -1313,11 +1293,12 @@ GitRepo::WorkdirInfo GitRepo::getCachedWorkdirInfo(const std::filesystem::path & { auto cache(_cache.lock()); auto i = cache->find(path); - if (i != cache->end()) return i->second; + if (i != cache->end()) + return i->second; } auto workdirInfo = GitRepo::openRepo(path)->getWorkdirInfo(); _cache.lock()->emplace(path, workdirInfo); return workdirInfo; } -} +} // namespace nix diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 88fe2e83d..1ab78c77b 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -43,10 +43,8 @@ bool isCacheFileWithinTtl(time_t now, const struct stat & st) Path getCachePath(std::string_view key, bool shallow) { - return getCacheDir() - + "/gitv3/" - + hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false) - + (shallow ? "-shallow" : ""); + return getCacheDir() + "/gitv3/" + hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false) + + (shallow ? "-shallow" : ""); } // Returns the name of the HEAD branch. @@ -58,24 +56,26 @@ Path getCachePath(std::string_view key, bool shallow) // ... 
std::optional readHead(const Path & path) { - auto [status, output] = runProgram(RunOptions { - .program = "git", - // FIXME: use 'HEAD' to avoid returning all refs - .args = {"ls-remote", "--symref", path}, - .isInteractive = true, - }); - if (status != 0) return std::nullopt; + auto [status, output] = runProgram( + RunOptions{ + .program = "git", + // FIXME: use 'HEAD' to avoid returning all refs + .args = {"ls-remote", "--symref", path}, + .isInteractive = true, + }); + if (status != 0) + return std::nullopt; std::string_view line = output; line = line.substr(0, line.find("\n")); if (const auto parseResult = git::parseLsRemoteLine(line); parseResult && parseResult->reference == "HEAD") { switch (parseResult->kind) { - case git::LsRemoteRefLine::Kind::Symbolic: - debug("resolved HEAD ref '%s' for repo '%s'", parseResult->target, path); - break; - case git::LsRemoteRefLine::Kind::Object: - debug("resolved HEAD rev '%s' for repo '%s'", parseResult->target, path); - break; + case git::LsRemoteRefLine::Kind::Symbolic: + debug("resolved HEAD ref '%s' for repo '%s'", parseResult->target, path); + break; + case git::LsRemoteRefLine::Kind::Object: + debug("resolved HEAD rev '%s' for repo '%s'", parseResult->target, path); + break; } return parseResult->target; } @@ -87,15 +87,15 @@ bool storeCachedHead(const std::string & actualUrl, bool shallow, const std::str { Path cacheDir = getCachePath(actualUrl, shallow); try { - runProgram("git", true, { "-C", cacheDir, "--git-dir", ".", "symbolic-ref", "--", "HEAD", headRef }); - } catch (ExecError &e) { + runProgram("git", true, {"-C", cacheDir, "--git-dir", ".", "symbolic-ref", "--", "HEAD", headRef}); + } catch (ExecError & e) { if ( #ifndef WIN32 // TODO abstract over exit status handling on Windows !WIFEXITED(e.status) #else e.status != 0 #endif - ) + ) throw; return false; @@ -116,17 +116,15 @@ std::optional readHeadCached(const std::string & actualUrl, bool sh std::optional cachedRef; if (stat(headRefFile.c_str(), &st) == 0) { cachedRef = readHead(cacheDir); - if (cachedRef != std::nullopt && - *cachedRef != gitInitialBranch && - isCacheFileWithinTtl(now, st)) - { + if (cachedRef != std::nullopt && *cachedRef != gitInitialBranch && isCacheFileWithinTtl(now, st)) { debug("using cached HEAD ref '%s' for repo '%s'", *cachedRef, actualUrl); return cachedRef; } } auto ref = readHead(actualUrl); - if (ref) return ref; + if (ref) + return ref; if (cachedRef) { // If the cached git ref is expired in fetch() below, and the 'git fetch' @@ -152,28 +150,26 @@ std::vector getPublicKeys(const Attrs & attrs) } } if (attrs.contains("publicKey")) - publicKeys.push_back(PublicKey{maybeGetStrAttr(attrs, "keytype").value_or("ssh-ed25519"),getStrAttr(attrs, "publicKey")}); + publicKeys.push_back( + PublicKey{maybeGetStrAttr(attrs, "keytype").value_or("ssh-ed25519"), getStrAttr(attrs, "publicKey")}); return publicKeys; } -} // end namespace +} // end namespace static const Hash nullRev{HashAlgorithm::SHA1}; struct GitInputScheme : InputScheme { - std::optional inputFromURL( - const Settings & settings, - const ParsedURL & url, bool requireTree) const override + std::optional inputFromURL(const Settings & settings, const ParsedURL & url, bool requireTree) const override { - if (url.scheme != "git" && - url.scheme != "git+http" && - url.scheme != "git+https" && - url.scheme != "git+ssh" && - url.scheme != "git+file") return {}; + if (url.scheme != "git" && url.scheme != "git+http" && url.scheme != "git+https" && url.scheme != "git+ssh" + && url.scheme != "git+file") + 
return {}; auto url2(url); - if (hasPrefix(url2.scheme, "git+")) url2.scheme = std::string(url2.scheme, 4); + if (hasPrefix(url2.scheme, "git+")) + url2.scheme = std::string(url2.scheme, 4); url2.query.clear(); Attrs attrs; @@ -182,8 +178,10 @@ struct GitInputScheme : InputScheme for (auto & [name, value] : url.query) { if (name == "rev" || name == "ref" || name == "keytype" || name == "publicKey" || name == "publicKeys") attrs.emplace(name, value); - else if (name == "shallow" || name == "submodules" || name == "lfs" || name == "exportIgnore" || name == "allRefs" || name == "verifyCommit") - attrs.emplace(name, Explicit { value == "1" }); + else if ( + name == "shallow" || name == "submodules" || name == "lfs" || name == "exportIgnore" + || name == "allRefs" || name == "verifyCommit") + attrs.emplace(name, Explicit{value == "1"}); else url2.query.emplace(name, value); } @@ -193,7 +191,6 @@ struct GitInputScheme : InputScheme return inputFromAttrs(settings, attrs); } - std::string_view schemeName() const override { return "git"; @@ -223,15 +220,10 @@ struct GitInputScheme : InputScheme }; } - std::optional inputFromAttrs( - const Settings & settings, - const Attrs & attrs) const override + std::optional inputFromAttrs(const Settings & settings, const Attrs & attrs) const override { for (auto & [name, _] : attrs) - if (name == "verifyCommit" - || name == "keytype" - || name == "publicKey" - || name == "publicKeys") + if (name == "verifyCommit" || name == "keytype" || name == "publicKey" || name == "publicKeys") experimentalFeatureSettings.require(Xp::VerifiedFetches); maybeGetBoolAttr(attrs, "verifyCommit"); @@ -255,9 +247,12 @@ struct GitInputScheme : InputScheme ParsedURL toURL(const Input & input) const override { auto url = parseURL(getStrAttr(input.attrs, "url")); - if (url.scheme != "git") url.scheme = "git+" + url.scheme; - if (auto rev = input.getRev()) url.query.insert_or_assign("rev", rev->gitRev()); - if (auto ref = input.getRef()) url.query.insert_or_assign("ref", *ref); + if (url.scheme != "git") + url.scheme = "git+" + url.scheme; + if (auto rev = input.getRev()) + url.query.insert_or_assign("rev", rev->gitRev()); + if (auto ref = input.getRef()) + url.query.insert_or_assign("ref", *ref); if (getShallowAttr(input)) url.query.insert_or_assign("shallow", "1"); if (getLfsAttr(input)) @@ -272,20 +267,18 @@ struct GitInputScheme : InputScheme if (publicKeys.size() == 1) { url.query.insert_or_assign("keytype", publicKeys.at(0).type); url.query.insert_or_assign("publicKey", publicKeys.at(0).key); - } - else if (publicKeys.size() > 1) + } else if (publicKeys.size() > 1) url.query.insert_or_assign("publicKeys", publicKeys_to_string(publicKeys)); return url; } - Input applyOverrides( - const Input & input, - std::optional ref, - std::optional rev) const override + Input applyOverrides(const Input & input, std::optional ref, std::optional rev) const override { auto res(input); - if (rev) res.attrs.insert_or_assign("rev", rev->gitRev()); - if (ref) res.attrs.insert_or_assign("ref", *ref); + if (rev) + res.attrs.insert_or_assign("rev", rev->gitRev()); + if (ref) + res.attrs.insert_or_assign("ref", *ref); if (!res.getRef() && res.getRev()) throw Error("Git input '%s' has a commit hash but no branch/tag name", res.to_string()); return res; @@ -304,7 +297,8 @@ struct GitInputScheme : InputScheme args.push_back(*ref); } - if (input.getRev()) throw UnimplementedError("cloning a specific revision is not implemented"); + if (input.getRev()) + throw UnimplementedError("cloning a specific revision 
is not implemented"); args.push_back(destDir); @@ -325,14 +319,23 @@ struct GitInputScheme : InputScheme auto repoInfo = getRepoInfo(input); auto repoPath = repoInfo.getPath(); if (!repoPath) - throw Error("cannot commit '%s' to Git repository '%s' because it's not a working tree", path, input.to_string()); + throw Error( + "cannot commit '%s' to Git repository '%s' because it's not a working tree", path, input.to_string()); writeFile(*repoPath / path.rel(), contents); - auto result = runProgram(RunOptions { - .program = "git", - .args = {"-C", repoPath->string(), "--git-dir", repoInfo.gitDir, "check-ignore", "--quiet", std::string(path.rel())}, - }); + auto result = runProgram( + RunOptions{ + .program = "git", + .args = + {"-C", + repoPath->string(), + "--git-dir", + repoInfo.gitDir, + "check-ignore", + "--quiet", + std::string(path.rel())}, + }); auto exitCode = #ifndef WIN32 // TODO abstract over exit status handling on Windows WEXITSTATUS(result.first) @@ -343,15 +346,32 @@ struct GitInputScheme : InputScheme if (exitCode != 0) { // The path is not `.gitignore`d, we can add the file. - runProgram("git", true, - { "-C", repoPath->string(), "--git-dir", repoInfo.gitDir, "add", "--intent-to-add", "--", std::string(path.rel()) }); - + runProgram( + "git", + true, + {"-C", + repoPath->string(), + "--git-dir", + repoInfo.gitDir, + "add", + "--intent-to-add", + "--", + std::string(path.rel())}); if (commitMsg) { // Pause the logger to allow for user input (such as a gpg passphrase) in `git commit` auto suspension = logger->suspend(); - runProgram("git", true, - { "-C", repoPath->string(), "--git-dir", repoInfo.gitDir, "commit", std::string(path.rel()), "-F", "-" }, + runProgram( + "git", + true, + {"-C", + repoPath->string(), + "--git-dir", + repoInfo.gitDir, + "commit", + std::string(path.rel()), + "-F", + "-"}, *commitMsg); } } @@ -370,12 +390,10 @@ struct GitInputScheme : InputScheme std::string locationToArg() const { return std::visit( - overloaded { - [&](const std::filesystem::path & path) - { return path.string(); }, - [&](const ParsedURL & url) - { return url.to_string(); } - }, location); + overloaded{ + [&](const std::filesystem::path & path) { return path.string(); }, + [&](const ParsedURL & url) { return url.to_string(); }}, + location); } std::optional getPath() const @@ -427,10 +445,11 @@ struct GitInputScheme : InputScheme RepoInfo getRepoInfo(const Input & input) const { - auto checkHashAlgorithm = [&](const std::optional & hash) - { + auto checkHashAlgorithm = [&](const std::optional & hash) { if (hash.has_value() && !(hash->algo == HashAlgorithm::SHA1 || hash->algo == HashAlgorithm::SHA256)) - throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true)); + throw Error( + "Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", + hash->to_string(HashFormat::Base16, true)); }; if (auto rev = input.getRev()) @@ -447,8 +466,7 @@ struct GitInputScheme : InputScheme // Why are we checking for bare repository? // well if it's a bare repository we want to force a git fetch rather than copying the folder - bool isBareRepository = url.scheme == "file" && pathExists(url.path) && - !pathExists(url.path + "/.git"); + bool isBareRepository = url.scheme == "file" && pathExists(url.path) && !pathExists(url.path + "/.git"); // // FIXME: here we turn a possibly relative path into an absolute path. 
// This allows relative git flake inputs to be resolved against the @@ -490,7 +508,11 @@ struct GitInputScheme : InputScheme return repoInfo; } - uint64_t getLastModified(const Settings & settings, const RepoInfo & repoInfo, const std::filesystem::path & repoDir, const Hash & rev) const + uint64_t getLastModified( + const Settings & settings, + const RepoInfo & repoInfo, + const std::filesystem::path & repoDir, + const Hash & rev) const { Cache::Key key{"gitLastModified", {{"rev", rev.gitRev()}}}; @@ -506,7 +528,11 @@ struct GitInputScheme : InputScheme return lastModified; } - uint64_t getRevCount(const Settings & settings, const RepoInfo & repoInfo, const std::filesystem::path & repoDir, const Hash & rev) const + uint64_t getRevCount( + const Settings & settings, + const RepoInfo & repoInfo, + const std::filesystem::path & repoDir, + const Hash & rev) const { Cache::Key key{"gitRevCount", {{"rev", rev.gitRev()}}}; @@ -515,7 +541,8 @@ struct GitInputScheme : InputScheme if (auto revCountAttrs = cache->lookup(key)) return getIntAttr(*revCountAttrs, "revCount"); - Activity act(*logger, lvlChatty, actUnknown, fmt("getting Git revision count of '%s'", repoInfo.locationToArg())); + Activity act( + *logger, lvlChatty, actUnknown, fmt("getting Git revision count of '%s'", repoInfo.locationToArg())); auto revCount = GitRepo::openRepo(repoDir)->getRevCount(rev); @@ -527,12 +554,10 @@ struct GitInputScheme : InputScheme std::string getDefaultRef(const RepoInfo & repoInfo, bool shallow) const { auto head = std::visit( - overloaded { - [&](const std::filesystem::path & path) - { return GitRepo::openRepo(path)->getWorkdirRef(); }, - [&](const ParsedURL & url) - { return readHeadCached(url.to_string(), shallow); } - }, repoInfo.location); + overloaded{ + [&](const std::filesystem::path & path) { return GitRepo::openRepo(path)->getWorkdirRef(); }, + [&](const ParsedURL & url) { return readHeadCached(url.to_string(), shallow); }}, + repoInfo.location); if (!head) { warn("could not read HEAD ref from repo at '%s', using 'master'", repoInfo.locationToArg()); return "master"; @@ -566,14 +591,13 @@ struct GitInputScheme : InputScheme if (input.getRev() && repo) repo->verifyCommit(*input.getRev(), publicKeys); else - throw Error("commit verification is required for Git repository '%s', but it's dirty", input.to_string()); + throw Error( + "commit verification is required for Git repository '%s', but it's dirty", input.to_string()); } } - std::pair, Input> getAccessorFromCommit( - ref store, - RepoInfo & repoInfo, - Input && input) const + std::pair, Input> + getAccessorFromCommit(ref store, RepoInfo & repoInfo, Input && input) const { assert(!repoInfo.workdirInfo.isDirty); @@ -604,10 +628,7 @@ struct GitInputScheme : InputScheme // We need to set the origin so resolving submodule URLs works repo->setRemote("origin", repoUrl.to_string()); - auto localRefFile = - ref.compare(0, 5, "refs/") == 0 - ? cacheDir / ref - : cacheDir / "refs/heads" / ref; + auto localRefFile = ref.compare(0, 5, "refs/") == 0 ? cacheDir / ref : cacheDir / "refs/heads" / ref; bool doFetch = false; time_t now = time(0); @@ -623,30 +644,27 @@ struct GitInputScheme : InputScheme /* If the local ref is older than ‘tarball-ttl’ seconds, do a git fetch to update the local ref to the remote ref. 
*/ struct stat st; - doFetch = stat(localRefFile.string().c_str(), &st) != 0 || - !isCacheFileWithinTtl(now, st); + doFetch = stat(localRefFile.string().c_str(), &st) != 0 || !isCacheFileWithinTtl(now, st); } } if (doFetch) { bool shallow = getShallowAttr(input); try { - auto fetchRef = - getAllRefsAttr(input) - ? "refs/*:refs/*" - : input.getRev() - ? input.getRev()->gitRev() - : ref.compare(0, 5, "refs/") == 0 - ? fmt("%1%:%1%", ref) - : ref == "HEAD" - ? ref - : fmt("%1%:%1%", "refs/heads/" + ref); + auto fetchRef = getAllRefsAttr(input) ? "refs/*:refs/*" + : input.getRev() ? input.getRev()->gitRev() + : ref.compare(0, 5, "refs/") == 0 ? fmt("%1%:%1%", ref) + : ref == "HEAD" ? ref + : fmt("%1%:%1%", "refs/heads/" + ref); repo->fetch(repoUrl.to_string(), fetchRef, shallow); } catch (Error & e) { - if (!std::filesystem::exists(localRefFile)) throw; + if (!std::filesystem::exists(localRefFile)) + throw; logError(e.info()); - warn("could not update local clone of Git repository '%s'; continuing with the most recent version", repoInfo.locationToArg()); + warn( + "could not update local clone of Git repository '%s'; continuing with the most recent version", + repoInfo.locationToArg()); } try { @@ -663,16 +681,17 @@ struct GitInputScheme : InputScheme if (!repo->hasObject(*rev)) throw Error( "Cannot find Git revision '%s' in ref '%s' of repository '%s'! " - "Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the " - ANSI_BOLD "ref" ANSI_NORMAL " you've specified or add " ANSI_BOLD - "allRefs = true;" ANSI_NORMAL " to " ANSI_BOLD "fetchGit" ANSI_NORMAL ".", + "Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the " ANSI_BOLD + "ref" ANSI_NORMAL " you've specified or add " ANSI_BOLD "allRefs = true;" ANSI_NORMAL + " to " ANSI_BOLD "fetchGit" ANSI_NORMAL ".", rev->gitRev(), ref, repoInfo.locationToArg()); } else input.attrs.insert_or_assign("rev", repo->resolveRef(ref).gitRev()); - // cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder + // cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in + // the remainder } auto repo = GitRepo::openRepo(repoDir); @@ -680,7 +699,9 @@ struct GitInputScheme : InputScheme auto isShallow = repo->isShallow(); if (isShallow && !getShallowAttr(input)) - throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified", repoInfo.locationToArg()); + throw Error( + "'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified", + repoInfo.locationToArg()); // FIXME: check whether rev is an ancestor of ref? 
@@ -692,8 +713,7 @@ struct GitInputScheme : InputScheme }); if (!getShallowAttr(input)) - infoAttrs.insert_or_assign("revCount", - getRevCount(*input.settings, repoInfo, repoDir, rev)); + infoAttrs.insert_or_assign("revCount", getRevCount(*input.settings, repoInfo, repoDir, rev)); printTalkative("using revision %s of repo '%s'", rev.gitRev(), repoInfo.locationToArg()); @@ -711,21 +731,25 @@ struct GitInputScheme : InputScheme for (auto & [submodule, submoduleRev] : repo->getSubmodules(rev, exportIgnore)) { auto resolved = repo->resolveSubmoduleUrl(submodule.url); - debug("Git submodule %s: %s %s %s -> %s", - submodule.path, submodule.url, submodule.branch, submoduleRev.gitRev(), resolved); + debug( + "Git submodule %s: %s %s %s -> %s", + submodule.path, + submodule.url, + submodule.branch, + submoduleRev.gitRev(), + resolved); fetchers::Attrs attrs; attrs.insert_or_assign("type", "git"); attrs.insert_or_assign("url", resolved); if (submodule.branch != "") attrs.insert_or_assign("ref", submodule.branch); attrs.insert_or_assign("rev", submoduleRev.gitRev()); - attrs.insert_or_assign("exportIgnore", Explicit{ exportIgnore }); - attrs.insert_or_assign("submodules", Explicit{ true }); - attrs.insert_or_assign("lfs", Explicit{ smudgeLfs }); - attrs.insert_or_assign("allRefs", Explicit{ true }); + attrs.insert_or_assign("exportIgnore", Explicit{exportIgnore}); + attrs.insert_or_assign("submodules", Explicit{true}); + attrs.insert_or_assign("lfs", Explicit{smudgeLfs}); + attrs.insert_or_assign("allRefs", Explicit{true}); auto submoduleInput = fetchers::Input::fromAttrs(*input.settings, std::move(attrs)); - auto [submoduleAccessor, submoduleInput2] = - submoduleInput.getAccessor(store); + auto [submoduleAccessor, submoduleInput2] = submoduleInput.getAccessor(store); submoduleAccessor->setPathDisplay("«" + submoduleInput.to_string() + "»"); mounts.insert_or_assign(submodule.path, submoduleAccessor); } @@ -744,10 +768,8 @@ struct GitInputScheme : InputScheme return {accessor, std::move(input)}; } - std::pair, Input> getAccessorFromWorkdir( - ref store, - RepoInfo & repoInfo, - Input && input) const + std::pair, Input> + getAccessorFromWorkdir(ref store, RepoInfo & repoInfo, Input && input) const { auto repoPath = repoInfo.getPath().value(); @@ -761,9 +783,7 @@ struct GitInputScheme : InputScheme auto exportIgnore = getExportIgnoreAttr(input); ref accessor = - repo->getAccessor(repoInfo.workdirInfo, - exportIgnore, - makeNotAllowedError(repoPath)); + repo->getAccessor(repoInfo.workdirInfo, exportIgnore, makeNotAllowedError(repoPath)); /* If the repo has submodules, return a mounted input accessor consisting of the accessor for the top-level repo and the @@ -776,14 +796,13 @@ struct GitInputScheme : InputScheme fetchers::Attrs attrs; attrs.insert_or_assign("type", "git"); attrs.insert_or_assign("url", submodulePath.string()); - attrs.insert_or_assign("exportIgnore", Explicit{ exportIgnore }); - attrs.insert_or_assign("submodules", Explicit{ true }); + attrs.insert_or_assign("exportIgnore", Explicit{exportIgnore}); + attrs.insert_or_assign("submodules", Explicit{true}); // TODO: fall back to getAccessorFromCommit-like fetch when submodules aren't checked out // attrs.insert_or_assign("allRefs", Explicit{ true }); auto submoduleInput = fetchers::Input::fromAttrs(*input.settings, std::move(attrs)); - auto [submoduleAccessor, submoduleInput2] = - submoduleInput.getAccessor(store); + auto [submoduleAccessor, submoduleInput2] = submoduleInput.getAccessor(store); submoduleAccessor->setPathDisplay("«" + 
submoduleInput.to_string() + "»"); /* If the submodule is dirty, mark this repo dirty as @@ -809,8 +828,8 @@ struct GitInputScheme : InputScheme input.attrs.insert_or_assign("rev", rev.gitRev()); if (!getShallowAttr(input)) { - input.attrs.insert_or_assign("revCount", - rev == nullRev ? 0 : getRevCount(*input.settings, repoInfo, repoPath, rev)); + input.attrs.insert_or_assign( + "revCount", rev == nullRev ? 0 : getRevCount(*input.settings, repoInfo, repoPath, rev)); } verifyCommit(input, repo); @@ -818,10 +837,8 @@ struct GitInputScheme : InputScheme repoInfo.warnDirty(*input.settings); if (repoInfo.workdirInfo.headRev) { - input.attrs.insert_or_assign("dirtyRev", - repoInfo.workdirInfo.headRev->gitRev() + "-dirty"); - input.attrs.insert_or_assign("dirtyShortRev", - repoInfo.workdirInfo.headRev->gitShortRev() + "-dirty"); + input.attrs.insert_or_assign("dirtyRev", repoInfo.workdirInfo.headRev->gitRev() + "-dirty"); + input.attrs.insert_or_assign("dirtyShortRev", repoInfo.workdirInfo.headRev->gitShortRev() + "-dirty"); } verifyCommit(input, nullptr); @@ -830,8 +847,8 @@ struct GitInputScheme : InputScheme input.attrs.insert_or_assign( "lastModified", repoInfo.workdirInfo.headRev - ? getLastModified(*input.settings, repoInfo, repoPath, *repoInfo.workdirInfo.headRev) - : 0); + ? getLastModified(*input.settings, repoInfo, repoPath, *repoInfo.workdirInfo.headRev) + : 0); return {accessor, std::move(input)}; } @@ -842,8 +859,7 @@ struct GitInputScheme : InputScheme auto repoInfo = getRepoInfo(input); - if (getExportIgnoreAttr(input) - && getSubmodulesAttr(input)) { + if (getExportIgnoreAttr(input) && getSubmodulesAttr(input)) { /* In this situation, we don't have a git CLI behavior that we can copy. `git archive` does not support submodules, so it is unclear whether rules from the parent should affect the submodule or not. @@ -852,26 +868,26 @@ struct GitInputScheme : InputScheme throw UnimplementedError("exportIgnore and submodules are not supported together yet"); } - auto [accessor, final] = - input.getRef() || input.getRev() || !repoInfo.getPath() - ? getAccessorFromCommit(store, repoInfo, std::move(input)) - : getAccessorFromWorkdir(store, repoInfo, std::move(input)); + auto [accessor, final] = input.getRef() || input.getRev() || !repoInfo.getPath() + ? getAccessorFromCommit(store, repoInfo, std::move(input)) + : getAccessorFromWorkdir(store, repoInfo, std::move(input)); return {accessor, std::move(final)}; } std::optional getFingerprint(ref store, const Input & input) const override { - auto makeFingerprint = [&](const Hash & rev) - { - return rev.gitRev() + (getSubmodulesAttr(input) ? ";s" : "") + (getExportIgnoreAttr(input) ? ";e" : "") + (getLfsAttr(input) ? ";l" : ""); + auto makeFingerprint = [&](const Hash & rev) { + return rev.gitRev() + (getSubmodulesAttr(input) ? ";s" : "") + (getExportIgnoreAttr(input) ? ";e" : "") + + (getLfsAttr(input) ? ";l" : ""); }; if (auto rev = input.getRev()) return makeFingerprint(*rev); else { auto repoInfo = getRepoInfo(input); - if (auto repoPath = repoInfo.getPath(); repoPath && repoInfo.workdirInfo.headRev && repoInfo.workdirInfo.submodules.empty()) { + if (auto repoPath = repoInfo.getPath(); + repoPath && repoInfo.workdirInfo.headRev && repoInfo.workdirInfo.submodules.empty()) { /* Calculate a fingerprint that takes into account the deleted and modified/added files. 
*/ HashSink hashSink{HashAlgorithm::SHA512}; @@ -885,7 +901,7 @@ struct GitInputScheme : InputScheme writeString(file.abs(), hashSink); } return makeFingerprint(*repoInfo.workdirInfo.headRev) - + ";d=" + hashSink.finish().first.to_string(HashFormat::Base16, false); + + ";d=" + hashSink.finish().first.to_string(HashFormat::Base16, false); } return std::nullopt; } @@ -900,4 +916,4 @@ struct GitInputScheme : InputScheme static auto rGitInputScheme = OnStartup([] { registerInputScheme(std::make_unique()); }); -} +} // namespace nix::fetchers diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 7a902d816..c91f3ad3a 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -29,13 +29,14 @@ std::regex hostRegex(hostRegexS, std::regex::ECMAScript); struct GitArchiveInputScheme : InputScheme { - virtual std::optional> accessHeaderFromToken(const std::string & token) const = 0; + virtual std::optional> + accessHeaderFromToken(const std::string & token) const = 0; - std::optional inputFromURL( - const fetchers::Settings & settings, - const ParsedURL & url, bool requireTree) const override + std::optional + inputFromURL(const fetchers::Settings & settings, const ParsedURL & url, bool requireTree) const override { - if (url.scheme != schemeName()) return {}; + if (url.scheme != schemeName()) + return {}; auto path = tokenizeString>(url.path, "/"); @@ -68,20 +69,18 @@ struct GitArchiveInputScheme : InputScheme } else if (size < 2) throw BadURL("URL '%s' is invalid", url); - for (auto &[name, value] : url.query) { + for (auto & [name, value] : url.query) { if (name == "rev") { if (rev) throw BadURL("URL '%s' contains multiple commit hashes", url); rev = Hash::parseAny(value, HashAlgorithm::SHA1); - } - else if (name == "ref") { + } else if (name == "ref") { if (!std::regex_match(value, refRegex)) throw BadURL("URL '%s' contains an invalid branch/tag name", url); if (ref) throw BadURL("URL '%s' contains multiple branch/tag names", url); ref = value; - } - else if (name == "host") { + } else if (name == "host") { if (!std::regex_match(value, hostRegex)) throw BadURL("URL '%s' contains an invalid instance host", url); host_url = value; @@ -93,12 +92,15 @@ struct GitArchiveInputScheme : InputScheme throw BadURL("URL '%s' contains both a commit hash and a branch/tag name %s %s", url, *ref, rev->gitRev()); Input input{settings}; - input.attrs.insert_or_assign("type", std::string { schemeName() }); + input.attrs.insert_or_assign("type", std::string{schemeName()}); input.attrs.insert_or_assign("owner", path[0]); input.attrs.insert_or_assign("repo", path[1]); - if (rev) input.attrs.insert_or_assign("rev", rev->gitRev()); - if (ref) input.attrs.insert_or_assign("ref", *ref); - if (host_url) input.attrs.insert_or_assign("host", *host_url); + if (rev) + input.attrs.insert_or_assign("rev", rev->gitRev()); + if (ref) + input.attrs.insert_or_assign("ref", *ref); + if (host_url) + input.attrs.insert_or_assign("host", *host_url); auto narHash = url.query.find("narHash"); if (narHash != url.query.end()) @@ -121,9 +123,7 @@ struct GitArchiveInputScheme : InputScheme }; } - std::optional inputFromAttrs( - const fetchers::Settings & settings, - const Attrs & attrs) const override + std::optional inputFromAttrs(const fetchers::Settings & settings, const Attrs & attrs) const override { getStrAttr(attrs, "owner"); getStrAttr(attrs, "repo"); @@ -141,10 +141,12 @@ struct GitArchiveInputScheme : InputScheme auto rev = input.getRev(); auto path = owner + "/" + repo; assert(!(ref && rev)); - if 
(ref) path += "/" + *ref; - if (rev) path += "/" + rev->to_string(HashFormat::Base16, false); - auto url = ParsedURL { - .scheme = std::string { schemeName() }, + if (ref) + path += "/" + *ref; + if (rev) + path += "/" + rev->to_string(HashFormat::Base16, false); + auto url = ParsedURL{ + .scheme = std::string{schemeName()}, .path = path, }; if (auto narHash = input.getNarHash()) @@ -155,15 +157,15 @@ struct GitArchiveInputScheme : InputScheme return url; } - Input applyOverrides( - const Input & _input, - std::optional ref, - std::optional rev) const override + Input applyOverrides(const Input & _input, std::optional ref, std::optional rev) const override { auto input(_input); if (rev && ref) - throw BadURL("cannot apply both a commit hash (%s) and a branch/tag name ('%s') to input '%s'", - rev->gitRev(), *ref, input.to_string()); + throw BadURL( + "cannot apply both a commit hash (%s) and a branch/tag name ('%s') to input '%s'", + rev->gitRev(), + *ref, + input.to_string()); if (rev) { input.attrs.insert_or_assign("rev", rev->gitRev()); input.attrs.erase("ref"); @@ -176,22 +178,18 @@ struct GitArchiveInputScheme : InputScheme } // Search for the longest possible match starting from the beginning and ending at either the end or a path segment. - std::optional getAccessToken(const fetchers::Settings & settings, const std::string & host, const std::string & url) const override + std::optional getAccessToken( + const fetchers::Settings & settings, const std::string & host, const std::string & url) const override { auto tokens = settings.accessTokens.get(); std::string answer; size_t answer_match_len = 0; - if(! url.empty()) { + if (!url.empty()) { for (auto & token : tokens) { auto first = url.find(token.first); - if ( - first != std::string::npos - && token.first.length() > answer_match_len - && first == 0 - && url.substr(0,token.first.length()) == token.first - && (url.length() == token.first.length() || url[token.first.length()] == '/') - ) - { + if (first != std::string::npos && token.first.length() > answer_match_len && first == 0 + && url.substr(0, token.first.length()) == token.first + && (url.length() == token.first.length() || url[token.first.length()] == '/')) { answer = token.second; answer_match_len = token.first.length(); } @@ -204,21 +202,17 @@ struct GitArchiveInputScheme : InputScheme return {}; } - Headers makeHeadersWithAuthTokens( - const fetchers::Settings & settings, - const std::string & host, - const Input & input) const + Headers + makeHeadersWithAuthTokens(const fetchers::Settings & settings, const std::string & host, const Input & input) const { auto owner = getStrAttr(input.attrs, "owner"); auto repo = getStrAttr(input.attrs, "repo"); - auto hostAndPath = fmt( "%s/%s/%s", host, owner, repo); + auto hostAndPath = fmt("%s/%s/%s", host, owner, repo); return makeHeadersWithAuthTokens(settings, host, hostAndPath); } Headers makeHeadersWithAuthTokens( - const fetchers::Settings & settings, - const std::string & host, - const std::string & hostAndPath) const + const fetchers::Settings & settings, const std::string & host, const std::string & hostAndPath) const { Headers headers; auto accessToken = getAccessToken(settings, host, hostAndPath); @@ -250,7 +244,8 @@ struct GitArchiveInputScheme : InputScheme std::pair downloadArchive(ref store, Input input) const { - if (!maybeGetStrAttr(input.attrs, "ref")) input.attrs.insert_or_assign("ref", "HEAD"); + if (!maybeGetStrAttr(input.attrs, "ref")) + input.attrs.insert_or_assign("ref", "HEAD"); std::optional upstreamTreeHash; @@ 
-275,7 +270,7 @@ struct GitArchiveInputScheme : InputScheme auto treeHash = getRevAttr(*treeHashAttrs, "treeHash"); auto lastModified = getIntAttr(*lastModifiedAttrs, "lastModified"); if (getTarballCache()->hasObject(treeHash)) - return {std::move(input), TarballInfo { .treeHash = treeHash, .lastModified = (time_t) lastModified }}; + return {std::move(input), TarballInfo{.treeHash = treeHash, .lastModified = (time_t) lastModified}}; else debug("Git tree with hash '%s' has disappeared from the cache, refetching...", treeHash.gitRev()); } @@ -290,10 +285,10 @@ struct GitArchiveInputScheme : InputScheme getFileTransfer()->download(std::move(req), sink); }); - auto act = std::make_unique(*logger, lvlInfo, actUnknown, - fmt("unpacking '%s' into the Git cache", input.to_string())); + auto act = std::make_unique( + *logger, lvlInfo, actUnknown, fmt("unpacking '%s' into the Git cache", input.to_string())); - TarArchive archive { *source }; + TarArchive archive{*source}; auto tarballCache = getTarballCache(); auto parseSink = tarballCache->getFileSystemObjectSink(); auto lastModified = unpackTarfileToSink(archive, *parseSink); @@ -301,22 +296,20 @@ struct GitArchiveInputScheme : InputScheme act.reset(); - TarballInfo tarballInfo { - .treeHash = tarballCache->dereferenceSingletonDirectory(tree), - .lastModified = lastModified - }; + TarballInfo tarballInfo{ + .treeHash = tarballCache->dereferenceSingletonDirectory(tree), .lastModified = lastModified}; cache->upsert(treeHashKey, Attrs{{"treeHash", tarballInfo.treeHash.gitRev()}}); cache->upsert(lastModifiedKey, Attrs{{"lastModified", (uint64_t) tarballInfo.lastModified}}); - #if 0 +#if 0 if (upstreamTreeHash != tarballInfo.treeHash) warn( "Git tree hash mismatch for revision '%s' of '%s': " "expected '%s', got '%s'. " "This can happen if the Git repository uses submodules.", rev->gitRev(), input.to_string(), upstreamTreeHash->gitRev(), tarballInfo.treeHash.gitRev()); - #endif +#endif return {std::move(input), tarballInfo}; } @@ -325,15 +318,12 @@ struct GitArchiveInputScheme : InputScheme { auto [input, tarballInfo] = downloadArchive(store, _input); - #if 0 +#if 0 input.attrs.insert_or_assign("treeHash", tarballInfo.treeHash.gitRev()); - #endif +#endif input.attrs.insert_or_assign("lastModified", uint64_t(tarballInfo.lastModified)); - auto accessor = getTarballCache()->getAccessor( - tarballInfo.treeHash, - false, - "«" + input.to_string() + "»"); + auto accessor = getTarballCache()->getAccessor(tarballInfo.treeHash, false, "«" + input.to_string() + "»"); return {accessor, input}; } @@ -345,8 +335,7 @@ struct GitArchiveInputScheme : InputScheme locking. FIXME: in the future, we may want to require a Git tree hash instead of a NAR hash. */ return input.getRev().has_value() - && (input.settings->trustTarballsFromGitForges || - input.getNarHash().has_value()); + && (input.settings->trustTarballsFromGitForges || input.getNarHash().has_value()); } std::optional experimentalFeature() const override @@ -365,7 +354,10 @@ struct GitArchiveInputScheme : InputScheme struct GitHubInputScheme : GitArchiveInputScheme { - std::string_view schemeName() const override { return "github"; } + std::string_view schemeName() const override + { + return "github"; + } std::optional> accessHeaderFromToken(const std::string & token) const override { @@ -397,22 +389,20 @@ struct GitHubInputScheme : GitArchiveInputScheme { auto host = getHost(input); auto url = fmt( - host == "github.com" - ? 
"https://api.%s/repos/%s/%s/commits/%s" - : "https://%s/api/v3/repos/%s/%s/commits/%s", - host, getOwner(input), getRepo(input), *input.getRef()); + host == "github.com" ? "https://api.%s/repos/%s/%s/commits/%s" : "https://%s/api/v3/repos/%s/%s/commits/%s", + host, + getOwner(input), + getRepo(input), + *input.getRef()); Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input); auto json = nlohmann::json::parse( - readFile( - store->toRealPath( - downloadFile(store, *input.settings, url, "source", headers).storePath))); + readFile(store->toRealPath(downloadFile(store, *input.settings, url, "source", headers).storePath))); - return RefInfo { - .rev = Hash::parseAny(std::string { json["sha"] }, HashAlgorithm::SHA1), - .treeHash = Hash::parseAny(std::string { json["commit"]["tree"]["sha"] }, HashAlgorithm::SHA1) - }; + return RefInfo{ + .rev = Hash::parseAny(std::string{json["sha"]}, HashAlgorithm::SHA1), + .treeHash = Hash::parseAny(std::string{json["commit"]["tree"]["sha"]}, HashAlgorithm::SHA1)}; } DownloadUrl getDownloadUrl(const Input & input) const override @@ -423,24 +413,20 @@ struct GitHubInputScheme : GitArchiveInputScheme // If we have no auth headers then we default to the public archive // urls so we do not run into rate limits. - const auto urlFmt = - host != "github.com" - ? "https://%s/api/v3/repos/%s/%s/tarball/%s" - : headers.empty() - ? "https://%s/%s/%s/archive/%s.tar.gz" - : "https://api.%s/repos/%s/%s/tarball/%s"; + const auto urlFmt = host != "github.com" ? "https://%s/api/v3/repos/%s/%s/tarball/%s" + : headers.empty() ? "https://%s/%s/%s/archive/%s.tar.gz" + : "https://api.%s/repos/%s/%s/tarball/%s"; - const auto url = fmt(urlFmt, host, getOwner(input), getRepo(input), - input.getRev()->to_string(HashFormat::Base16, false)); + const auto url = + fmt(urlFmt, host, getOwner(input), getRepo(input), input.getRev()->to_string(HashFormat::Base16, false)); - return DownloadUrl { url, headers }; + return DownloadUrl{url, headers}; } void clone(const Input & input, const Path & destDir) const override { auto host = getHost(input); - Input::fromURL(*input.settings, fmt("git+https://%s/%s/%s.git", - host, getOwner(input), getRepo(input))) + Input::fromURL(*input.settings, fmt("git+https://%s/%s/%s.git", host, getOwner(input), getRepo(input))) .applyOverrides(input.getRef(), input.getRev()) .clone(destDir); } @@ -448,7 +434,10 @@ struct GitHubInputScheme : GitArchiveInputScheme struct GitLabInputScheme : GitArchiveInputScheme { - std::string_view schemeName() const override { return "gitlab"; } + std::string_view schemeName() const override + { + return "gitlab"; + } std::optional> accessHeaderFromToken(const std::string & token) const override { @@ -462,32 +451,33 @@ struct GitLabInputScheme : GitArchiveInputScheme auto fldsplit = token.find_first_of(':'); // n.b. C++20 would allow: if (token.starts_with("OAuth2:")) ... 
if ("OAuth2" == token.substr(0, fldsplit)) - return std::make_pair("Authorization", fmt("Bearer %s", token.substr(fldsplit+1))); + return std::make_pair("Authorization", fmt("Bearer %s", token.substr(fldsplit + 1))); if ("PAT" == token.substr(0, fldsplit)) - return std::make_pair("Private-token", token.substr(fldsplit+1)); - warn("Unrecognized GitLab token type %s", token.substr(0, fldsplit)); - return std::make_pair(token.substr(0,fldsplit), token.substr(fldsplit+1)); + return std::make_pair("Private-token", token.substr(fldsplit + 1)); + warn("Unrecognized GitLab token type %s", token.substr(0, fldsplit)); + return std::make_pair(token.substr(0, fldsplit), token.substr(fldsplit + 1)); } RefInfo getRevFromRef(nix::ref store, const Input & input) const override { auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com"); // See rate limiting note below - auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/commits?ref_name=%s", - host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef()); + auto url = + fmt("https://%s/api/v4/projects/%s%%2F%s/repository/commits?ref_name=%s", + host, + getStrAttr(input.attrs, "owner"), + getStrAttr(input.attrs, "repo"), + *input.getRef()); Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input); auto json = nlohmann::json::parse( - readFile( - store->toRealPath( - downloadFile(store, *input.settings, url, "source", headers).storePath))); + readFile(store->toRealPath(downloadFile(store, *input.settings, url, "source", headers).storePath))); if (json.is_array() && json.size() >= 1 && json[0]["id"] != nullptr) { - return RefInfo { - .rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1) - }; - } if (json.is_array() && json.size() == 0) { + return RefInfo{.rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1)}; + } + if (json.is_array() && json.size() == 0) { throw Error("No commits returned by GitLab API -- does the git ref really exist?"); } else { throw Error("Unexpected response received from GitLab: %s", json); @@ -502,20 +492,24 @@ struct GitLabInputScheme : GitArchiveInputScheme // is 10 reqs/sec/ip-addr. 
See // https://docs.gitlab.com/ee/user/gitlab_com/index.html#gitlabcom-specific-rate-limits auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com"); - auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s", - host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), - input.getRev()->to_string(HashFormat::Base16, false)); + auto url = + fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s", + host, + getStrAttr(input.attrs, "owner"), + getStrAttr(input.attrs, "repo"), + input.getRev()->to_string(HashFormat::Base16, false)); Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input); - return DownloadUrl { url, headers }; + return DownloadUrl{url, headers}; } void clone(const Input & input, const Path & destDir) const override { auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com"); // FIXME: get username somewhere - Input::fromURL(*input.settings, fmt("git+https://%s/%s/%s.git", - host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"))) + Input::fromURL( + *input.settings, + fmt("git+https://%s/%s/%s.git", host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"))) .applyOverrides(input.getRef(), input.getRev()) .clone(destDir); } @@ -523,7 +517,10 @@ struct GitLabInputScheme : GitArchiveInputScheme struct SourceHutInputScheme : GitArchiveInputScheme { - std::string_view schemeName() const override { return "sourcehut"; } + std::string_view schemeName() const override + { + return "sourcehut"; + } std::optional> accessHeaderFromToken(const std::string & token) const override { @@ -543,8 +540,8 @@ struct SourceHutInputScheme : GitArchiveInputScheme auto ref = *input.getRef(); auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht"); - auto base_url = fmt("https://%s/%s/%s", - host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")); + auto base_url = + fmt("https://%s/%s/%s", host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")); Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input); @@ -572,7 +569,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme std::string line; std::optional id; - while(!id && getline(is, line)) { + while (!id && getline(is, line)) { auto parsedLine = git::parseLsRemoteLine(line); if (parsedLine && parsedLine->reference && std::regex_match(*parsedLine->reference, refRegex)) id = parsedLine->target; @@ -581,27 +578,29 @@ struct SourceHutInputScheme : GitArchiveInputScheme if (!id) throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref); - return RefInfo { - .rev = Hash::parseAny(*id, HashAlgorithm::SHA1) - }; + return RefInfo{.rev = Hash::parseAny(*id, HashAlgorithm::SHA1)}; } DownloadUrl getDownloadUrl(const Input & input) const override { auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht"); - auto url = fmt("https://%s/%s/%s/archive/%s.tar.gz", - host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), - input.getRev()->to_string(HashFormat::Base16, false)); + auto url = + fmt("https://%s/%s/%s/archive/%s.tar.gz", + host, + getStrAttr(input.attrs, "owner"), + getStrAttr(input.attrs, "repo"), + input.getRev()->to_string(HashFormat::Base16, false)); Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input); - return DownloadUrl { url, headers }; + return DownloadUrl{url, headers}; } void clone(const Input & input, const Path & destDir) const override { auto host = 
maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht"); - Input::fromURL(*input.settings, fmt("git+https://%s/%s/%s", - host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"))) + Input::fromURL( + *input.settings, + fmt("git+https://%s/%s/%s", host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"))) .applyOverrides(input.getRef(), input.getRev()) .clone(destDir); } @@ -611,4 +610,4 @@ static auto rGitHubInputScheme = OnStartup([] { registerInputScheme(std::make_un static auto rGitLabInputScheme = OnStartup([] { registerInputScheme(std::make_unique()); }); static auto rSourceHutInputScheme = OnStartup([] { registerInputScheme(std::make_unique()); }); -} +} // namespace nix::fetchers diff --git a/src/libfetchers/include/nix/fetchers/attrs.hh b/src/libfetchers/include/nix/fetchers/attrs.hh index 582abd144..8a21b8ddb 100644 --- a/src/libfetchers/include/nix/fetchers/attrs.hh +++ b/src/libfetchers/include/nix/fetchers/attrs.hh @@ -41,4 +41,4 @@ StringMap attrsToQuery(const Attrs & attrs); Hash getRevAttr(const Attrs & attrs, const std::string & name); -} +} // namespace nix::fetchers diff --git a/src/libfetchers/include/nix/fetchers/cache.hh b/src/libfetchers/include/nix/fetchers/cache.hh index 6ac693183..7219635ec 100644 --- a/src/libfetchers/include/nix/fetchers/cache.hh +++ b/src/libfetchers/include/nix/fetchers/cache.hh @@ -12,7 +12,7 @@ namespace nix::fetchers { */ struct Cache { - virtual ~Cache() { } + virtual ~Cache() {} /** * A domain is a partition of the key/value cache for a particular @@ -28,22 +28,18 @@ struct Cache /** * Add a key/value pair to the cache. */ - virtual void upsert( - const Key & key, - const Attrs & value) = 0; + virtual void upsert(const Key & key, const Attrs & value) = 0; /** * Look up a key with infinite TTL. */ - virtual std::optional lookup( - const Key & key) = 0; + virtual std::optional lookup(const Key & key) = 0; /** * Look up a key. Return nothing if its TTL has exceeded * `settings.tarballTTL`. */ - virtual std::optional lookupWithTTL( - const Key & key) = 0; + virtual std::optional lookupWithTTL(const Key & key) = 0; struct Result { @@ -55,19 +51,14 @@ struct Cache * Look up a key. Return a bool denoting whether its TTL has * exceeded `settings.tarballTTL`. */ - virtual std::optional lookupExpired( - const Key & key) = 0; + virtual std::optional lookupExpired(const Key & key) = 0; /** * Insert a cache entry that has a store path associated with * it. Such cache entries are always considered stale if the * associated store path is invalid. */ - virtual void upsert( - Key key, - Store & store, - Attrs value, - const StorePath & storePath) = 0; + virtual void upsert(Key key, Store & store, Attrs value, const StorePath & storePath) = 0; struct ResultWithStorePath : Result { @@ -78,17 +69,13 @@ struct Cache * Look up a store path in the cache. The returned store path will * be valid, but it may be expired. */ - virtual std::optional lookupStorePath( - Key key, - Store & store) = 0; + virtual std::optional lookupStorePath(Key key, Store & store) = 0; /** * Look up a store path in the cache. Return nothing if its TTL * has exceeded `settings.tarballTTL`. 
*/ - virtual std::optional lookupStorePathWithTTL( - Key key, - Store & store) = 0; + virtual std::optional lookupStorePathWithTTL(Key key, Store & store) = 0; }; -} +} // namespace nix::fetchers diff --git a/src/libfetchers/include/nix/fetchers/fetch-settings.hh b/src/libfetchers/include/nix/fetchers/fetch-settings.hh index 9cfd25e0b..605b95e0d 100644 --- a/src/libfetchers/include/nix/fetchers/fetch-settings.hh +++ b/src/libfetchers/include/nix/fetchers/fetch-settings.hh @@ -19,7 +19,10 @@ struct Settings : public Config { Settings(); - Setting accessTokens{this, {}, "access-tokens", + Setting accessTokens{ + this, + {}, + "access-tokens", R"( Access tokens used to access protected GitHub, GitLab, or other locations requiring token-based authentication. @@ -70,11 +73,9 @@ struct Settings : public Config value. )"}; - Setting allowDirty{this, true, "allow-dirty", - "Whether to allow dirty Git/Mercurial trees."}; + Setting allowDirty{this, true, "allow-dirty", "Whether to allow dirty Git/Mercurial trees."}; - Setting warnDirty{this, true, "warn-dirty", - "Whether to warn about dirty Git/Mercurial trees."}; + Setting warnDirty{this, true, "warn-dirty", "Whether to warn about dirty Git/Mercurial trees."}; Setting allowDirtyLocks{ this, @@ -93,7 +94,9 @@ struct Settings : public Config Xp::Flakes}; Setting trustTarballsFromGitForges{ - this, true, "trust-tarballs-from-git-forges", + this, + true, + "trust-tarballs-from-git-forges", R"( If enabled (the default), Nix considers tarballs from GitHub and similar Git forges to be locked if a Git revision @@ -107,13 +110,18 @@ struct Settings : public Config e.g. `github:NixOS/patchelf/7c2f768bf9601268a4e71c2ebe91e2011918a70f?narHash=sha256-PPXqKY2hJng4DBVE0I4xshv/vGLUskL7jl53roB8UdU%3D`. )"}; - Setting flakeRegistry{this, "https://channels.nixos.org/flake-registry.json", "flake-registry", + Setting flakeRegistry{ + this, + "https://channels.nixos.org/flake-registry.json", + "flake-registry", R"( Path or URI of the global flake registry. When empty, disables the global flake registry. )", - {}, true, Xp::Flakes}; + {}, + true, + Xp::Flakes}; ref getCache() const; @@ -121,4 +129,4 @@ private: mutable Sync> _cache; }; -} +} // namespace nix::fetchers diff --git a/src/libfetchers/include/nix/fetchers/fetch-to-store.hh b/src/libfetchers/include/nix/fetchers/fetch-to-store.hh index a52d567ec..3a2232302 100644 --- a/src/libfetchers/include/nix/fetchers/fetch-to-store.hh +++ b/src/libfetchers/include/nix/fetchers/fetch-to-store.hh @@ -27,4 +27,4 @@ StorePath fetchToStore( fetchers::Cache::Key makeFetchToStoreCacheKey( const std::string & name, const std::string & fingerprint, ContentAddressMethod method, const std::string & path); -} +} // namespace nix diff --git a/src/libfetchers/include/nix/fetchers/fetchers.hh b/src/libfetchers/include/nix/fetchers/fetchers.hh index 1f8f6bdac..9dcd365ea 100644 --- a/src/libfetchers/include/nix/fetchers/fetchers.hh +++ b/src/libfetchers/include/nix/fetchers/fetchers.hh @@ -13,7 +13,11 @@ #include "nix/util/ref.hh" -namespace nix { class Store; class StorePath; struct SourceAccessor; } +namespace nix { +class Store; +class StorePath; +struct SourceAccessor; +} // namespace nix namespace nix::fetchers { @@ -36,7 +40,8 @@ struct Input Input(const Settings & settings) : settings{&settings} - { } + { + } std::shared_ptr scheme; // note: can be null Attrs attrs; @@ -52,22 +57,16 @@ public: * * The URL indicate which sort of fetcher, and provides information to that fetcher. 
*/ - static Input fromURL( - const Settings & settings, - const std::string & url, bool requireTree = true); + static Input fromURL(const Settings & settings, const std::string & url, bool requireTree = true); - static Input fromURL( - const Settings & settings, - const ParsedURL & url, bool requireTree = true); + static Input fromURL(const Settings & settings, const ParsedURL & url, bool requireTree = true); /** * Create an `Input` from a an `Attrs`. * * The URL indicate which sort of fetcher, and provides information to that fetcher. */ - static Input fromAttrs( - const Settings & settings, - Attrs && attrs); + static Input fromAttrs(const Settings & settings, Attrs && attrs); ParsedURL toURL() const; @@ -108,9 +107,9 @@ public: */ bool isFinal() const; - bool operator ==(const Input & other) const noexcept; + bool operator==(const Input & other) const noexcept; - bool operator <(const Input & other) const + bool operator<(const Input & other) const { return attrs < other.attrs; } @@ -149,9 +148,7 @@ private: public: - Input applyOverrides( - std::optional ref, - std::optional rev) const; + Input applyOverrides(std::optional ref, std::optional rev) const; void clone(const Path & destDir) const; @@ -161,10 +158,7 @@ public: * Write a file to this input, for input types that support * writing. Optionally commit the change (for e.g. Git inputs). */ - void putFile( - const CanonPath & path, - std::string_view contents, - std::optional commitMsg) const; + void putFile(const CanonPath & path, std::string_view contents, std::optional commitMsg) const; std::string getName() const; @@ -200,16 +194,12 @@ public: */ struct InputScheme { - virtual ~InputScheme() - { } + virtual ~InputScheme() {} - virtual std::optional inputFromURL( - const Settings & settings, - const ParsedURL & url, bool requireTree) const = 0; + virtual std::optional + inputFromURL(const Settings & settings, const ParsedURL & url, bool requireTree) const = 0; - virtual std::optional inputFromAttrs( - const Settings & settings, - const Attrs & attrs) const = 0; + virtual std::optional inputFromAttrs(const Settings & settings, const Attrs & attrs) const = 0; /** * What is the name of the scheme? 
@@ -231,10 +221,7 @@ struct InputScheme virtual ParsedURL toURL(const Input & input) const; - virtual Input applyOverrides( - const Input & input, - std::optional ref, - std::optional rev) const; + virtual Input applyOverrides(const Input & input, std::optional ref, std::optional rev) const; virtual void clone(const Input & input, const Path & destDir) const; @@ -254,19 +241,30 @@ struct InputScheme virtual std::optional experimentalFeature() const; virtual bool isDirect(const Input & input) const - { return true; } + { + return true; + } virtual std::optional getFingerprint(ref store, const Input & input) const - { return std::nullopt; } + { + return std::nullopt; + } virtual bool isLocked(const Input & input) const - { return false; } + { + return false; + } virtual std::optional isRelative(const Input & input) const - { return std::nullopt; } + { + return std::nullopt; + } - virtual std::optional getAccessToken(const fetchers::Settings & settings, const std::string & host, const std::string & url) const - { return {};} + virtual std::optional + getAccessToken(const fetchers::Settings & settings, const std::string & host, const std::string & url) const + { + return {}; + } }; void registerInputScheme(std::shared_ptr && fetcher); @@ -278,11 +276,11 @@ struct PublicKey std::string type = "ssh-ed25519"; std::string key; - auto operator <=>(const PublicKey &) const = default; + auto operator<=>(const PublicKey &) const = default; }; -std::string publicKeys_to_string(const std::vector&); +std::string publicKeys_to_string(const std::vector &); -} +} // namespace nix::fetchers JSON_IMPL(fetchers::PublicKey) diff --git a/src/libfetchers/include/nix/fetchers/filtering-source-accessor.hh b/src/libfetchers/include/nix/fetchers/filtering-source-accessor.hh index 2b59f03ca..70e837ff4 100644 --- a/src/libfetchers/include/nix/fetchers/filtering-source-accessor.hh +++ b/src/libfetchers/include/nix/fetchers/filtering-source-accessor.hh @@ -92,4 +92,4 @@ struct CachingFilteringSourceAccessor : FilteringSourceAccessor virtual bool isAllowedUncached(const CanonPath & path) = 0; }; -} +} // namespace nix diff --git a/src/libfetchers/include/nix/fetchers/git-utils.hh b/src/libfetchers/include/nix/fetchers/git-utils.hh index 2926deb4f..2ea2acd02 100644 --- a/src/libfetchers/include/nix/fetchers/git-utils.hh +++ b/src/libfetchers/include/nix/fetchers/git-utils.hh @@ -5,7 +5,10 @@ namespace nix { -namespace fetchers { struct PublicKey; struct Settings; } +namespace fetchers { +struct PublicKey; +struct Settings; +} // namespace fetchers /** * A sink that writes into a Git repository. 
Note that nothing may be written @@ -21,8 +24,7 @@ struct GitFileSystemObjectSink : ExtendedFileSystemObjectSink struct GitRepo { - virtual ~GitRepo() - { } + virtual ~GitRepo() {} static ref openRepo(const std::filesystem::path & path, bool create = false, bool bare = false); @@ -86,30 +88,23 @@ struct GitRepo virtual bool hasObject(const Hash & oid) = 0; - virtual ref getAccessor( - const Hash & rev, - bool exportIgnore, - std::string displayPrefix, - bool smudgeLfs = false) = 0; + virtual ref + getAccessor(const Hash & rev, bool exportIgnore, std::string displayPrefix, bool smudgeLfs = false) = 0; - virtual ref getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) = 0; + virtual ref + getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) = 0; virtual ref getFileSystemObjectSink() = 0; virtual void flush() = 0; - virtual void fetch( - const std::string & url, - const std::string & refspec, - bool shallow) = 0; + virtual void fetch(const std::string & url, const std::string & refspec, bool shallow) = 0; /** * Verify that commit `rev` is signed by one of the keys in * `publicKeys`. Throw an error if it isn't. */ - virtual void verifyCommit( - const Hash & rev, - const std::vector & publicKeys) = 0; + virtual void verifyCommit(const Hash & rev, const std::vector & publicKeys) = 0; /** * Given a Git tree hash, compute the hash of its NAR @@ -131,8 +126,11 @@ ref getTarballCache(); template struct Deleter { - template - void operator()(T * p) const { del(p); }; + template + void operator()(T * p) const + { + del(p); + }; }; // A helper to ensure that we don't leak objects returned by libgit2. @@ -142,11 +140,21 @@ struct Setter T & t; typename T::pointer p = nullptr; - Setter(T & t) : t(t) { } + Setter(T & t) + : t(t) + { + } - ~Setter() { if (p) t = T(p); } + ~Setter() + { + if (p) + t = T(p); + } - operator typename T::pointer * () { return &p; } + operator typename T::pointer *() + { + return &p; + } }; -} +} // namespace nix diff --git a/src/libfetchers/include/nix/fetchers/input-cache.hh b/src/libfetchers/include/nix/fetchers/input-cache.hh index f9278053a..b2fc84245 100644 --- a/src/libfetchers/include/nix/fetchers/input-cache.hh +++ b/src/libfetchers/include/nix/fetchers/input-cache.hh @@ -32,4 +32,4 @@ struct InputCache virtual ~InputCache() = default; }; -} +} // namespace nix::fetchers diff --git a/src/libfetchers/include/nix/fetchers/registry.hh b/src/libfetchers/include/nix/fetchers/registry.hh index efbfe07c8..90fc3d853 100644 --- a/src/libfetchers/include/nix/fetchers/registry.hh +++ b/src/libfetchers/include/nix/fetchers/registry.hh @@ -4,7 +4,9 @@ #include "nix/util/types.hh" #include "nix/fetchers/fetchers.hh" -namespace nix { class Store; } +namespace nix { +class Store; +} namespace nix::fetchers { @@ -34,18 +36,14 @@ struct Registry Registry(const Settings & settings, RegistryType type) : settings{settings} , type{type} - { } + { + } - static std::shared_ptr read( - const Settings & settings, - const Path & path, RegistryType type); + static std::shared_ptr read(const Settings & settings, const Path & path, RegistryType type); void write(const Path & path); - void add( - const Input & from, - const Input & to, - const Attrs & extraAttrs); + void add(const Input & from, const Input & to, const Attrs & extraAttrs); void remove(const Input & input); }; @@ -60,10 +58,7 @@ Path getUserRegistryPath(); Registries getRegistries(const Settings & settings, ref store); -void overrideRegistry( - const 
Input & from, - const Input & to, - const Attrs & extraAttrs); +void overrideRegistry(const Input & from, const Input & to, const Attrs & extraAttrs); enum class UseRegistries : int { No, @@ -75,9 +70,6 @@ enum class UseRegistries : int { * Rewrite a flakeref using the registries. If `filter` is set, only * use the registries for which the filter function returns true. */ -std::pair lookupInRegistries( - ref store, - const Input & input, - UseRegistries useRegistries); +std::pair lookupInRegistries(ref store, const Input & input, UseRegistries useRegistries); -} +} // namespace nix::fetchers diff --git a/src/libfetchers/include/nix/fetchers/store-path-accessor.hh b/src/libfetchers/include/nix/fetchers/store-path-accessor.hh index 021df5a62..a107293f8 100644 --- a/src/libfetchers/include/nix/fetchers/store-path-accessor.hh +++ b/src/libfetchers/include/nix/fetchers/store-path-accessor.hh @@ -11,4 +11,4 @@ ref makeStorePathAccessor(ref store, const StorePath & st SourcePath getUnfilteredRootPath(CanonPath path); -} +} // namespace nix diff --git a/src/libfetchers/include/nix/fetchers/tarball.hh b/src/libfetchers/include/nix/fetchers/tarball.hh index 2c5ea209f..be816a24c 100644 --- a/src/libfetchers/include/nix/fetchers/tarball.hh +++ b/src/libfetchers/include/nix/fetchers/tarball.hh @@ -10,7 +10,7 @@ namespace nix { class Store; struct SourceAccessor; -} +} // namespace nix namespace nix::fetchers { @@ -43,9 +43,6 @@ struct DownloadTarballResult * Download and import a tarball into the Git cache. The result is the * Git tree hash of the root directory. */ -ref downloadTarball( - ref store, - const Settings & settings, - const std::string & url); +ref downloadTarball(ref store, const Settings & settings, const std::string & url); -} +} // namespace nix::fetchers diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc index 47cb7587c..f949679c2 100644 --- a/src/libfetchers/indirect.cc +++ b/src/libfetchers/indirect.cc @@ -8,11 +8,10 @@ std::regex flakeRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript); struct IndirectInputScheme : InputScheme { - std::optional inputFromURL( - const Settings & settings, - const ParsedURL & url, bool requireTree) const override + std::optional inputFromURL(const Settings & settings, const ParsedURL & url, bool requireTree) const override { - if (url.scheme != "flake") return {}; + if (url.scheme != "flake") + return {}; auto path = tokenizeString>(url.path, "/"); @@ -46,8 +45,10 @@ struct IndirectInputScheme : InputScheme Input input{settings}; input.attrs.insert_or_assign("type", "indirect"); input.attrs.insert_or_assign("id", id); - if (rev) input.attrs.insert_or_assign("rev", rev->gitRev()); - if (ref) input.attrs.insert_or_assign("ref", *ref); + if (rev) + input.attrs.insert_or_assign("rev", rev->gitRev()); + if (ref) + input.attrs.insert_or_assign("ref", *ref); return input; } @@ -67,9 +68,7 @@ struct IndirectInputScheme : InputScheme }; } - std::optional inputFromAttrs( - const Settings & settings, - const Attrs & attrs) const override + std::optional inputFromAttrs(const Settings & settings, const Attrs & attrs) const override { auto id = getStrAttr(attrs, "id"); if (!std::regex_match(id, flakeRegex)) @@ -85,19 +84,24 @@ struct IndirectInputScheme : InputScheme ParsedURL url; url.scheme = "flake"; url.path = getStrAttr(input.attrs, "id"); - if (auto ref = input.getRef()) { url.path += '/'; url.path += *ref; }; - if (auto rev = input.getRev()) { url.path += '/'; url.path += rev->gitRev(); }; + if (auto ref = input.getRef()) { + url.path 
+= '/'; + url.path += *ref; + }; + if (auto rev = input.getRev()) { + url.path += '/'; + url.path += rev->gitRev(); + }; return url; } - Input applyOverrides( - const Input & _input, - std::optional ref, - std::optional rev) const override + Input applyOverrides(const Input & _input, std::optional ref, std::optional rev) const override { auto input(_input); - if (rev) input.attrs.insert_or_assign("rev", rev->gitRev()); - if (ref) input.attrs.insert_or_assign("ref", *ref); + if (rev) + input.attrs.insert_or_assign("rev", rev->gitRev()); + if (ref) + input.attrs.insert_or_assign("ref", *ref); return input; } @@ -112,9 +116,11 @@ struct IndirectInputScheme : InputScheme } bool isDirect(const Input & input) const override - { return false; } + { + return false; + } }; static auto rIndirectInputScheme = OnStartup([] { registerInputScheme(std::make_unique()); }); -} +} // namespace nix::fetchers diff --git a/src/libfetchers/input-cache.cc b/src/libfetchers/input-cache.cc index 1a4bb28a3..1422c1d9a 100644 --- a/src/libfetchers/input-cache.cc +++ b/src/libfetchers/input-cache.cc @@ -73,4 +73,4 @@ ref InputCache::create() return make_ref(); } -} +} // namespace nix::fetchers diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc index 0b63876de..9b17d675e 100644 --- a/src/libfetchers/mercurial.cc +++ b/src/libfetchers/mercurial.cc @@ -21,12 +21,7 @@ static RunOptions hgOptions(const Strings & args) // Set HGPLAIN: this means we get consistent output from hg and avoids leakage from a user or system .hgrc. env["HGPLAIN"] = ""; - return { - .program = "hg", - .lookupPath = true, - .args = args, - .environment = env - }; + return {.program = "hg", .lookupPath = true, .args = args, .environment = env}; } // runProgram wrapper that uses hgOptions instead of stock RunOptions. 
@@ -45,14 +40,10 @@ static std::string runHg(const Strings & args, const std::optional struct MercurialInputScheme : InputScheme { - std::optional inputFromURL( - const Settings & settings, - const ParsedURL & url, bool requireTree) const override + std::optional inputFromURL(const Settings & settings, const ParsedURL & url, bool requireTree) const override { - if (url.scheme != "hg+http" && - url.scheme != "hg+https" && - url.scheme != "hg+ssh" && - url.scheme != "hg+file") return {}; + if (url.scheme != "hg+http" && url.scheme != "hg+https" && url.scheme != "hg+ssh" && url.scheme != "hg+file") + return {}; auto url2(url); url2.scheme = std::string(url2.scheme, 3); @@ -61,7 +52,7 @@ struct MercurialInputScheme : InputScheme Attrs attrs; attrs.emplace("type", "hg"); - for (auto &[name, value] : url.query) { + for (auto & [name, value] : url.query) { if (name == "rev" || name == "ref") attrs.emplace(name, value); else @@ -90,9 +81,7 @@ struct MercurialInputScheme : InputScheme }; } - std::optional inputFromAttrs( - const Settings & settings, - const Attrs & attrs) const override + std::optional inputFromAttrs(const Settings & settings, const Attrs & attrs) const override { parseURL(getStrAttr(attrs, "url")); @@ -110,19 +99,20 @@ struct MercurialInputScheme : InputScheme { auto url = parseURL(getStrAttr(input.attrs, "url")); url.scheme = "hg+" + url.scheme; - if (auto rev = input.getRev()) url.query.insert_or_assign("rev", rev->gitRev()); - if (auto ref = input.getRef()) url.query.insert_or_assign("ref", *ref); + if (auto rev = input.getRev()) + url.query.insert_or_assign("rev", rev->gitRev()); + if (auto ref = input.getRef()) + url.query.insert_or_assign("ref", *ref); return url; } - Input applyOverrides( - const Input & input, - std::optional ref, - std::optional rev) const override + Input applyOverrides(const Input & input, std::optional ref, std::optional rev) const override { auto res(input); - if (rev) res.attrs.insert_or_assign("rev", rev->gitRev()); - if (ref) res.attrs.insert_or_assign("ref", *ref); + if (rev) + res.attrs.insert_or_assign("rev", rev->gitRev()); + if (ref) + res.attrs.insert_or_assign("ref", *ref); return res; } @@ -142,19 +132,20 @@ struct MercurialInputScheme : InputScheme { auto [isLocal, repoPath] = getActualUrl(input); if (!isLocal) - throw Error("cannot commit '%s' to Mercurial repository '%s' because it's not a working tree", path, input.to_string()); + throw Error( + "cannot commit '%s' to Mercurial repository '%s' because it's not a working tree", + path, + input.to_string()); auto absPath = CanonPath(repoPath) / path; writeFile(absPath.abs(), contents); // FIXME: shut up if file is already tracked. 
- runHg( - { "add", absPath.abs() }); + runHg({"add", absPath.abs()}); if (commitMsg) - runHg( - { "commit", absPath.abs(), "-m", *commitMsg }); + runHg({"commit", absPath.abs(), "-m", *commitMsg}); } std::pair getActualUrl(const Input & input) const @@ -179,7 +170,7 @@ struct MercurialInputScheme : InputScheme if (!input.getRef() && !input.getRev() && isLocal && pathExists(actualUrl + "/.hg")) { - bool clean = runHg({ "status", "-R", actualUrl, "--modified", "--added", "--removed" }) == ""; + bool clean = runHg({"status", "-R", actualUrl, "--modified", "--added", "--removed"}) == ""; if (!clean) { @@ -192,10 +183,11 @@ struct MercurialInputScheme : InputScheme if (input.settings->warnDirty) warn("Mercurial tree '%s' is unclean", actualUrl); - input.attrs.insert_or_assign("ref", chomp(runHg({ "branch", "-R", actualUrl }))); + input.attrs.insert_or_assign("ref", chomp(runHg({"branch", "-R", actualUrl}))); auto files = tokenizeString( - runHg({ "status", "-R", actualUrl, "--clean", "--modified", "--added", "--no-status", "--print0" }), "\0"s); + runHg({"status", "-R", actualUrl, "--clean", "--modified", "--added", "--no-status", "--print0"}), + "\0"s); Path actualPath(absPath(actualUrl)); @@ -217,29 +209,28 @@ struct MercurialInputScheme : InputScheme auto storePath = store->addToStore( input.getName(), {getFSSourceAccessor(), CanonPath(actualPath)}, - ContentAddressMethod::Raw::NixArchive, HashAlgorithm::SHA256, {}, + ContentAddressMethod::Raw::NixArchive, + HashAlgorithm::SHA256, + {}, filter); return storePath; } } - if (!input.getRef()) input.attrs.insert_or_assign("ref", "default"); + if (!input.getRef()) + input.attrs.insert_or_assign("ref", "default"); - auto revInfoKey = [&](const Hash & rev) - { + auto revInfoKey = [&](const Hash & rev) { if (rev.algo != HashAlgorithm::SHA1) - throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", rev.to_string(HashFormat::Base16, true)); + throw Error( + "Hash '%s' is not supported by Mercurial. Only sha1 is supported.", + rev.to_string(HashFormat::Base16, true)); - return Cache::Key{"hgRev", { - {"store", store->storeDir}, - {"name", name}, - {"rev", input.getRev()->gitRev()} - }}; + return Cache::Key{"hgRev", {{"store", store->storeDir}, {"name", name}, {"rev", input.getRev()->gitRev()}}}; }; - auto makeResult = [&](const Attrs & infoAttrs, const StorePath & storePath) -> StorePath - { + auto makeResult = [&](const Attrs & infoAttrs, const StorePath & storePath) -> StorePath { assert(input.getRev()); assert(!origRev || origRev == input.getRev()); input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount")); @@ -247,10 +238,7 @@ struct MercurialInputScheme : InputScheme }; /* Check the cache for the most recent rev for this URL/ref. */ - Cache::Key refToRevKey{"hgRefToRev", { - {"url", actualUrl}, - {"ref", *input.getRef()} - }}; + Cache::Key refToRevKey{"hgRefToRev", {{"url", actualUrl}, {"ref", *input.getRef()}}}; if (!input.getRev()) { if (auto res = input.settings->getCache()->lookupWithTTL(refToRevKey)) @@ -263,43 +251,47 @@ struct MercurialInputScheme : InputScheme return makeResult(res->value, res->storePath); } - Path cacheDir = fmt("%s/hg/%s", getCacheDir(), hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false)); + Path cacheDir = + fmt("%s/hg/%s", + getCacheDir(), + hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false)); /* If this is a commit hash that we already have, we don't have to pull again. 
*/ - if (!(input.getRev() - && pathExists(cacheDir) - && runProgram(hgOptions({ "log", "-R", cacheDir, "-r", input.getRev()->gitRev(), "--template", "1" })).second == "1")) - { + if (!(input.getRev() && pathExists(cacheDir) + && runProgram(hgOptions({"log", "-R", cacheDir, "-r", input.getRev()->gitRev(), "--template", "1"})) + .second + == "1")) { Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", actualUrl)); if (pathExists(cacheDir)) { try { - runHg({ "pull", "-R", cacheDir, "--", actualUrl }); - } - catch (ExecError & e) { + runHg({"pull", "-R", cacheDir, "--", actualUrl}); + } catch (ExecError & e) { auto transJournal = cacheDir + "/.hg/store/journal"; /* hg throws "abandoned transaction" error only if this file exists */ if (pathExists(transJournal)) { - runHg({ "recover", "-R", cacheDir }); - runHg({ "pull", "-R", cacheDir, "--", actualUrl }); + runHg({"recover", "-R", cacheDir}); + runHg({"pull", "-R", cacheDir, "--", actualUrl}); } else { throw ExecError(e.status, "'hg pull' %s", statusToString(e.status)); } } } else { createDirs(dirOf(cacheDir)); - runHg({ "clone", "--noupdate", "--", actualUrl, cacheDir }); + runHg({"clone", "--noupdate", "--", actualUrl, cacheDir}); } } /* Fetch the remote rev or ref. */ - auto tokens = tokenizeString>( - runHg({ - "log", "-R", cacheDir, - "-r", input.getRev() ? input.getRev()->gitRev() : *input.getRef(), - "--template", "{node} {rev} {branch}" - })); + auto tokens = tokenizeString>(runHg( + {"log", + "-R", + cacheDir, + "-r", + input.getRev() ? input.getRev()->gitRev() : *input.getRef(), + "--template", + "{node} {rev} {branch}"})); assert(tokens.size() == 3); auto rev = Hash::parseAny(tokens[0], HashAlgorithm::SHA1); @@ -315,7 +307,7 @@ struct MercurialInputScheme : InputScheme Path tmpDir = createTempDir(); AutoDelete delTmpDir(tmpDir, true); - runHg({ "archive", "-R", cacheDir, "-r", rev.gitRev(), tmpDir }); + runHg({"archive", "-R", cacheDir, "-r", rev.gitRev(), tmpDir}); deletePath(tmpDir + "/.hg_archival.txt"); @@ -362,4 +354,4 @@ struct MercurialInputScheme : InputScheme static auto rMercurialInputScheme = OnStartup([] { registerInputScheme(std::make_unique()); }); -} +} // namespace nix::fetchers diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index 9239fd274..9f8344edf 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -10,11 +10,10 @@ namespace nix::fetchers { struct PathInputScheme : InputScheme { - std::optional inputFromURL( - const Settings & settings, - const ParsedURL & url, bool requireTree) const override + std::optional inputFromURL(const Settings & settings, const ParsedURL & url, bool requireTree) const override { - if (url.scheme != "path") return {}; + if (url.scheme != "path") + return {}; if (url.authority && *url.authority != "") throw Error("path URL '%s' should not have an authority ('%s')", url, *url.authority); @@ -31,8 +30,7 @@ struct PathInputScheme : InputScheme input.attrs.insert_or_assign(name, *n); else throw Error("path URL '%s' has invalid parameter '%s'", url, name); - } - else + } else throw Error("path URL '%s' has unsupported parameter '%s'", url, name); return input; @@ -59,9 +57,7 @@ struct PathInputScheme : InputScheme }; } - std::optional inputFromAttrs( - const Settings & settings, - const Attrs & attrs) const override + std::optional inputFromAttrs(const Settings & settings, const Attrs & attrs) const override { getStrAttr(attrs, "path"); @@ -76,7 +72,7 @@ struct PathInputScheme : InputScheme query.erase("path"); 
query.erase("type"); query.erase("__final"); - return ParsedURL { + return ParsedURL{ .scheme = "path", .path = getStrAttr(input.attrs, "path"), .query = query, @@ -139,9 +135,8 @@ struct PathInputScheme : InputScheme time_t mtime = 0; if (!storePath || storePath->name() != "source" || !store->isValidPath(*storePath)) { // FIXME: try to substitute storePath. - auto src = sinkToSource([&](Sink & sink) { - mtime = dumpPathAndGetMtime(absPath.string(), sink, defaultPathFilter); - }); + auto src = sinkToSource( + [&](Sink & sink) { mtime = dumpPathAndGetMtime(absPath.string(), sink, defaultPathFilter); }); storePath = store->addToStoreFromDump(*src, "source"); } @@ -186,4 +181,4 @@ struct PathInputScheme : InputScheme static auto rPathInputScheme = OnStartup([] { registerInputScheme(std::make_unique()); }); -} +} // namespace nix::fetchers diff --git a/src/libfetchers/registry.cc b/src/libfetchers/registry.cc index 335935f53..e570fc84b 100644 --- a/src/libfetchers/registry.cc +++ b/src/libfetchers/registry.cc @@ -10,9 +10,7 @@ namespace nix::fetchers { -std::shared_ptr Registry::read( - const Settings & settings, - const Path & path, RegistryType type) +std::shared_ptr Registry::read(const Settings & settings, const Path & path, RegistryType type) { debug("reading registry '%s'", path); @@ -38,12 +36,11 @@ std::shared_ptr Registry::read( } auto exact = i.find("exact"); registry->entries.push_back( - Entry { + Entry{ .from = Input::fromAttrs(settings, jsonToAttrs(i["from"])), .to = Input::fromAttrs(settings, std::move(toAttrs)), .extraAttrs = extraAttrs, - .exact = exact != i.end() && exact.value() - }); + .exact = exact != i.end() && exact.value()}); } } @@ -81,17 +78,9 @@ void Registry::write(const Path & path) writeFile(path, json.dump(2)); } -void Registry::add( - const Input & from, - const Input & to, - const Attrs & extraAttrs) +void Registry::add(const Input & from, const Input & to, const Attrs & extraAttrs) { - entries.emplace_back( - Entry { - .from = from, - .to = to, - .extraAttrs = extraAttrs - }); + entries.emplace_back(Entry{.from = from, .to = to, .extraAttrs = extraAttrs}); } void Registry::remove(const Input & input) @@ -108,8 +97,7 @@ static Path getSystemRegistryPath() static std::shared_ptr getSystemRegistry(const Settings & settings) { - static auto systemRegistry = - Registry::read(settings, getSystemRegistryPath(), Registry::System); + static auto systemRegistry = Registry::read(settings, getSystemRegistryPath(), Registry::System); return systemRegistry; } @@ -120,29 +108,23 @@ Path getUserRegistryPath() std::shared_ptr getUserRegistry(const Settings & settings) { - static auto userRegistry = - Registry::read(settings, getUserRegistryPath(), Registry::User); + static auto userRegistry = Registry::read(settings, getUserRegistryPath(), Registry::User); return userRegistry; } std::shared_ptr getCustomRegistry(const Settings & settings, const Path & p) { - static auto customRegistry = - Registry::read(settings, p, Registry::Custom); + static auto customRegistry = Registry::read(settings, p, Registry::Custom); return customRegistry; } std::shared_ptr getFlagRegistry(const Settings & settings) { - static auto flagRegistry = - std::make_shared(settings, Registry::Flag); + static auto flagRegistry = std::make_shared(settings, Registry::Flag); return flagRegistry; } -void overrideRegistry( - const Input & from, - const Input & to, - const Attrs & extraAttrs) +void overrideRegistry(const Input & from, const Input & to, const Attrs & extraAttrs) { 
getFlagRegistry(*from.settings)->add(from, to, extraAttrs); } @@ -178,10 +160,7 @@ Registries getRegistries(const Settings & settings, ref store) return registries; } -std::pair lookupInRegistries( - ref store, - const Input & _input, - UseRegistries useRegistries) +std::pair lookupInRegistries(ref store, const Input & _input, UseRegistries useRegistries) { Attrs extraAttrs; int n = 0; @@ -190,10 +169,11 @@ std::pair lookupInRegistries( if (useRegistries == UseRegistries::No) return {input, extraAttrs}; - restart: +restart: n++; - if (n > 100) throw Error("cycle detected in flake registry for '%s'", input.to_string()); + if (n > 100) + throw Error("cycle detected in flake registry for '%s'", input.to_string()); for (auto & registry : getRegistries(*input.settings, store)) { if (useRegistries == UseRegistries::Limited @@ -229,4 +209,4 @@ std::pair lookupInRegistries( return {input, extraAttrs}; } -} +} // namespace nix::fetchers diff --git a/src/libfetchers/store-path-accessor.cc b/src/libfetchers/store-path-accessor.cc index f389d0327..65160e311 100644 --- a/src/libfetchers/store-path-accessor.cc +++ b/src/libfetchers/store-path-accessor.cc @@ -8,4 +8,4 @@ ref makeStorePathAccessor(ref store, const StorePath & st return projectSubdirSourceAccessor(store->getFSAccessor(), storePath.to_string()); } -} +} // namespace nix diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index 59316eabd..309bbaf5a 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -22,15 +22,16 @@ DownloadFileResult downloadFile( { // FIXME: check store - Cache::Key key{"file", {{ - {"url", url}, - {"name", name}, - }}}; + Cache::Key key{ + "file", + {{ + {"url", url}, + {"name", name}, + }}}; auto cached = settings.getCache()->lookupStorePath(key, *store); - auto useCached = [&]() -> DownloadFileResult - { + auto useCached = [&]() -> DownloadFileResult { return { .storePath = std::move(cached->storePath), .etag = getStrAttr(cached->value, "etag"), @@ -73,10 +74,10 @@ DownloadFileResult downloadFile( StringSink sink; dumpString(res.data, sink); auto hash = hashString(HashAlgorithm::SHA256, res.data); - ValidPathInfo info { + ValidPathInfo info{ *store, name, - FixedOutputInfo { + FixedOutputInfo{ .method = FileIngestionMethod::Flat, .hash = hash, .references = {}, @@ -84,7 +85,7 @@ DownloadFileResult downloadFile( hashString(HashAlgorithm::SHA256, sink.s), }; info.narSize = sink.s.size(); - auto source = StringSource { sink.s }; + auto source = StringSource{sink.s}; store->addToStore(info, source, NoRepair, NoCheckSigs); storePath = std::move(info.path); } @@ -106,10 +107,7 @@ DownloadFileResult downloadFile( } static DownloadTarballResult downloadTarball_( - const Settings & settings, - const std::string & url, - const Headers & headers, - const std::string & displayPrefix) + const Settings & settings, const std::string & url, const Headers & headers, const std::string & displayPrefix) { // Some friendly error messages for common mistakes. 
@@ -134,10 +132,9 @@ static DownloadTarballResult downloadTarball_( auto cached = settings.getCache()->lookupExpired(cacheKey); - auto attrsToResult = [&](const Attrs & infoAttrs) - { + auto attrsToResult = [&](const Attrs & infoAttrs) { auto treeHash = getRevAttr(infoAttrs, "treeHash"); - return DownloadTarballResult { + return DownloadTarballResult{ .treeHash = treeHash, .lastModified = (time_t) getIntAttr(infoAttrs, "lastModified"), .immutableUrl = maybeGetStrAttr(infoAttrs, "immutableUrl"), @@ -158,39 +155,32 @@ static DownloadTarballResult downloadTarball_( auto source = sinkToSource([&](Sink & sink) { FileTransferRequest req(url); req.expectedETag = cached ? getStrAttr(cached->value, "etag") : ""; - getFileTransfer()->download(std::move(req), sink, - [_res](FileTransferResult r) - { - *_res->lock() = r; - }); + getFileTransfer()->download(std::move(req), sink, [_res](FileTransferResult r) { *_res->lock() = r; }); }); // TODO: fall back to cached value if download fails. - auto act = std::make_unique(*logger, lvlInfo, actUnknown, - fmt("unpacking '%s' into the Git cache", url)); + auto act = std::make_unique(*logger, lvlInfo, actUnknown, fmt("unpacking '%s' into the Git cache", url)); AutoDelete cleanupTemp; /* Note: if the download is cached, `importTarball()` will receive no data, which causes it to import an empty tarball. */ - auto archive = - hasSuffix(toLower(parseURL(url).path), ".zip") - ? ({ - /* In streaming mode, libarchive doesn't handle - symlinks in zip files correctly (#10649). So write - the entire file to disk so libarchive can access it - in random-access mode. */ - auto [fdTemp, path] = createTempFile("nix-zipfile"); - cleanupTemp.reset(path); - debug("downloading '%s' into '%s'...", url, path); - { - FdSink sink(fdTemp.get()); - source->drainInto(sink); - } - TarArchive{path}; - }) - : TarArchive{*source}; + auto archive = hasSuffix(toLower(parseURL(url).path), ".zip") ? ({ + /* In streaming mode, libarchive doesn't handle + symlinks in zip files correctly (#10649). So write + the entire file to disk so libarchive can access it + in random-access mode. */ + auto [fdTemp, path] = createTempFile("nix-zipfile"); + cleanupTemp.reset(path); + debug("downloading '%s' into '%s'...", url, path); + { + FdSink sink(fdTemp.get()); + source->drainInto(sink); + } + TarArchive{path}; + }) + : TarArchive{*source}; auto tarballCache = getTarballCache(); auto parseSink = tarballCache->getFileSystemObjectSink(); auto lastModified = unpackTarfileToSink(archive, *parseSink); @@ -208,8 +198,7 @@ static DownloadTarballResult downloadTarball_( infoAttrs = cached->value; } else { infoAttrs.insert_or_assign("etag", res->etag); - infoAttrs.insert_or_assign("treeHash", - tarballCache->dereferenceSingletonDirectory(tree).gitRev()); + infoAttrs.insert_or_assign("treeHash", tarballCache->dereferenceSingletonDirectory(tree).gitRev()); infoAttrs.insert_or_assign("lastModified", uint64_t(lastModified)); if (res->immutableUrl) infoAttrs.insert_or_assign("immutableUrl", *res->immutableUrl); @@ -227,10 +216,7 @@ static DownloadTarballResult downloadTarball_( return attrsToResult(infoAttrs); } -ref downloadTarball( - ref store, - const Settings & settings, - const std::string & url) +ref downloadTarball(ref store, const Settings & settings, const std::string & url) { /* Go through Input::getAccessor() to ensure that the resulting accessor has a fingerprint. 
*/ @@ -250,19 +236,17 @@ struct CurlInputScheme : InputScheme bool hasTarballExtension(std::string_view path) const { - return hasSuffix(path, ".zip") || hasSuffix(path, ".tar") - || hasSuffix(path, ".tgz") || hasSuffix(path, ".tar.gz") - || hasSuffix(path, ".tar.xz") || hasSuffix(path, ".tar.bz2") - || hasSuffix(path, ".tar.zst"); + return hasSuffix(path, ".zip") || hasSuffix(path, ".tar") || hasSuffix(path, ".tgz") + || hasSuffix(path, ".tar.gz") || hasSuffix(path, ".tar.xz") || hasSuffix(path, ".tar.bz2") + || hasSuffix(path, ".tar.zst"); } virtual bool isValidURL(const ParsedURL & url, bool requireTree) const = 0; static const StringSet specialParams; - std::optional inputFromURL( - const Settings & settings, - const ParsedURL & _url, bool requireTree) const override + std::optional + inputFromURL(const Settings & settings, const ParsedURL & _url, bool requireTree) const override { if (!isValidURL(_url, requireTree)) return std::nullopt; @@ -296,7 +280,7 @@ struct CurlInputScheme : InputScheme for (auto & param : allowedAttrs()) url.query.erase(param); - input.attrs.insert_or_assign("type", std::string { schemeName() }); + input.attrs.insert_or_assign("type", std::string{schemeName()}); input.attrs.insert_or_assign("url", url.to_string()); return input; } @@ -315,14 +299,12 @@ struct CurlInputScheme : InputScheme }; } - std::optional inputFromAttrs( - const Settings & settings, - const Attrs & attrs) const override + std::optional inputFromAttrs(const Settings & settings, const Attrs & attrs) const override { Input input{settings}; input.attrs = attrs; - //input.locked = (bool) maybeGetStrAttr(input.attrs, "hash"); + // input.locked = (bool) maybeGetStrAttr(input.attrs, "hash"); return input; } @@ -344,15 +326,17 @@ struct CurlInputScheme : InputScheme struct FileInputScheme : CurlInputScheme { - std::string_view schemeName() const override { return "file"; } + std::string_view schemeName() const override + { + return "file"; + } bool isValidURL(const ParsedURL & url, bool requireTree) const override { auto parsedUrlScheme = parseUrlScheme(url.scheme); return transportUrlSchemes.count(std::string(parsedUrlScheme.transport)) - && (parsedUrlScheme.application - ? parsedUrlScheme.application.value() == schemeName() - : (!requireTree && !hasTarballExtension(url.path))); + && (parsedUrlScheme.application ? parsedUrlScheme.application.value() == schemeName() + : (!requireTree && !hasTarballExtension(url.path))); } std::pair, Input> getAccessor(ref store, const Input & _input) const override @@ -378,27 +362,26 @@ struct FileInputScheme : CurlInputScheme struct TarballInputScheme : CurlInputScheme { - std::string_view schemeName() const override { return "tarball"; } + std::string_view schemeName() const override + { + return "tarball"; + } bool isValidURL(const ParsedURL & url, bool requireTree) const override { auto parsedUrlScheme = parseUrlScheme(url.scheme); return transportUrlSchemes.count(std::string(parsedUrlScheme.transport)) - && (parsedUrlScheme.application - ? parsedUrlScheme.application.value() == schemeName() - : (requireTree || hasTarballExtension(url.path))); + && (parsedUrlScheme.application ? 
parsedUrlScheme.application.value() == schemeName() + : (requireTree || hasTarballExtension(url.path))); } std::pair, Input> getAccessor(ref store, const Input & _input) const override { auto input(_input); - auto result = downloadTarball_( - *input.settings, - getStrAttr(input.attrs, "url"), - {}, - "«" + input.to_string() + "»"); + auto result = + downloadTarball_(*input.settings, getStrAttr(input.attrs, "url"), {}, "«" + input.to_string() + "»"); if (result.immutableUrl) { auto immutableInput = Input::fromURL(*input.settings, *result.immutableUrl); @@ -412,7 +395,8 @@ struct TarballInputScheme : CurlInputScheme if (result.lastModified && !input.attrs.contains("lastModified")) input.attrs.insert_or_assign("lastModified", uint64_t(result.lastModified)); - input.attrs.insert_or_assign("narHash", + input.attrs.insert_or_assign( + "narHash", getTarballCache()->treeHashToNarHash(*input.settings, result.treeHash).to_string(HashFormat::SRI, true)); return {result.accessor, input}; @@ -432,4 +416,4 @@ struct TarballInputScheme : CurlInputScheme static auto rTarballInputScheme = OnStartup([] { registerInputScheme(std::make_unique()); }); static auto rFileInputScheme = OnStartup([] { registerInputScheme(std::make_unique()); }); -} +} // namespace nix::fetchers diff --git a/src/libflake-tests/flakeref.cc b/src/libflake-tests/flakeref.cc index 1abaffb96..eafe74a2d 100644 --- a/src/libflake-tests/flakeref.cc +++ b/src/libflake-tests/flakeref.cc @@ -7,60 +7,58 @@ namespace nix { /* ----------- tests for flake/flakeref.hh --------------------------------------------------*/ - TEST(parseFlakeRef, path) { - experimentalFeatureSettings.experimentalFeatures.get().insert(Xp::Flakes); +TEST(parseFlakeRef, path) +{ + experimentalFeatureSettings.experimentalFeatures.get().insert(Xp::Flakes); - fetchers::Settings fetchSettings; + fetchers::Settings fetchSettings; - { - auto s = "/foo/bar"; - auto flakeref = parseFlakeRef(fetchSettings, s); - ASSERT_EQ(flakeref.to_string(), "path:/foo/bar"); - } - - { - auto s = "/foo/bar?revCount=123&rev=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"; - auto flakeref = parseFlakeRef(fetchSettings, s); - ASSERT_EQ(flakeref.to_string(), "path:/foo/bar?rev=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa&revCount=123"); - } - - { - auto s = "/foo/bar?xyzzy=123"; - EXPECT_THROW( - parseFlakeRef(fetchSettings, s), - Error); - } - - { - auto s = "/foo/bar#bla"; - EXPECT_THROW( - parseFlakeRef(fetchSettings, s), - Error); - } - - { - auto s = "/foo/bar#bla"; - auto [flakeref, fragment] = parseFlakeRefWithFragment(fetchSettings, s); - ASSERT_EQ(flakeref.to_string(), "path:/foo/bar"); - ASSERT_EQ(fragment, "bla"); - } - - { - auto s = "/foo/bar?revCount=123#bla"; - auto [flakeref, fragment] = parseFlakeRefWithFragment(fetchSettings, s); - ASSERT_EQ(flakeref.to_string(), "path:/foo/bar?revCount=123"); - ASSERT_EQ(fragment, "bla"); - } - } - - TEST(to_string, doesntReencodeUrl) { - fetchers::Settings fetchSettings; - auto s = "http://localhost:8181/test/+3d.tar.gz"; + { + auto s = "/foo/bar"; auto flakeref = parseFlakeRef(fetchSettings, s); - auto unparsed = flakeref.to_string(); - auto expected = "http://localhost:8181/test/%2B3d.tar.gz"; - - ASSERT_EQ(unparsed, expected); + ASSERT_EQ(flakeref.to_string(), "path:/foo/bar"); } + { + auto s = "/foo/bar?revCount=123&rev=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"; + auto flakeref = parseFlakeRef(fetchSettings, s); + ASSERT_EQ(flakeref.to_string(), "path:/foo/bar?rev=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa&revCount=123"); + } + + { + auto s = 
"/foo/bar?xyzzy=123"; + EXPECT_THROW(parseFlakeRef(fetchSettings, s), Error); + } + + { + auto s = "/foo/bar#bla"; + EXPECT_THROW(parseFlakeRef(fetchSettings, s), Error); + } + + { + auto s = "/foo/bar#bla"; + auto [flakeref, fragment] = parseFlakeRefWithFragment(fetchSettings, s); + ASSERT_EQ(flakeref.to_string(), "path:/foo/bar"); + ASSERT_EQ(fragment, "bla"); + } + + { + auto s = "/foo/bar?revCount=123#bla"; + auto [flakeref, fragment] = parseFlakeRefWithFragment(fetchSettings, s); + ASSERT_EQ(flakeref.to_string(), "path:/foo/bar?revCount=123"); + ASSERT_EQ(fragment, "bla"); + } } + +TEST(to_string, doesntReencodeUrl) +{ + fetchers::Settings fetchSettings; + auto s = "http://localhost:8181/test/+3d.tar.gz"; + auto flakeref = parseFlakeRef(fetchSettings, s); + auto unparsed = flakeref.to_string(); + auto expected = "http://localhost:8181/test/%2B3d.tar.gz"; + + ASSERT_EQ(unparsed, expected); +} + +} // namespace nix diff --git a/src/libflake-tests/url-name.cc b/src/libflake-tests/url-name.cc index c795850f9..78de34458 100644 --- a/src/libflake-tests/url-name.cc +++ b/src/libflake-tests/url-name.cc @@ -5,66 +5,81 @@ namespace nix { /* ----------- tests for url-name.hh --------------------------------------------------*/ - TEST(getNameFromURL, getNameFromURL) { - ASSERT_EQ(getNameFromURL(parseURL("path:/home/user/project")), "project"); - ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#packages.x86_64-linux.hello")), "hello"); - ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#legacyPackages.x86_64-linux.hello")), "hello"); - ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#packages.x86_64-linux.Hello")), "Hello"); - ASSERT_EQ(getNameFromURL(parseURL("path:.#nonStandardAttr.mylaptop")), "mylaptop"); - ASSERT_EQ(getNameFromURL(parseURL("path:./repos/myflake#nonStandardAttr.mylaptop")), "mylaptop"); - ASSERT_EQ(getNameFromURL(parseURL("path:./nixpkgs#packages.x86_64-linux.complex^bin,man")), "complex"); - ASSERT_EQ(getNameFromURL(parseURL("path:./myproj#packages.x86_64-linux.default^*")), "myproj"); - ASSERT_EQ(getNameFromURL(parseURL("path:./myproj#defaultPackage.x86_64-linux")), "myproj"); +TEST(getNameFromURL, getNameFromURL) +{ + ASSERT_EQ(getNameFromURL(parseURL("path:/home/user/project")), "project"); + ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#legacyPackages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#packages.x86_64-linux.Hello")), "Hello"); + ASSERT_EQ(getNameFromURL(parseURL("path:.#nonStandardAttr.mylaptop")), "mylaptop"); + ASSERT_EQ(getNameFromURL(parseURL("path:./repos/myflake#nonStandardAttr.mylaptop")), "mylaptop"); + ASSERT_EQ(getNameFromURL(parseURL("path:./nixpkgs#packages.x86_64-linux.complex^bin,man")), "complex"); + ASSERT_EQ(getNameFromURL(parseURL("path:./myproj#packages.x86_64-linux.default^*")), "myproj"); + ASSERT_EQ(getNameFromURL(parseURL("path:./myproj#defaultPackage.x86_64-linux")), "myproj"); - ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello"); - ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nixpkgs#hello")), "hello"); - ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nix#packages.x86_64-linux.default")), "nix"); - ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nix#")), "nix"); - ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nix")), "nix"); - 
ASSERT_EQ(getNameFromURL(parseURL("github:cachix/devenv/main#packages.x86_64-linux.default")), "devenv"); - ASSERT_EQ(getNameFromURL(parseURL("github:edolstra/nix-warez?rev=1234&dir=blender&ref=master")), "blender"); + ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nixpkgs#hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nix#packages.x86_64-linux.default")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nix#")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nix")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("github:cachix/devenv/main#packages.x86_64-linux.default")), "devenv"); + ASSERT_EQ(getNameFromURL(parseURL("github:edolstra/nix-warez?rev=1234&dir=blender&ref=master")), "blender"); - ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello"); - ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nixpkgs#hello")), "hello"); - ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nix#packages.x86_64-linux.default")), "nix"); - ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nix#")), "nix"); - ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nix")), "nix"); - ASSERT_EQ(getNameFromURL(parseURL("gitlab:cachix/devenv/main#packages.x86_64-linux.default")), "devenv"); + ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nixpkgs#hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nix#packages.x86_64-linux.default")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nix#")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nix")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("gitlab:cachix/devenv/main#packages.x86_64-linux.default")), "devenv"); - ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello"); - ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nixpkgs#hello")), "hello"); - ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nix#packages.x86_64-linux.default")), "nix"); - ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nix#")), "nix"); - ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nix")), "nix"); - ASSERT_EQ(getNameFromURL(parseURL("sourcehut:cachix/devenv/main#packages.x86_64-linux.default")), "devenv"); + ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nixpkgs#hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nix#packages.x86_64-linux.default")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nix#")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nix")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("sourcehut:cachix/devenv/main#packages.x86_64-linux.default")), "devenv"); - ASSERT_EQ(getNameFromURL(parseURL("git://github.com/edolstra/dwarffs")), "dwarffs"); - ASSERT_EQ(getNameFromURL(parseURL("git://github.com/edolstra/nix-warez?dir=blender")), "blender"); - ASSERT_EQ(getNameFromURL(parseURL("git+file:///home/user/project")), "project"); - ASSERT_EQ(getNameFromURL(parseURL("git+file:///home/user/project?ref=fa1e2d23a22")), "project"); - ASSERT_EQ(getNameFromURL(parseURL("git+ssh://git@github.com/someuser/my-repo#")), "my-repo"); - ASSERT_EQ(getNameFromURL(parseURL("git+git://github.com/someuser/my-repo?rev=v1.2.3")), "my-repo"); - 
ASSERT_EQ(getNameFromURL(parseURL("git+ssh:///home/user/project?dir=subproject&rev=v2.4")), "subproject"); - ASSERT_EQ(getNameFromURL(parseURL("git+http://not-even-real#packages.x86_64-linux.hello")), "hello"); - ASSERT_EQ(getNameFromURL(parseURL("git+https://not-even-real#packages.aarch64-darwin.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("git://github.com/edolstra/dwarffs")), "dwarffs"); + ASSERT_EQ(getNameFromURL(parseURL("git://github.com/edolstra/nix-warez?dir=blender")), "blender"); + ASSERT_EQ(getNameFromURL(parseURL("git+file:///home/user/project")), "project"); + ASSERT_EQ(getNameFromURL(parseURL("git+file:///home/user/project?ref=fa1e2d23a22")), "project"); + ASSERT_EQ(getNameFromURL(parseURL("git+ssh://git@github.com/someuser/my-repo#")), "my-repo"); + ASSERT_EQ(getNameFromURL(parseURL("git+git://github.com/someuser/my-repo?rev=v1.2.3")), "my-repo"); + ASSERT_EQ(getNameFromURL(parseURL("git+ssh:///home/user/project?dir=subproject&rev=v2.4")), "subproject"); + ASSERT_EQ(getNameFromURL(parseURL("git+http://not-even-real#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("git+https://not-even-real#packages.aarch64-darwin.hello")), "hello"); - ASSERT_EQ(getNameFromURL(parseURL("tarball+http://github.com/NixOS/nix/archive/refs/tags/2.18.1#packages.x86_64-linux.jq")), "jq"); - ASSERT_EQ(getNameFromURL(parseURL("tarball+https://github.com/NixOS/nix/archive/refs/tags/2.18.1#packages.x86_64-linux.hg")), "hg"); - ASSERT_EQ(getNameFromURL(parseURL("tarball+file:///home/user/Downloads/nixpkgs-2.18.1#packages.aarch64-darwin.ripgrep")), "ripgrep"); + ASSERT_EQ( + getNameFromURL( + parseURL("tarball+http://github.com/NixOS/nix/archive/refs/tags/2.18.1#packages.x86_64-linux.jq")), + "jq"); + ASSERT_EQ( + getNameFromURL( + parseURL("tarball+https://github.com/NixOS/nix/archive/refs/tags/2.18.1#packages.x86_64-linux.hg")), + "hg"); + ASSERT_EQ( + getNameFromURL(parseURL("tarball+file:///home/user/Downloads/nixpkgs-2.18.1#packages.aarch64-darwin.ripgrep")), + "ripgrep"); - ASSERT_EQ(getNameFromURL(parseURL("https://github.com/NixOS/nix/archive/refs/tags/2.18.1.tar.gz#packages.x86_64-linux.pv")), "pv"); - ASSERT_EQ(getNameFromURL(parseURL("http://github.com/NixOS/nix/archive/refs/tags/2.18.1.tar.gz#packages.x86_64-linux.pv")), "pv"); + ASSERT_EQ( + getNameFromURL( + parseURL("https://github.com/NixOS/nix/archive/refs/tags/2.18.1.tar.gz#packages.x86_64-linux.pv")), + "pv"); + ASSERT_EQ( + getNameFromURL( + parseURL("http://github.com/NixOS/nix/archive/refs/tags/2.18.1.tar.gz#packages.x86_64-linux.pv")), + "pv"); - ASSERT_EQ(getNameFromURL(parseURL("file:///home/user/project?ref=fa1e2d23a22")), "project"); - ASSERT_EQ(getNameFromURL(parseURL("file+file:///home/user/project?ref=fa1e2d23a22")), "project"); - ASSERT_EQ(getNameFromURL(parseURL("file+http://not-even-real#packages.x86_64-linux.hello")), "hello"); - ASSERT_EQ(getNameFromURL(parseURL("file+http://gitfantasy.com/org/user/notaflake")), "notaflake"); - ASSERT_EQ(getNameFromURL(parseURL("file+https://not-even-real#packages.aarch64-darwin.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("file:///home/user/project?ref=fa1e2d23a22")), "project"); + ASSERT_EQ(getNameFromURL(parseURL("file+file:///home/user/project?ref=fa1e2d23a22")), "project"); + ASSERT_EQ(getNameFromURL(parseURL("file+http://not-even-real#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("file+http://gitfantasy.com/org/user/notaflake")), "notaflake"); + 
ASSERT_EQ(getNameFromURL(parseURL("file+https://not-even-real#packages.aarch64-darwin.hello")), "hello"); - ASSERT_EQ(getNameFromURL(parseURL("https://www.github.com/")), std::nullopt); - ASSERT_EQ(getNameFromURL(parseURL("path:.")), std::nullopt); - ASSERT_EQ(getNameFromURL(parseURL("file:.#")), std::nullopt); - ASSERT_EQ(getNameFromURL(parseURL("path:.#packages.x86_64-linux.default")), std::nullopt); - ASSERT_EQ(getNameFromURL(parseURL("path:.#packages.x86_64-linux.default^*")), std::nullopt); - } + ASSERT_EQ(getNameFromURL(parseURL("https://www.github.com/")), std::nullopt); + ASSERT_EQ(getNameFromURL(parseURL("path:.")), std::nullopt); + ASSERT_EQ(getNameFromURL(parseURL("file:.#")), std::nullopt); + ASSERT_EQ(getNameFromURL(parseURL("path:.#packages.x86_64-linux.default")), std::nullopt); + ASSERT_EQ(getNameFromURL(parseURL("path:.#packages.x86_64-linux.default^*")), std::nullopt); } +} // namespace nix diff --git a/src/libflake/config.cc b/src/libflake/config.cc index 030104e7f..c9071f601 100644 --- a/src/libflake/config.cc +++ b/src/libflake/config.cc @@ -18,7 +18,8 @@ Path trustedListPath() static TrustedList readTrustedList() { auto path = trustedListPath(); - if (!pathExists(path)) return {}; + if (!pathExists(path)) + return {}; auto json = nlohmann::json::parse(readFile(path)); return json; } @@ -32,7 +33,13 @@ static void writeTrustedList(const TrustedList & trustedList) void ConfigFile::apply(const Settings & flakeSettings) { - StringSet whitelist{"bash-prompt", "bash-prompt-prefix", "bash-prompt-suffix", "flake-registry", "commit-lock-file-summary", "commit-lockfile-summary"}; + StringSet whitelist{ + "bash-prompt", + "bash-prompt-prefix", + "bash-prompt-suffix", + "flake-registry", + "commit-lock-file-summary", + "commit-lockfile-summary"}; for (auto & [name, value] : settings) { @@ -40,11 +47,11 @@ void ConfigFile::apply(const Settings & flakeSettings) // FIXME: Move into libutil/config.cc. std::string valueS; - if (auto* s = std::get_if(&value)) + if (auto * s = std::get_if(&value)) valueS = *s; - else if (auto* n = std::get_if(&value)) + else if (auto * n = std::get_if(&value)) valueS = fmt("%d", *n); - else if (auto* b = std::get_if>(&value)) + else if (auto * b = std::get_if>(&value)) valueS = b->t ? "true" : "false"; else if (auto ss = std::get_if>(&value)) valueS = dropEmptyInitThenConcatStringsSep(" ", *ss); // FIXME: evil @@ -57,19 +64,35 @@ void ConfigFile::apply(const Settings & flakeSettings) auto tlname = get(trustedList, name); if (auto saved = tlname ? get(*tlname, valueS) : nullptr) { trusted = *saved; - printInfo("Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name, valueS); + printInfo( + "Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name, valueS); } else { // FIXME: filter ANSI escapes, newlines, \r, etc. - if (std::tolower(logger->ask(fmt("do you want to allow configuration setting '%s' to be set to '" ANSI_RED "%s" ANSI_NORMAL "' (y/N)?", name, valueS)).value_or('n')) == 'y') { + if (std::tolower(logger + ->ask( + fmt("do you want to allow configuration setting '%s' to be set to '" ANSI_RED + "%s" ANSI_NORMAL "' (y/N)?", + name, + valueS)) + .value_or('n')) + == 'y') { trusted = true; } - if (std::tolower(logger->ask(fmt("do you want to permanently mark this value as %s (y/N)?", trusted ? "trusted": "untrusted" )).value_or('n')) == 'y') { + if (std::tolower(logger + ->ask( + fmt("do you want to permanently mark this value as %s (y/N)?", + trusted ? 
"trusted" : "untrusted")) + .value_or('n')) + == 'y') { trustedList[name][valueS] = trusted; writeTrustedList(trustedList); } } if (!trusted) { - warn("ignoring untrusted flake configuration setting '%s'.\nPass '%s' to trust it", name, "--accept-flake-config"); + warn( + "ignoring untrusted flake configuration setting '%s'.\nPass '%s' to trust it", + name, + "--accept-flake-config"); continue; } } @@ -78,4 +101,4 @@ void ConfigFile::apply(const Settings & flakeSettings) } } -} +} // namespace nix::flake diff --git a/src/libflake/flake.cc b/src/libflake/flake.cc index 7a11e6047..b31bef211 100644 --- a/src/libflake/flake.cc +++ b/src/libflake/flake.cc @@ -25,10 +25,7 @@ using namespace flake; namespace flake { static StorePath copyInputToStore( - EvalState & state, - fetchers::Input & input, - const fetchers::Input & originalInput, - ref accessor) + EvalState & state, fetchers::Input & input, const fetchers::Input & originalInput, ref accessor) { auto storePath = fetchToStore(*input.settings, *state.store, accessor, FetchMode::Copy, input.getName()); @@ -48,13 +45,11 @@ static void forceTrivialValue(EvalState & state, Value & value, const PosIdx pos state.forceValue(value, pos); } -static void expectType(EvalState & state, ValueType type, - Value & value, const PosIdx pos) +static void expectType(EvalState & state, ValueType type, Value & value, const PosIdx pos) { forceTrivialValue(state, value, pos); if (value.type() != type) - throw Error("expected %s but got %s at %s", - showType(type), showType(value.type()), state.positions[pos]); + throw Error("expected %s but got %s at %s", showType(type), showType(value.type()), state.positions[pos]); } static std::pair, fetchers::Attrs> parseFlakeInputs( @@ -65,38 +60,43 @@ static std::pair, fetchers::Attrs> parseFlakeInput const SourcePath & flakeDir, bool allowSelf); -static void parseFlakeInputAttr( - EvalState & state, - const Attr & attr, - fetchers::Attrs & attrs) +static void parseFlakeInputAttr(EvalState & state, const Attr & attr, fetchers::Attrs & attrs) { - // Allow selecting a subset of enum values - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wswitch-enum" +// Allow selecting a subset of enum values +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wswitch-enum" switch (attr.value->type()) { - case nString: - attrs.emplace(state.symbols[attr.name], attr.value->c_str()); - break; - case nBool: - attrs.emplace(state.symbols[attr.name], Explicit { attr.value->boolean() }); - break; - case nInt: { - auto intValue = attr.value->integer().value; - if (intValue < 0) - state.error("negative value given for flake input attribute %1%: %2%", state.symbols[attr.name], intValue).debugThrow(); - attrs.emplace(state.symbols[attr.name], uint64_t(intValue)); - break; - } - default: - if (attr.name == state.symbols.create("publicKeys")) { - experimentalFeatureSettings.require(Xp::VerifiedFetches); - NixStringContext emptyContext = {}; - attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, attr.pos, emptyContext).dump()); - } else - state.error("flake input attribute '%s' is %s while a string, Boolean, or integer is expected", - state.symbols[attr.name], showType(*attr.value)).debugThrow(); + case nString: + attrs.emplace(state.symbols[attr.name], attr.value->c_str()); + break; + case nBool: + attrs.emplace(state.symbols[attr.name], Explicit{attr.value->boolean()}); + break; + case nInt: { + auto intValue = attr.value->integer().value; + if (intValue < 0) + state + .error( + "negative value 
given for flake input attribute %1%: %2%", state.symbols[attr.name], intValue) + .debugThrow(); + attrs.emplace(state.symbols[attr.name], uint64_t(intValue)); + break; } - #pragma GCC diagnostic pop + default: + if (attr.name == state.symbols.create("publicKeys")) { + experimentalFeatureSettings.require(Xp::VerifiedFetches); + NixStringContext emptyContext = {}; + attrs.emplace( + state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, attr.pos, emptyContext).dump()); + } else + state + .error( + "flake input attribute '%s' is %s while a string, Boolean, or integer is expected", + state.symbols[attr.name], + showType(*attr.value)) + .debugThrow(); + } +#pragma GCC diagnostic pop } static FlakeInput parseFlakeInput( @@ -127,19 +127,24 @@ static FlakeInput parseFlakeInput( else if (attr.value->type() == nPath) { auto path = attr.value->path(); if (path.accessor != flakeDir.accessor) - throw Error("input attribute path '%s' at %s must be in the same source tree as %s", - path, state.positions[attr.pos], flakeDir); + throw Error( + "input attribute path '%s' at %s must be in the same source tree as %s", + path, + state.positions[attr.pos], + flakeDir); url = "path:" + flakeDir.path.makeRelative(path.path); - } - else - throw Error("expected a string or a path but got %s at %s", - showType(attr.value->type()), state.positions[attr.pos]); + } else + throw Error( + "expected a string or a path but got %s at %s", + showType(attr.value->type()), + state.positions[attr.pos]); attrs.emplace("url", *url); } else if (attr.name == sFlake) { expectType(state, nBool, *attr.value, attr.pos); input.isFlake = attr.value->boolean(); } else if (attr.name == sInputs) { - input.overrides = parseFlakeInputs(state, attr.value, attr.pos, lockRootAttrPath, flakeDir, false).first; + input.overrides = + parseFlakeInputs(state, attr.value, attr.pos, lockRootAttrPath, flakeDir, false).first; } else if (attr.name == sFollows) { expectType(state, nString, *attr.value, attr.pos); auto follows(parseInputAttrPath(attr.value->c_str())); @@ -149,8 +154,7 @@ static FlakeInput parseFlakeInput( parseFlakeInputAttr(state, attr, attrs); } catch (Error & e) { e.addTrace( - state.positions[attr.pos], - HintFmt("while evaluating flake attribute '%s'", state.symbols[attr.name])); + state.positions[attr.pos], HintFmt("while evaluating flake attribute '%s'", state.symbols[attr.name])); throw; } } @@ -198,12 +202,8 @@ static std::pair, fetchers::Attrs> parseFlakeInput for (auto & attr : *inputAttr.value->attrs()) parseFlakeInputAttr(state, attr, selfAttrs); } else { - inputs.emplace(inputName, - parseFlakeInput(state, - inputAttr.value, - inputAttr.pos, - lockRootAttrPath, - flakeDir)); + inputs.emplace( + inputName, parseFlakeInput(state, inputAttr.value, inputAttr.pos, lockRootAttrPath, flakeDir)); } } @@ -225,7 +225,7 @@ static Flake readFlake( Value vInfo; state.evalFile(flakePath, vInfo, true); - Flake flake { + Flake flake{ .originalRef = originalRef, .resolvedRef = resolvedRef, .lockedRef = lockedRef, @@ -240,7 +240,8 @@ static Flake readFlake( auto sInputs = state.symbols.create("inputs"); if (auto inputs = vInfo.attrs()->get(sInputs)) { - auto [flakeInputs, selfAttrs] = parseFlakeInputs(state, inputs->value, inputs->pos, lockRootAttrPath, flakeDir, true); + auto [flakeInputs, selfAttrs] = + parseFlakeInputs(state, inputs->value, inputs->pos, lockRootAttrPath, flakeDir, true); flake.inputs = std::move(flakeInputs); flake.selfAttrs = std::move(selfAttrs); } @@ -253,9 +254,9 @@ static Flake readFlake( if 
(outputs->value->isLambda() && outputs->value->lambda().fun->hasFormals()) { for (auto & formal : outputs->value->lambda().fun->formals->formals) { if (formal.name != state.sSelf) - flake.inputs.emplace(state.symbols[formal.name], FlakeInput { - .ref = parseFlakeRef(state.fetchSettings, std::string(state.symbols[formal.name])) - }); + flake.inputs.emplace( + state.symbols[formal.name], + FlakeInput{.ref = parseFlakeRef(state.fetchSettings, std::string(state.symbols[formal.name]))}); } } @@ -271,53 +272,51 @@ static Flake readFlake( forceTrivialValue(state, *setting.value, setting.pos); if (setting.value->type() == nString) flake.config.settings.emplace( - state.symbols[setting.name], - std::string(state.forceStringNoCtx(*setting.value, setting.pos, ""))); + state.symbols[setting.name], std::string(state.forceStringNoCtx(*setting.value, setting.pos, ""))); else if (setting.value->type() == nPath) { - auto storePath = fetchToStore(state.fetchSettings, *state.store, setting.value->path(), FetchMode::Copy); + auto storePath = + fetchToStore(state.fetchSettings, *state.store, setting.value->path(), FetchMode::Copy); + flake.config.settings.emplace(state.symbols[setting.name], state.store->printStorePath(storePath)); + } else if (setting.value->type() == nInt) flake.config.settings.emplace( - state.symbols[setting.name], - state.store->printStorePath(storePath)); - } - else if (setting.value->type() == nInt) - flake.config.settings.emplace( - state.symbols[setting.name], - state.forceInt(*setting.value, setting.pos, "").value); + state.symbols[setting.name], state.forceInt(*setting.value, setting.pos, "").value); else if (setting.value->type() == nBool) flake.config.settings.emplace( - state.symbols[setting.name], - Explicit { state.forceBool(*setting.value, setting.pos, "") }); + state.symbols[setting.name], Explicit{state.forceBool(*setting.value, setting.pos, "")}); else if (setting.value->type() == nList) { std::vector ss; for (auto elem : setting.value->listView()) { if (elem->type() != nString) - state.error("list element in flake configuration setting '%s' is %s while a string is expected", - state.symbols[setting.name], showType(*setting.value)).debugThrow(); + state + .error( + "list element in flake configuration setting '%s' is %s while a string is expected", + state.symbols[setting.name], + showType(*setting.value)) + .debugThrow(); ss.emplace_back(state.forceStringNoCtx(*elem, setting.pos, "")); } flake.config.settings.emplace(state.symbols[setting.name], ss); - } - else - state.error("flake configuration setting '%s' is %s", - state.symbols[setting.name], showType(*setting.value)).debugThrow(); + } else + state + .error( + "flake configuration setting '%s' is %s", state.symbols[setting.name], showType(*setting.value)) + .debugThrow(); } } for (auto & attr : *vInfo.attrs()) { - if (attr.name != state.sDescription && - attr.name != sInputs && - attr.name != sOutputs && - attr.name != sNixConfig) - throw Error("flake '%s' has an unsupported attribute '%s', at %s", - resolvedRef, state.symbols[attr.name], state.positions[attr.pos]); + if (attr.name != state.sDescription && attr.name != sInputs && attr.name != sOutputs && attr.name != sNixConfig) + throw Error( + "flake '%s' has an unsupported attribute '%s', at %s", + resolvedRef, + state.symbols[attr.name], + state.positions[attr.pos]); } return flake; } -static FlakeRef applySelfAttrs( - const FlakeRef & ref, - const Flake & flake) +static FlakeRef applySelfAttrs(const FlakeRef & ref, const Flake & flake) { auto newRef(ref); @@ 
-371,22 +370,16 @@ Flake getFlake(EvalState & state, const FlakeRef & originalRef, fetchers::UseReg return getFlake(state, originalRef, useRegistries, {}); } -static LockFile readLockFile( - const fetchers::Settings & fetchSettings, - const SourcePath & lockFilePath) +static LockFile readLockFile(const fetchers::Settings & fetchSettings, const SourcePath & lockFilePath) { - return lockFilePath.pathExists() - ? LockFile(fetchSettings, lockFilePath.readFile(), fmt("%s", lockFilePath)) - : LockFile(); + return lockFilePath.pathExists() ? LockFile(fetchSettings, lockFilePath.readFile(), fmt("%s", lockFilePath)) + : LockFile(); } /* Compute an in-memory lock file for the specified top-level flake, and optionally write it to file, if the flake is writable. */ -LockedFlake lockFlake( - const Settings & settings, - EvalState & state, - const FlakeRef & topRef, - const LockFlags & lockFlags) +LockedFlake +lockFlake(const Settings & settings, EvalState & state, const FlakeRef & topRef, const LockFlags & lockFlags) { experimentalFeatureSettings.require(Xp::Flakes); @@ -394,11 +387,7 @@ LockedFlake lockFlake( auto useRegistriesTop = useRegistries ? fetchers::UseRegistries::All : fetchers::UseRegistries::No; auto useRegistriesInputs = useRegistries ? fetchers::UseRegistries::Limited : fetchers::UseRegistries::No; - auto flake = getFlake( - state, - topRef, - useRegistriesTop, - {}); + auto flake = getFlake(state, topRef, useRegistriesTop, {}); if (lockFlags.applyNixConfig) { flake.config.apply(settings); @@ -410,10 +399,8 @@ LockedFlake lockFlake( throw Error("reference lock file was provided, but the `allow-dirty` setting is set to false"); } - auto oldLockFile = readLockFile( - state.fetchSettings, - lockFlags.referenceLockFilePath.value_or( - flake.lockFilePath())); + auto oldLockFile = + readLockFile(state.fetchSettings, lockFlags.referenceLockFilePath.value_or(flake.lockFilePath())); debug("old lock file: %s", oldLockFile); @@ -432,8 +419,8 @@ LockedFlake lockFlake( for (auto & i : lockFlags.inputOverrides) { overrides.emplace( i.first, - OverrideTarget { - .input = FlakeInput { .ref = i.second }, + OverrideTarget{ + .input = FlakeInput{.ref = i.second}, /* Note: any relative overrides (e.g. `--override-input B/C "path:./foo/bar"`) are interpreted relative to the top-level @@ -458,42 +445,40 @@ LockedFlake lockFlake( computeLocks; computeLocks = [&]( - /* The inputs of this node, either from flake.nix or - flake.lock. */ - const FlakeInputs & flakeInputs, - /* The node whose locks are to be updated.*/ - ref node, - /* The path to this node in the lock file graph. */ - const InputAttrPath & inputAttrPathPrefix, - /* The old node, if any, from which locks can be - copied. */ - std::shared_ptr oldNode, - /* The prefix relative to which 'follows' should be - interpreted. When a node is initially locked, it's - relative to the node's flake; when it's already locked, - it's relative to the root of the lock file. */ - const InputAttrPath & followsPrefix, - /* The source path of this node's flake. */ - const SourcePath & sourcePath, - bool trustLock) - { + /* The inputs of this node, either from flake.nix or + flake.lock. */ + const FlakeInputs & flakeInputs, + /* The node whose locks are to be updated.*/ + ref node, + /* The path to this node in the lock file graph. */ + const InputAttrPath & inputAttrPathPrefix, + /* The old node, if any, from which locks can be + copied. */ + std::shared_ptr oldNode, + /* The prefix relative to which 'follows' should be + interpreted. 
When a node is initially locked, it's + relative to the node's flake; when it's already locked, + it's relative to the root of the lock file. */ + const InputAttrPath & followsPrefix, + /* The source path of this node's flake. */ + const SourcePath & sourcePath, + bool trustLock) { debug("computing lock file node '%s'", printInputAttrPath(inputAttrPathPrefix)); /* Get the overrides (i.e. attributes of the form 'inputs.nixops.inputs.nixpkgs.url = ...'). */ std::function addOverrides; - addOverrides = [&](const FlakeInput & input, const InputAttrPath & prefix) - { + addOverrides = [&](const FlakeInput & input, const InputAttrPath & prefix) { for (auto & [idOverride, inputOverride] : input.overrides) { auto inputAttrPath(prefix); inputAttrPath.push_back(idOverride); if (inputOverride.ref || inputOverride.follows) - overrides.emplace(inputAttrPath, - OverrideTarget { + overrides.emplace( + inputAttrPath, + OverrideTarget{ .input = inputOverride, .sourcePath = sourcePath, - .parentInputAttrPath = inputAttrPathPrefix - }); + .parentInputAttrPath = inputAttrPathPrefix}); addOverrides(inputOverride, inputAttrPath); } }; @@ -513,7 +498,8 @@ LockedFlake lockFlake( if (inputAttrPath2 == inputAttrPathPrefix && !flakeInputs.count(follow)) warn( "input '%s' has an override for a non-existent input '%s'", - printInputAttrPath(inputAttrPathPrefix), follow); + printInputAttrPath(inputAttrPathPrefix), + follow); } /* Go over the flake inputs, resolve/fetch them if @@ -558,36 +544,31 @@ LockedFlake lockFlake( } if (!input.ref) - input.ref = FlakeRef::fromAttrs(state.fetchSettings, {{"type", "indirect"}, {"id", std::string(id)}}); + input.ref = + FlakeRef::fromAttrs(state.fetchSettings, {{"type", "indirect"}, {"id", std::string(id)}}); auto overriddenParentPath = input.ref->input.isRelative() - ? std::optional(hasOverride ? i->second.parentInputAttrPath : inputAttrPathPrefix) - : std::nullopt; + ? std::optional( + hasOverride ? i->second.parentInputAttrPath : inputAttrPathPrefix) + : std::nullopt; - auto resolveRelativePath = [&]() -> std::optional - { + auto resolveRelativePath = [&]() -> std::optional { if (auto relativePath = input.ref->input.isRelative()) { - return SourcePath { + return SourcePath{ overriddenSourcePath.accessor, - CanonPath(*relativePath, overriddenSourcePath.path.parent().value()) - }; + CanonPath(*relativePath, overriddenSourcePath.path.parent().value())}; } else return std::nullopt; }; /* Get the input flake, resolve 'path:./...' flakerefs relative to the parent flake. 
*/ - auto getInputFlake = [&](const FlakeRef & ref, const fetchers::UseRegistries useRegistries) - { + auto getInputFlake = [&](const FlakeRef & ref, const fetchers::UseRegistries useRegistries) { if (auto resolvedPath = resolveRelativePath()) { return readFlake(state, ref, ref, ref, *resolvedPath, inputAttrPath); } else { - return getFlake( - state, - ref, - useRegistries, - inputAttrPath); + return getFlake(state, ref, useRegistries, inputAttrPath); } }; @@ -602,21 +583,15 @@ LockedFlake lockFlake( if (auto oldLock3 = std::get_if<0>(&*oldLock2)) oldLock = *oldLock3; - if (oldLock - && oldLock->originalRef.canonicalize() == input.ref->canonicalize() - && oldLock->parentInputAttrPath == overriddenParentPath - && !hasCliOverride) - { + if (oldLock && oldLock->originalRef.canonicalize() == input.ref->canonicalize() + && oldLock->parentInputAttrPath == overriddenParentPath && !hasCliOverride) { debug("keeping existing input '%s'", inputAttrPathS); /* Copy the input from the old lock since its flakeref didn't change and there is no override from a higher level flake. */ auto childNode = make_ref( - oldLock->lockedRef, - oldLock->originalRef, - oldLock->isFlake, - oldLock->parentInputAttrPath); + oldLock->lockedRef, oldLock->originalRef, oldLock->isFlake, oldLock->parentInputAttrPath); node->inputs.insert_or_assign(id, childNode); @@ -624,10 +599,8 @@ LockedFlake lockFlake( must fetch the flake to update it. */ auto lb = lockFlags.inputUpdates.lower_bound(inputAttrPath); - auto mustRefetch = - lb != lockFlags.inputUpdates.end() - && lb->size() > inputAttrPath.size() - && std::equal(inputAttrPath.begin(), inputAttrPath.end(), lb->begin()); + auto mustRefetch = lb != lockFlags.inputUpdates.end() && lb->size() > inputAttrPath.size() + && std::equal(inputAttrPath.begin(), inputAttrPath.end(), lb->begin()); FlakeInputs fakeInputs; @@ -638,14 +611,17 @@ LockedFlake lockFlake( those. */ for (auto & i : oldLock->inputs) { if (auto lockedNode = std::get_if<0>(&i.second)) { - fakeInputs.emplace(i.first, FlakeInput { - .ref = (*lockedNode)->originalRef, - .isFlake = (*lockedNode)->isFlake, - }); + fakeInputs.emplace( + i.first, + FlakeInput{ + .ref = (*lockedNode)->originalRef, + .isFlake = (*lockedNode)->isFlake, + }); } else if (auto follows = std::get_if<1>(&i.second)) { if (!trustLock) { // It is possible that the flake has changed, - // so we must confirm all the follows that are in the lock file are also in the flake. + // so we must confirm all the follows that are in the lock file are also in the + // flake. 
auto overridePath(inputAttrPath); overridePath.push_back(i.first); auto o = overrides.find(overridePath); @@ -660,9 +636,11 @@ LockedFlake lockFlake( } auto absoluteFollows(followsPrefix); absoluteFollows.insert(absoluteFollows.end(), follows->begin(), follows->end()); - fakeInputs.emplace(i.first, FlakeInput { - .follows = absoluteFollows, - }); + fakeInputs.emplace( + i.first, + FlakeInput{ + .follows = absoluteFollows, + }); } } } @@ -670,10 +648,17 @@ LockedFlake lockFlake( if (mustRefetch) { auto inputFlake = getInputFlake(oldLock->lockedRef, useRegistriesInputs); nodePaths.emplace(childNode, inputFlake.path.parent()); - computeLocks(inputFlake.inputs, childNode, inputAttrPath, oldLock, followsPrefix, - inputFlake.path, false); + computeLocks( + inputFlake.inputs, + childNode, + inputAttrPath, + oldLock, + followsPrefix, + inputFlake.path, + false); } else { - computeLocks(fakeInputs, childNode, inputAttrPath, oldLock, followsPrefix, sourcePath, true); + computeLocks( + fakeInputs, childNode, inputAttrPath, oldLock, followsPrefix, sourcePath, true); } } else { @@ -681,9 +666,7 @@ LockedFlake lockFlake( this input. */ debug("creating new input '%s'", inputAttrPathS); - if (!lockFlags.allowUnlocked - && !input.ref->input.isLocked() - && !input.ref->input.isRelative()) + if (!lockFlags.allowUnlocked && !input.ref->input.isLocked() && !input.ref->input.isRelative()) throw Error("cannot update unlocked flake input '%s' in pure mode", inputAttrPathS); /* Note: in case of an --override-input, we use @@ -697,13 +680,11 @@ LockedFlake lockFlake( auto ref = (input2.ref && inputIsOverride) ? *input2.ref : *input.ref; if (input.isFlake) { - auto inputFlake = getInputFlake(*input.ref, inputIsOverride ? fetchers::UseRegistries::All : useRegistriesInputs); + auto inputFlake = getInputFlake( + *input.ref, inputIsOverride ? fetchers::UseRegistries::All : useRegistriesInputs); - auto childNode = make_ref( - inputFlake.lockedRef, - ref, - true, - overriddenParentPath); + auto childNode = + make_ref(inputFlake.lockedRef, ref, true, overriddenParentPath); node->inputs.insert_or_assign(id, childNode); @@ -718,7 +699,9 @@ LockedFlake lockFlake( flake, using its own lock file. */ nodePaths.emplace(childNode, inputFlake.path.parent()); computeLocks( - inputFlake.inputs, childNode, inputAttrPath, + inputFlake.inputs, + childNode, + inputAttrPath, readLockFile(state.fetchSettings, inputFlake.lockFilePath()).root.get_ptr(), inputAttrPath, inputFlake.path, @@ -726,21 +709,19 @@ LockedFlake lockFlake( } else { - auto [path, lockedRef] = [&]() -> std::tuple - { + auto [path, lockedRef] = [&]() -> std::tuple { // Handle non-flake 'path:./...' inputs. if (auto resolvedPath = resolveRelativePath()) { return {*resolvedPath, *input.ref}; } else { auto cachedInput = state.inputCache->getAccessor( - state.store, - input.ref->input, - useRegistriesInputs); + state.store, input.ref->input, useRegistriesInputs); auto lockedRef = FlakeRef(std::move(cachedInput.lockedInput), input.ref->subdir); // FIXME: allow input to be lazy. 
- auto storePath = copyInputToStore(state, lockedRef.input, input.ref->input, cachedInput.accessor); + auto storePath = copyInputToStore( + state, lockedRef.input, input.ref->input, cachedInput.accessor); return {state.storePath(storePath), lockedRef}; } @@ -774,8 +755,10 @@ LockedFlake lockFlake( for (auto & i : lockFlags.inputOverrides) if (!overridesUsed.count(i.first)) - warn("the flag '--override-input %s %s' does not match any input", - printInputAttrPath(i.first), i.second); + warn( + "the flag '--override-input %s %s' does not match any input", + printInputAttrPath(i.first), + i.second); for (auto & i : lockFlags.inputUpdates) if (!updatesUsed.count(i)) @@ -799,12 +782,19 @@ LockedFlake lockFlake( if (lockFlags.failOnUnlocked) throw Error( "Not writing lock file of flake '%s' because it has an unlocked input ('%s'). " - "Use '--allow-dirty-locks' to allow this anyway.", topRef, *unlockedInput); + "Use '--allow-dirty-locks' to allow this anyway.", + topRef, + *unlockedInput); if (state.fetchSettings.warnDirty) - warn("not writing lock file of flake '%s' because it has an unlocked input ('%s')", topRef, *unlockedInput); + warn( + "not writing lock file of flake '%s' because it has an unlocked input ('%s')", + topRef, + *unlockedInput); } else { if (!lockFlags.updateLockFile) - throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef); + throw Error( + "flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", + topRef); auto newLockFileS = fmt("%s\n", newLockFile); @@ -845,36 +835,31 @@ LockedFlake lockFlake( topRef.input.putFile( CanonPath((topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock"), - newLockFileS, commitMessage); + newLockFileS, + commitMessage); } /* Rewriting the lockfile changed the top-level repo, so we should re-read it. FIXME: we could also just clear the 'rev' field... 
*/ auto prevLockedRef = flake.lockedRef; - flake = getFlake( - state, - topRef, - useRegistriesTop); + flake = getFlake(state, topRef, useRegistriesTop); - if (lockFlags.commitLockFile && - flake.lockedRef.input.getRev() && - prevLockedRef.input.getRev() != flake.lockedRef.input.getRev()) + if (lockFlags.commitLockFile && flake.lockedRef.input.getRev() + && prevLockedRef.input.getRev() != flake.lockedRef.input.getRev()) warn("committed new revision '%s'", flake.lockedRef.input.getRev()->gitRev()); } } else - throw Error("cannot write modified lock file of flake '%s' (use '--no-write-lock-file' to ignore)", topRef); + throw Error( + "cannot write modified lock file of flake '%s' (use '--no-write-lock-file' to ignore)", topRef); } else { warn("not writing modified lock file of flake '%s':\n%s", topRef, chomp(diff)); flake.forceDirty = true; } } - return LockedFlake { - .flake = std::move(flake), - .lockFile = std::move(newLockFile), - .nodePaths = std::move(nodePaths) - }; + return LockedFlake{ + .flake = std::move(flake), .lockFile = std::move(newLockFile), .nodePaths = std::move(nodePaths)}; } catch (Error & e) { e.addTrace({}, "while updating the lock file of flake '%s'", flake.lockedRef.to_string()); @@ -882,28 +867,28 @@ LockedFlake lockFlake( } } -static ref makeInternalFS() { - auto internalFS = make_ref(MemorySourceAccessor {}); +static ref makeInternalFS() +{ + auto internalFS = make_ref(MemorySourceAccessor{}); internalFS->setPathDisplay("«flakes-internal»", ""); internalFS->addFile( CanonPath("call-flake.nix"), - #include "call-flake.nix.gen.hh" +#include "call-flake.nix.gen.hh" ); return internalFS; } static auto internalFS = makeInternalFS(); -static Value * requireInternalFile(EvalState & state, CanonPath path) { - SourcePath p {internalFS, path}; +static Value * requireInternalFile(EvalState & state, CanonPath path) +{ + SourcePath p{internalFS, path}; auto v = state.allocValue(); state.evalFile(p, *v); // has caching return v; } -void callFlake(EvalState & state, - const LockedFlake & lockedFlake, - Value & vRes) +void callFlake(EvalState & state, const LockedFlake & lockedFlake, Value & vRes) { experimentalFeatureSettings.require(Xp::Flakes); @@ -931,9 +916,7 @@ void callFlake(EvalState & state, auto key = keyMap.find(node); assert(key != keyMap.end()); - override - .alloc(state.symbols.create("dir")) - .mkString(CanonPath(subdir).rel()); + override.alloc(state.symbols.create("dir")).mkString(CanonPath(subdir).rel()); overrides.alloc(state.symbols.create(key->second)).mkAttrs(override); } @@ -952,16 +935,16 @@ void callFlake(EvalState & state, state.callFunction(*vCallFlake, args, vRes, noPos); } -} +} // namespace flake -std::optional LockedFlake::getFingerprint( - ref store, - const fetchers::Settings & fetchSettings) const +std::optional LockedFlake::getFingerprint(ref store, const fetchers::Settings & fetchSettings) const { - if (lockFile.isUnlocked(fetchSettings)) return std::nullopt; + if (lockFile.isUnlocked(fetchSettings)) + return std::nullopt; auto fingerprint = flake.lockedRef.input.getFingerprint(store); - if (!fingerprint) return std::nullopt; + if (!fingerprint) + return std::nullopt; *fingerprint += fmt(";%s;%s", flake.lockedRef.subdir, lockFile); @@ -979,6 +962,6 @@ std::optional LockedFlake::getFingerprint( return hashString(HashAlgorithm::SHA256, *fingerprint); } -Flake::~Flake() { } +Flake::~Flake() {} -} +} // namespace nix diff --git a/src/libflake/flakeref.cc b/src/libflake/flakeref.cc index 37b7eff4c..9a75a2259 100644 --- a/src/libflake/flakeref.cc +++ 
b/src/libflake/flakeref.cc @@ -29,15 +29,13 @@ fetchers::Attrs FlakeRef::toAttrs() const return attrs; } -std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef) +std::ostream & operator<<(std::ostream & str, const FlakeRef & flakeRef) { str << flakeRef.to_string(); return str; } -FlakeRef FlakeRef::resolve( - ref store, - fetchers::UseRegistries useRegistries) const +FlakeRef FlakeRef::resolve(ref store, fetchers::UseRegistries useRegistries) const { auto [input2, extraAttrs] = lookupInRegistries(store, input, useRegistries); return FlakeRef(std::move(input2), fetchers::maybeGetStrAttr(extraAttrs, "dir").value_or(subdir)); @@ -51,16 +49,15 @@ FlakeRef parseFlakeRef( bool isFlake, bool preserveRelativePaths) { - auto [flakeRef, fragment] = parseFlakeRefWithFragment(fetchSettings, url, baseDir, allowMissing, isFlake, preserveRelativePaths); + auto [flakeRef, fragment] = + parseFlakeRefWithFragment(fetchSettings, url, baseDir, allowMissing, isFlake, preserveRelativePaths); if (fragment != "") throw Error("unexpected fragment '%s' in flake reference '%s'", fragment, url); return flakeRef; } -static std::pair fromParsedURL( - const fetchers::Settings & fetchSettings, - ParsedURL && parsedURL, - bool isFlake) +static std::pair +fromParsedURL(const fetchers::Settings & fetchSettings, ParsedURL && parsedURL, bool isFlake) { auto dir = getOr(parsedURL.query, "dir", ""); parsedURL.query.erase("dir"); @@ -79,9 +76,7 @@ std::pair parsePathFlakeRefWithFragment( bool isFlake, bool preserveRelativePaths) { - static std::regex pathFlakeRegex( - R"(([^?#]*)(\?([^#]*))?(#(.*))?)", - std::regex::ECMAScript); + static std::regex pathFlakeRegex(R"(([^?#]*)(\?([^#]*))?(#(.*))?)", std::regex::ECMAScript); std::smatch match; auto succeeds = std::regex_match(url, match, pathFlakeRegex); @@ -104,16 +99,17 @@ std::pair parsePathFlakeRefWithFragment( // Be gentle with people who accidentally write `/foo/bar/flake.nix` instead of `/foo/bar` warn( "Path '%s' should point at the directory containing the 'flake.nix' file, not the file itself. 
" - "Pretending that you meant '%s'" - , path, dirOf(path)); + "Pretending that you meant '%s'", + path, + dirOf(path)); path = dirOf(path); } else { throw BadURL("path '%s' is not a flake (because it's not a directory)", path); } } - if (!allowMissing && !pathExists(path + "/flake.nix")){ - notice("path '%s' does not contain a 'flake.nix', searching up",path); + if (!allowMissing && !pathExists(path + "/flake.nix")) { + notice("path '%s' does not contain a 'flake.nix', searching up", path); // Save device to detect filesystem boundary dev_t device = lstat(path).st_dev; @@ -123,7 +119,9 @@ std::pair parsePathFlakeRefWithFragment( found = true; break; } else if (pathExists(path + "/.git")) - throw Error("path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", path); + throw Error( + "path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", + path); else { if (lstat(path).st_dev != device) throw Error("unable to find a flake before encountering filesystem boundary at '%s'", path); @@ -172,29 +170,23 @@ std::pair parsePathFlakeRefWithFragment( throw BadURL("flake reference '%s' is not an absolute path", url); } - return fromParsedURL(fetchSettings, { - .scheme = "path", - .authority = "", - .path = path, - .query = query, - .fragment = fragment - }, isFlake); + return fromParsedURL( + fetchSettings, + {.scheme = "path", .authority = "", .path = path, .query = query, .fragment = fragment}, + isFlake); } /** * Check if `url` is a flake ID. This is an abbreviated syntax for * `flake:?ref=&rev=`. */ -static std::optional> parseFlakeIdRef( - const fetchers::Settings & fetchSettings, - const std::string & url, - bool isFlake) +static std::optional> +parseFlakeIdRef(const fetchers::Settings & fetchSettings, const std::string & url, bool isFlake) { std::smatch match; static std::regex flakeRegex( - "((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)" - + "(?:#(" + fragmentRegex + "))?", + "((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)" + "(?:#(" + fragmentRegex + "))?", std::regex::ECMAScript); if (std::regex_match(url, match, flakeRegex)) { @@ -205,8 +197,7 @@ static std::optional> parseFlakeIdRef( }; return std::make_pair( - FlakeRef(fetchers::Input::fromURL(fetchSettings, parsedURL, isFlake), ""), - percentDecode(match.str(6))); + FlakeRef(fetchers::Input::fromURL(fetchSettings, parsedURL, isFlake), ""), percentDecode(match.str(6))); } return {}; @@ -220,9 +211,7 @@ std::optional> parseURLFlakeRef( { try { auto parsed = parseURL(url); - if (baseDir - && (parsed.scheme == "path" || parsed.scheme == "git+file") - && !isAbsolute(parsed.path)) + if (baseDir && (parsed.scheme == "path" || parsed.scheme == "git+file") && !isAbsolute(parsed.path)) parsed.path = absPath(parsed.path, *baseDir); return fromParsedURL(fetchSettings, std::move(parsed), isFlake); } catch (BadURL &) { @@ -249,9 +238,7 @@ std::pair parseFlakeRefWithFragment( } } -FlakeRef FlakeRef::fromAttrs( - const fetchers::Settings & fetchSettings, - const fetchers::Attrs & attrs) +FlakeRef FlakeRef::fromAttrs(const fetchers::Settings & fetchSettings, const fetchers::Attrs & attrs) { auto attrs2(attrs); attrs2.erase("dir"); @@ -323,12 +310,11 @@ std::tuple parseFlakeRefWithFragment bool isFlake) { auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(url); - auto [flakeRef, fragment] = parseFlakeRefWithFragment( - fetchSettings, - std::string { prefix }, baseDir, allowMissing, isFlake); + auto [flakeRef, fragment] = + 
parseFlakeRefWithFragment(fetchSettings, std::string{prefix}, baseDir, allowMissing, isFlake); return {std::move(flakeRef), fragment, std::move(extendedOutputsSpec)}; } std::regex flakeIdRegex(flakeIdRegexS, std::regex::ECMAScript); -} +} // namespace nix diff --git a/src/libflake/include/nix/flake/flake-primops.hh b/src/libflake/include/nix/flake/flake-primops.hh index e7b86b9b3..35a7128f4 100644 --- a/src/libflake/include/nix/flake/flake-primops.hh +++ b/src/libflake/include/nix/flake/flake-primops.hh @@ -13,4 +13,4 @@ nix::PrimOp getFlake(const Settings & settings); extern nix::PrimOp parseFlakeRef; extern nix::PrimOp flakeRefToString; -} // namespace nix::flake +} // namespace nix::flake::primops diff --git a/src/libflake/include/nix/flake/flake.hh b/src/libflake/include/nix/flake/flake.hh index ed34aa9c8..13002b47c 100644 --- a/src/libflake/include/nix/flake/flake.hh +++ b/src/libflake/include/nix/flake/flake.hh @@ -134,9 +134,7 @@ struct LockedFlake */ std::map, SourcePath> nodePaths; - std::optional getFingerprint( - ref store, - const fetchers::Settings & fetchSettings) const; + std::optional getFingerprint(ref store, const fetchers::Settings & fetchSettings) const; }; struct LockFlags @@ -215,18 +213,12 @@ struct LockFlags std::set inputUpdates; }; -LockedFlake lockFlake( - const Settings & settings, - EvalState & state, - const FlakeRef & flakeRef, - const LockFlags & lockFlags); +LockedFlake +lockFlake(const Settings & settings, EvalState & state, const FlakeRef & flakeRef, const LockFlags & lockFlags); -void callFlake( - EvalState & state, - const LockedFlake & lockedFlake, - Value & v); +void callFlake(EvalState & state, const LockedFlake & lockedFlake, Value & v); -} +} // namespace flake void emitTreeAttrs( EvalState & state, @@ -241,6 +233,6 @@ void emitTreeAttrs( * always treats the input as final (i.e. no attributes can be * added/removed/changed). */ -void prim_fetchFinalTree(EvalState & state, const PosIdx pos, Value * * args, Value & v); +void prim_fetchFinalTree(EvalState & state, const PosIdx pos, Value ** args, Value & v); -} +} // namespace nix diff --git a/src/libflake/include/nix/flake/flakeref.hh b/src/libflake/include/nix/flake/flakeref.hh index c0045fcf3..12d337230 100644 --- a/src/libflake/include/nix/flake/flakeref.hh +++ b/src/libflake/include/nix/flake/flakeref.hh @@ -47,29 +47,27 @@ struct FlakeRef */ Path subdir; - bool operator ==(const FlakeRef & other) const = default; + bool operator==(const FlakeRef & other) const = default; - bool operator <(const FlakeRef & other) const + bool operator<(const FlakeRef & other) const { return std::tie(input, subdir) < std::tie(other.input, other.subdir); } FlakeRef(fetchers::Input && input, const Path & subdir) - : input(std::move(input)), subdir(subdir) - { } + : input(std::move(input)) + , subdir(subdir) + { + } // FIXME: change to operator <<. 
std::string to_string() const; fetchers::Attrs toAttrs() const; - FlakeRef resolve( - ref store, - fetchers::UseRegistries useRegistries = fetchers::UseRegistries::All) const; + FlakeRef resolve(ref store, fetchers::UseRegistries useRegistries = fetchers::UseRegistries::All) const; - static FlakeRef fromAttrs( - const fetchers::Settings & fetchSettings, - const fetchers::Attrs & attrs); + static FlakeRef fromAttrs(const fetchers::Settings & fetchSettings, const fetchers::Attrs & attrs); std::pair, FlakeRef> lazyFetch(ref store) const; @@ -80,7 +78,7 @@ struct FlakeRef FlakeRef canonicalize() const; }; -std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef); +std::ostream & operator<<(std::ostream & str, const FlakeRef & flakeRef); /** * @param baseDir Optional [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory) @@ -117,4 +115,4 @@ std::tuple parseFlakeRefWithFragment const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*"; extern std::regex flakeIdRegex; -} +} // namespace nix diff --git a/src/libflake/include/nix/flake/lockfile.hh b/src/libflake/include/nix/flake/lockfile.hh index 97bd7a495..c5740a2f1 100644 --- a/src/libflake/include/nix/flake/lockfile.hh +++ b/src/libflake/include/nix/flake/lockfile.hh @@ -8,7 +8,7 @@ namespace nix { class Store; class StorePath; -} +} // namespace nix namespace nix::flake { @@ -27,7 +27,7 @@ struct Node : std::enable_shared_from_this std::map inputs; - virtual ~Node() { } + virtual ~Node() {} }; /** @@ -51,11 +51,10 @@ struct LockedNode : Node , originalRef(std::move(originalRef)) , isFlake(isFlake) , parentInputAttrPath(std::move(parentInputAttrPath)) - { } + { + } - LockedNode( - const fetchers::Settings & fetchSettings, - const nlohmann::json & json); + LockedNode(const fetchers::Settings & fetchSettings, const nlohmann::json & json); StorePath computeStorePath(Store & store) const; }; @@ -65,9 +64,7 @@ struct LockFile ref root = make_ref(); LockFile() {}; - LockFile( - const fetchers::Settings & fetchSettings, - std::string_view contents, std::string_view path); + LockFile(const fetchers::Settings & fetchSettings, std::string_view contents, std::string_view path); typedef std::map, std::string> KeyMap; @@ -81,7 +78,7 @@ struct LockFile */ std::optional isUnlocked(const fetchers::Settings & fetchSettings) const; - bool operator ==(const LockFile & other) const; + bool operator==(const LockFile & other) const; std::shared_ptr findInput(const InputAttrPath & path); @@ -95,10 +92,10 @@ struct LockFile void check(); }; -std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile); +std::ostream & operator<<(std::ostream & stream, const LockFile & lockFile); InputAttrPath parseInputAttrPath(std::string_view s); std::string printInputAttrPath(const InputAttrPath & path); -} +} // namespace nix::flake diff --git a/src/libflake/include/nix/flake/settings.hh b/src/libflake/include/nix/flake/settings.hh index b3bffad4c..618ed4d38 100644 --- a/src/libflake/include/nix/flake/settings.hh +++ b/src/libflake/include/nix/flake/settings.hh @@ -50,4 +50,4 @@ struct Settings : public Config Xp::Flakes}; }; -} +} // namespace nix::flake diff --git a/src/libflake/include/nix/flake/url-name.hh b/src/libflake/include/nix/flake/url-name.hh index d295ca8f8..b95d2dff6 100644 --- a/src/libflake/include/nix/flake/url-name.hh +++ b/src/libflake/include/nix/flake/url-name.hh @@ -17,4 +17,4 @@ namespace nix { */ std::optional getNameFromURL(const ParsedURL & url); -} +} // namespace nix diff --git 
a/src/libflake/lockfile.cc b/src/libflake/lockfile.cc index 646516caf..94e7f11f1 100644 --- a/src/libflake/lockfile.cc +++ b/src/libflake/lockfile.cc @@ -12,14 +12,10 @@ #include #include - namespace nix::flake { -static FlakeRef getFlakeRef( - const fetchers::Settings & fetchSettings, - const nlohmann::json & json, - const char * attr, - const char * info) +static FlakeRef +getFlakeRef(const fetchers::Settings & fetchSettings, const nlohmann::json & json, const char * attr, const char * info) { auto i = json.find(attr); if (i != json.end()) { @@ -38,13 +34,12 @@ static FlakeRef getFlakeRef( throw Error("attribute '%s' missing in lock file", attr); } -LockedNode::LockedNode( - const fetchers::Settings & fetchSettings, - const nlohmann::json & json) +LockedNode::LockedNode(const fetchers::Settings & fetchSettings, const nlohmann::json & json) : lockedRef(getFlakeRef(fetchSettings, json, "locked", "info")) // FIXME: remove "info" , originalRef(getFlakeRef(fetchSettings, json, "original", nullptr)) , isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true) - , parentInputAttrPath(json.find("parent") != json.end() ? (std::optional) json["parent"] : std::nullopt) + , parentInputAttrPath( + json.find("parent") != json.end() ? (std::optional) json["parent"] : std::nullopt) { if (!lockedRef.input.isLocked() && !lockedRef.input.isRelative()) { if (lockedRef.input.getNarHash()) @@ -53,7 +48,8 @@ LockedNode::LockedNode( "This is deprecated since such inputs are verifiable but may not be reproducible.", lockedRef.to_string()); else - throw Error("Lock file contains unlocked input '%s'. Use '--allow-dirty-locks' to accept this lock file.", + throw Error( + "Lock file contains unlocked input '%s'. Use '--allow-dirty-locks' to accept this lock file.", fetchers::attrsToJSON(lockedRef.input.toAttrs())); } @@ -67,7 +63,8 @@ StorePath LockedNode::computeStorePath(Store & store) const return lockedRef.input.computeStorePath(store); } -static std::shared_ptr doFind(const ref & root, const InputAttrPath & path, std::vector & visited) +static std::shared_ptr +doFind(const ref & root, const InputAttrPath & path, std::vector & visited) { auto pos = root; @@ -104,9 +101,7 @@ std::shared_ptr LockFile::findInput(const InputAttrPath & path) return doFind(root, path, visited); } -LockFile::LockFile( - const fetchers::Settings & fetchSettings, - std::string_view contents, std::string_view path) +LockFile::LockFile(const fetchers::Settings & fetchSettings, std::string_view contents, std::string_view path) { auto json = [=] { try { @@ -123,9 +118,9 @@ LockFile::LockFile( std::function getInputs; - getInputs = [&](Node & node, const nlohmann::json & jsonNode) - { - if (jsonNode.find("inputs") == jsonNode.end()) return; + getInputs = [&](Node & node, const nlohmann::json & jsonNode) { + if (jsonNode.find("inputs") == jsonNode.end()) + return; for (auto & i : jsonNode["inputs"].items()) { if (i.value().is_array()) { // FIXME: remove, obsolete InputAttrPath path; @@ -171,14 +166,13 @@ std::pair LockFile::toJSON() const std::function node)> dumpNode; - dumpNode = [&](std::string key, ref node) -> std::string - { + dumpNode = [&](std::string key, ref node) -> std::string { auto k = nodeKeys.find(node); if (k != nodeKeys.end()) return k->second; if (!keys.insert(key).second) { - for (int n = 2; ; ++n) { + for (int n = 2;; ++n) { auto k = fmt("%s_%d", key, n); if (keys.insert(k).second) { key = k; @@ -239,7 +233,7 @@ std::pair LockFile::to_string() const return {json.dump(2), std::move(nodeKeys)}; } -std::ostream & 
operator <<(std::ostream & stream, const LockFile & lockFile) +std::ostream & operator<<(std::ostream & stream, const LockFile & lockFile) { stream << lockFile.toJSON().first.dump(2); return stream; @@ -251,9 +245,9 @@ std::optional LockFile::isUnlocked(const fetchers::Settings & fetchSet std::function node)> visit; - visit = [&](ref node) - { - if (!nodes.insert(node).second) return; + visit = [&](ref node) { + if (!nodes.insert(node).second) + return; for (auto & i : node->inputs) if (auto child = std::get_if<0>(&i.second)) visit(*child); @@ -265,17 +259,15 @@ std::optional LockFile::isUnlocked(const fetchers::Settings & fetchSet `allow-dirty-locks` is enabled, it has a NAR hash. In the latter case, we can verify the input but we may not be able to fetch it from anywhere. */ - auto isConsideredLocked = [&](const fetchers::Input & input) - { + auto isConsideredLocked = [&](const fetchers::Input & input) { return input.isLocked() || (fetchSettings.allowDirtyLocks && input.getNarHash()); }; for (auto & i : nodes) { - if (i == ref(root)) continue; + if (i == ref(root)) + continue; auto node = i.dynamic_pointer_cast(); - if (node - && (!isConsideredLocked(node->lockedRef.input) - || !node->lockedRef.input.isFinal()) + if (node && (!isConsideredLocked(node->lockedRef.input) || !node->lockedRef.input.isFinal()) && !node->lockedRef.input.isRelative()) return node->lockedRef; } @@ -283,7 +275,7 @@ std::optional LockFile::isUnlocked(const fetchers::Settings & fetchSet return {}; } -bool LockFile::operator ==(const LockFile & other) const +bool LockFile::operator==(const LockFile & other) const { // FIXME: slow return toJSON().first == other.toJSON().first; @@ -309,11 +301,11 @@ std::map LockFile::getAllInputs() const std::function node)> recurse; - recurse = [&](const InputAttrPath & prefix, ref node) - { - if (!done.insert(node).second) return; + recurse = [&](const InputAttrPath & prefix, ref node) { + if (!done.insert(node).second) + return; - for (auto &[id, input] : node->inputs) { + for (auto & [id, input] : node->inputs) { auto inputAttrPath(prefix); inputAttrPath.push_back(id); res.emplace(inputAttrPath, input); @@ -337,7 +329,7 @@ static std::string describe(const FlakeRef & flakeRef) return s; } -std::ostream & operator <<(std::ostream & stream, const Node::Edge & edge) +std::ostream & operator<<(std::ostream & stream, const Node::Edge & edge) { if (auto node = std::get_if<0>(&edge)) stream << describe((*node)->lockedRef); @@ -368,18 +360,19 @@ std::string LockFile::diff(const LockFile & oldLocks, const LockFile & newLocks) while (i != oldFlat.end() || j != newFlat.end()) { if (j != newFlat.end() && (i == oldFlat.end() || i->first > j->first)) { - res += fmt("• " ANSI_GREEN "Added input '%s':" ANSI_NORMAL "\n %s\n", - printInputAttrPath(j->first), j->second); + res += fmt( + "• " ANSI_GREEN "Added input '%s':" ANSI_NORMAL "\n %s\n", printInputAttrPath(j->first), j->second); ++j; } else if (i != oldFlat.end() && (j == newFlat.end() || i->first < j->first)) { res += fmt("• " ANSI_RED "Removed input '%s'" ANSI_NORMAL "\n", printInputAttrPath(i->first)); ++i; } else { if (!equals(i->second, j->second)) { - res += fmt("• " ANSI_BOLD "Updated input '%s':" ANSI_NORMAL "\n %s\n → %s\n", - printInputAttrPath(i->first), - i->second, - j->second); + res += + fmt("• " ANSI_BOLD "Updated input '%s':" ANSI_NORMAL "\n %s\n → %s\n", + printInputAttrPath(i->first), + i->second, + j->second); } ++i; ++j; @@ -396,7 +389,8 @@ void LockFile::check() for (auto & [inputAttrPath, input] : inputs) { if (auto 
follows = std::get_if<1>(&input)) { if (!follows->empty() && !findInput(*follows)) - throw Error("input '%s' follows a non-existent input '%s'", + throw Error( + "input '%s' follows a non-existent input '%s'", printInputAttrPath(inputAttrPath), printInputAttrPath(*follows)); } @@ -410,4 +404,4 @@ std::string printInputAttrPath(const InputAttrPath & path) return concatStringsSep("/", path); } -} +} // namespace nix::flake diff --git a/src/libflake/settings.cc b/src/libflake/settings.cc index bab7f9439..e77bded30 100644 --- a/src/libflake/settings.cc +++ b/src/libflake/settings.cc @@ -12,4 +12,4 @@ void Settings::configureEvalSettings(nix::EvalSettings & evalSettings) const evalSettings.extraPrimOps.emplace_back(primops::flakeRefToString); } -} // namespace nix +} // namespace nix::flake diff --git a/src/libflake/url-name.cc b/src/libflake/url-name.cc index 3e3311cf7..b3eeca26a 100644 --- a/src/libflake/url-name.cc +++ b/src/libflake/url-name.cc @@ -5,10 +5,11 @@ namespace nix { static const std::string attributeNamePattern("[a-zA-Z0-9_-]+"); -static const std::regex lastAttributeRegex("^((?:" + attributeNamePattern + "\\.)*)(" + attributeNamePattern +")(\\^.*)?$"); +static const std::regex + lastAttributeRegex("^((?:" + attributeNamePattern + "\\.)*)(" + attributeNamePattern + ")(\\^.*)?$"); static const std::string pathSegmentPattern("[a-zA-Z0-9_-]+"); -static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern +")"); -static const std::regex secondPathSegmentRegex("(?:" + pathSegmentPattern + ")/(" + pathSegmentPattern +")(?:/.*)?"); +static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern + ")"); +static const std::regex secondPathSegmentRegex("(?:" + pathSegmentPattern + ")/(" + pathSegmentPattern + ")(?:/.*)?"); static const std::regex gitProviderRegex("github|gitlab|sourcehut"); static const std::regex gitSchemeRegex("git($|\\+.*)"); @@ -21,8 +22,7 @@ std::optional getNameFromURL(const ParsedURL & url) return url.query.at("dir"); /* If the fragment isn't a "default" and contains two attribute elements, use the last one */ - if (std::regex_match(url.fragment, match, lastAttributeRegex) - && match.str(1) != "defaultPackage." + if (std::regex_match(url.fragment, match, lastAttributeRegex) && match.str(1) != "defaultPackage." 
&& match.str(2) != "default") { return match.str(2); } @@ -43,4 +43,4 @@ std::optional getNameFromURL(const ParsedURL & url) return {}; } -} +} // namespace nix diff --git a/src/libmain/common-args.cc b/src/libmain/common-args.cc index dcf252a4f..6055ec0e7 100644 --- a/src/libmain/common-args.cc +++ b/src/libmain/common-args.cc @@ -51,15 +51,16 @@ MixCommonArgs::MixCommonArgs(const std::string & programName) warn(e.what()); } }}, - .completer = [](AddCompletions & completions, size_t index, std::string_view prefix) { - if (index == 0) { - std::map settings; - globalConfig.getSettings(settings); - for (auto & s : settings) - if (hasPrefix(s.first, prefix)) - completions.add(s.first, fmt("Set the `%s` setting.", s.first)); - } - }, + .completer = + [](AddCompletions & completions, size_t index, std::string_view prefix) { + if (index == 0) { + std::map settings; + globalConfig.getSettings(settings); + for (auto & s : settings) + if (hasPrefix(s.first, prefix)) + completions.add(s.first, fmt("Set the `%s` setting.", s.first)); + } + }, }); addFlag({ @@ -75,16 +76,15 @@ MixCommonArgs::MixCommonArgs(const std::string & programName) .shortName = 'j', .description = "The maximum number of parallel builds.", .labels = Strings{"jobs"}, - .handler = {[=](std::string s) { - settings.set("max-jobs", s); - }}, + .handler = {[=](std::string s) { settings.set("max-jobs", s); }}, }); std::string cat = "Options to override configuration settings"; globalConfig.convertToArgs(*this, cat); // Backward compatibility hack: nix-env already had a --system flag. - if (programName == "nix-env") longFlags.erase("system"); + if (programName == "nix-env") + longFlags.erase("system"); hiddenCategories.insert(cat); } @@ -95,7 +95,7 @@ void MixCommonArgs::initialFlagsProcessed() pluginsInited(); } -template +template void MixPrintJSON::printJSON(const T /* nlohmann::json */ & json) { auto suspension = logger->suspend(); @@ -108,5 +108,4 @@ void MixPrintJSON::printJSON(const T /* nlohmann::json */ & json) template void MixPrintJSON::printJSON(const nlohmann::json & json); - } // namespace nix diff --git a/src/libmain/include/nix/main/common-args.hh b/src/libmain/include/nix/main/common-args.hh index cc6d3d3f0..d67fc2ad0 100644 --- a/src/libmain/include/nix/main/common-args.hh +++ b/src/libmain/include/nix/main/common-args.hh @@ -6,7 +6,7 @@ namespace nix { -//static constexpr auto commonArgsCategory = "Miscellaneous common options"; +// static constexpr auto commonArgsCategory = "Miscellaneous common options"; static constexpr auto loggingCategory = "Logging-related options"; static constexpr auto miscCategory = "Miscellaneous global options"; @@ -86,7 +86,7 @@ struct MixPrintJSON : virtual Args * but you _can_ print a sole JSON string by explicitly coercing it to * `nlohmann::json` first. */ - template >> + template>> void printJSON(const T & json); }; @@ -113,13 +113,12 @@ struct MixRepair : virtual Args { addFlag({ .longName = "repair", - .description = - "During evaluation, rewrite missing or corrupted files in the Nix store. " - "During building, rebuild missing or corrupted store paths.", + .description = "During evaluation, rewrite missing or corrupted files in the Nix store. 
" + "During building, rebuild missing or corrupted store paths.", .category = miscCategory, .handler = {&repair, Repair}, }); } }; -} +} // namespace nix diff --git a/src/libmain/include/nix/main/loggers.hh b/src/libmain/include/nix/main/loggers.hh index 061b4a32a..b763f0b2a 100644 --- a/src/libmain/include/nix/main/loggers.hh +++ b/src/libmain/include/nix/main/loggers.hh @@ -6,14 +6,14 @@ namespace nix { enum class LogFormat { - raw, - rawWithLogs, - internalJSON, - bar, - barWithLogs, + raw, + rawWithLogs, + internalJSON, + bar, + barWithLogs, }; void setLogFormat(const std::string & logFormatStr); void setLogFormat(const LogFormat & logFormat); -} +} // namespace nix diff --git a/src/libmain/include/nix/main/plugin.hh b/src/libmain/include/nix/main/plugin.hh index 4221c1b17..0c03a4bb8 100644 --- a/src/libmain/include/nix/main/plugin.hh +++ b/src/libmain/include/nix/main/plugin.hh @@ -1,4 +1,5 @@ #pragma once + ///@file namespace nix { @@ -9,4 +10,4 @@ namespace nix { */ void initPlugins(); -} +} // namespace nix diff --git a/src/libmain/include/nix/main/shared.hh b/src/libmain/include/nix/main/shared.hh index 4d4b816e7..47d08a050 100644 --- a/src/libmain/include/nix/main/shared.hh +++ b/src/libmain/include/nix/main/shared.hh @@ -21,10 +21,12 @@ int handleExceptions(const std::string & programName, std::function fun) */ void initNix(bool loadConfig = true); -void parseCmdLine(int argc, char * * argv, - std::function parseArg); +void parseCmdLine( + int argc, char ** argv, std::function parseArg); -void parseCmdLine(const std::string & programName, const Strings & args, +void parseCmdLine( + const std::string & programName, + const Strings & args, std::function parseArg); void printVersion(const std::string & programName); @@ -37,33 +39,27 @@ void printGCWarning(); class Store; struct MissingPaths; -void printMissing( - ref store, - const std::vector & paths, - Verbosity lvl = lvlInfo); +void printMissing(ref store, const std::vector & paths, Verbosity lvl = lvlInfo); -void printMissing( - ref store, - const MissingPaths & missing, - Verbosity lvl = lvlInfo); +void printMissing(ref store, const MissingPaths & missing, Verbosity lvl = lvlInfo); -std::string getArg(const std::string & opt, - Strings::iterator & i, const Strings::iterator & end); +std::string getArg(const std::string & opt, Strings::iterator & i, const Strings::iterator & end); -template N getIntArg(const std::string & opt, - Strings::iterator & i, const Strings::iterator & end, bool allowUnit) +template +N getIntArg(const std::string & opt, Strings::iterator & i, const Strings::iterator & end, bool allowUnit) { ++i; - if (i == end) throw UsageError("'%1%' requires an argument", opt); + if (i == end) + throw UsageError("'%1%' requires an argument", opt); return string2IntWithUnitPrefix(*i); } - struct LegacyArgs : public MixCommonArgs, public RootArgs { std::function parseArg; - LegacyArgs(const std::string & programName, + LegacyArgs( + const std::string & programName, std::function parseArg); bool processFlag(Strings::iterator & pos, Strings::iterator end) override; @@ -71,7 +67,6 @@ struct LegacyArgs : public MixCommonArgs, public RootArgs bool processArgs(const Strings & args, bool finish) override; }; - /** * The constructor of this class starts a pager if standard output is a * terminal and $PAGER is set. Standard output is redirected to the @@ -92,7 +87,6 @@ private: extern volatile ::sig_atomic_t blockInt; - /* GC helpers. 
*/ std::string showBytes(uint64_t bytes); @@ -103,12 +97,16 @@ struct PrintFreed { bool show; const GCResults & results; + PrintFreed(bool show, const GCResults & results) - : show(show), results(results) { } + : show(show) + , results(results) + { + } + ~PrintFreed(); }; - #ifndef _WIN32 /** * Install a SIGSEGV handler to detect stack overflows. @@ -141,4 +139,4 @@ extern std::function stackOverflowHandler; void defaultStackOverflowHandler(siginfo_t * info, void * ctx); #endif -} +} // namespace nix diff --git a/src/libmain/loggers.cc b/src/libmain/loggers.cc index c78e49b63..a3e75c535 100644 --- a/src/libmain/loggers.cc +++ b/src/libmain/loggers.cc @@ -53,4 +53,4 @@ void setLogFormat(const LogFormat & logFormat) logger = makeDefaultLogger(); } -} +} // namespace nix diff --git a/src/libmain/plugin.cc b/src/libmain/plugin.cc index 760a096ad..321fd6a15 100644 --- a/src/libmain/plugin.cc +++ b/src/libmain/plugin.cc @@ -117,4 +117,4 @@ void initPlugins() pluginSettings.pluginFiles.pluginsLoaded = true; } -} +} // namespace nix diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc index 173ab876c..c00f5d86b 100644 --- a/src/libmain/progress-bar.cc +++ b/src/libmain/progress-bar.cc @@ -133,8 +133,9 @@ public: updateThread.join(); } - void pause() override { - auto state (state_.lock()); + void pause() override + { + auto state(state_.lock()); state->suspensions++; if (state->suspensions > 1) { // already paused @@ -145,8 +146,9 @@ public: writeToStderr("\r\e[K"); } - void resume() override { - auto state (state_.lock()); + void resume() override + { + auto state(state_.lock()); if (state->suspensions == 0) { log(lvlError, "nix::ProgressBar: resume() called without a matching preceding pause(). This is a bug."); return; @@ -168,7 +170,8 @@ public: void log(Verbosity lvl, std::string_view s) override { - if (lvl > verbosity) return; + if (lvl > verbosity) + return; auto state(state_.lock()); log(*state, lvl, s); } @@ -193,20 +196,21 @@ public: } } - void startActivity(ActivityId act, Verbosity lvl, ActivityType type, - const std::string & s, const Fields & fields, ActivityId parent) override + void startActivity( + ActivityId act, + Verbosity lvl, + ActivityType type, + const std::string & s, + const Fields & fields, + ActivityId parent) override { auto state(state_.lock()); if (lvl <= verbosity && !s.empty() && type != actBuildWaiting) log(*state, lvl, s + "..."); - state->activities.emplace_back(ActInfo { - .s = s, - .type = type, - .parent = parent, - .startTime = std::chrono::steady_clock::now() - }); + state->activities.emplace_back( + ActInfo{.s = s, .type = type, .parent = parent, .startTime = std::chrono::steady_clock::now()}); auto i = std::prev(state->activities.end()); state->its.emplace(act, i); state->activitiesByType[type].its.emplace(act, i); @@ -231,11 +235,11 @@ public: if (type == actSubstitute) { auto name = storePathToName(getS(fields, 0)); auto sub = getS(fields, 1); - i->s = fmt( - hasPrefix(sub, "local") - ? "copying " ANSI_BOLD "%s" ANSI_NORMAL " from %s" - : "fetching " ANSI_BOLD "%s" ANSI_NORMAL " from %s", - name, sub); + i->s = + fmt(hasPrefix(sub, "local") ? 
"copying " ANSI_BOLD "%s" ANSI_NORMAL " from %s" + : "fetching " ANSI_BOLD "%s" ANSI_NORMAL " from %s", + name, + sub); } if (type == actPostBuildHook) { @@ -265,8 +269,10 @@ public: { while (act != 0) { auto i = state.its.find(act); - if (i == state.its.end()) break; - if (i->second->type == type) return true; + if (i == state.its.end()) + break; + if (i->second->type == type) + return true; act = i->second->parent; } return false; @@ -400,7 +406,8 @@ public: auto nextWakeup = std::chrono::milliseconds::max(); state.haveUpdate = false; - if (state.isPaused() || !state.active) return nextWakeup; + if (state.isPaused() || !state.active) + return nextWakeup; std::string line; @@ -414,7 +421,8 @@ public: auto now = std::chrono::steady_clock::now(); if (!state.activities.empty()) { - if (!status.empty()) line += " "; + if (!status.empty()) + line += " "; auto i = state.activities.rbegin(); while (i != state.activities.rend()) { @@ -426,7 +434,9 @@ public: if (i->startTime + delay < now) break; else - nextWakeup = std::min(nextWakeup, std::chrono::duration_cast(delay - (now - i->startTime))); + nextWakeup = std::min( + nextWakeup, + std::chrono::duration_cast(delay - (now - i->startTime))); } ++i; } @@ -439,14 +449,16 @@ public: line += ")"; } if (!i->lastLine.empty()) { - if (!i->s.empty()) line += ": "; + if (!i->s.empty()) + line += ": "; line += i->lastLine; } } } auto width = getWindowSize().second; - if (width <= 0) width = std::numeric_limits::max(); + if (width <= 0) + width = std::numeric_limits::max(); redraw("\r" + filterANSIEscapes(line, false, width) + ANSI_NORMAL + "\e[K"); @@ -459,51 +471,60 @@ public: std::string res; - auto renderActivity = [&](ActivityType type, const std::string & itemFmt, const std::string & numberFmt = "%d", double unit = 1) { - auto & act = state.activitiesByType[type]; - uint64_t done = act.done, expected = act.done, running = 0, failed = act.failed; - for (auto & j : act.its) { - done += j.second->done; - expected += j.second->expected; - running += j.second->running; - failed += j.second->failed; - } + auto renderActivity = + [&](ActivityType type, const std::string & itemFmt, const std::string & numberFmt = "%d", double unit = 1) { + auto & act = state.activitiesByType[type]; + uint64_t done = act.done, expected = act.done, running = 0, failed = act.failed; + for (auto & j : act.its) { + done += j.second->done; + expected += j.second->expected; + running += j.second->running; + failed += j.second->failed; + } - expected = std::max(expected, act.expected); + expected = std::max(expected, act.expected); - std::string s; + std::string s; - if (running || done || expected || failed) { - if (running) - if (expected != 0) - s = fmt(ANSI_BLUE + numberFmt + ANSI_NORMAL "/" ANSI_GREEN + numberFmt + ANSI_NORMAL "/" + numberFmt, - running / unit, done / unit, expected / unit); + if (running || done || expected || failed) { + if (running) + if (expected != 0) + s = + fmt(ANSI_BLUE + numberFmt + ANSI_NORMAL "/" ANSI_GREEN + numberFmt + ANSI_NORMAL "/" + + numberFmt, + running / unit, + done / unit, + expected / unit); + else + s = + fmt(ANSI_BLUE + numberFmt + ANSI_NORMAL "/" ANSI_GREEN + numberFmt + ANSI_NORMAL, + running / unit, + done / unit); + else if (expected != done) + if (expected != 0) + s = fmt(ANSI_GREEN + numberFmt + ANSI_NORMAL "/" + numberFmt, done / unit, expected / unit); + else + s = fmt(ANSI_GREEN + numberFmt + ANSI_NORMAL, done / unit); else - s = fmt(ANSI_BLUE + numberFmt + ANSI_NORMAL "/" ANSI_GREEN + numberFmt + ANSI_NORMAL, - running / unit, 
done / unit); - else if (expected != done) - if (expected != 0) - s = fmt(ANSI_GREEN + numberFmt + ANSI_NORMAL "/" + numberFmt, - done / unit, expected / unit); - else - s = fmt(ANSI_GREEN + numberFmt + ANSI_NORMAL, done / unit); - else - s = fmt(done ? ANSI_GREEN + numberFmt + ANSI_NORMAL : numberFmt, done / unit); - s = fmt(itemFmt, s); + s = fmt(done ? ANSI_GREEN + numberFmt + ANSI_NORMAL : numberFmt, done / unit); + s = fmt(itemFmt, s); - if (failed) - s += fmt(" (" ANSI_RED "%d failed" ANSI_NORMAL ")", failed / unit); - } + if (failed) + s += fmt(" (" ANSI_RED "%d failed" ANSI_NORMAL ")", failed / unit); + } - return s; - }; + return s; + }; - auto showActivity = [&](ActivityType type, const std::string & itemFmt, const std::string & numberFmt = "%d", double unit = 1) { - auto s = renderActivity(type, itemFmt, numberFmt, unit); - if (s.empty()) return; - if (!res.empty()) res += ", "; - res += s; - }; + auto showActivity = + [&](ActivityType type, const std::string & itemFmt, const std::string & numberFmt = "%d", double unit = 1) { + auto s = renderActivity(type, itemFmt, numberFmt, unit); + if (s.empty()) + return; + if (!res.empty()) + res += ", "; + res += s; + }; showActivity(actBuilds, "%s built"); @@ -511,9 +532,17 @@ public: auto s2 = renderActivity(actCopyPath, "%s MiB", "%.1f", MiB); if (!s1.empty() || !s2.empty()) { - if (!res.empty()) res += ", "; - if (s1.empty()) res += "0 copied"; else res += s1; - if (!s2.empty()) { res += " ("; res += s2; res += ')'; } + if (!res.empty()) + res += ", "; + if (s1.empty()) + res += "0 copied"; + else + res += s1; + if (!s2.empty()) { + res += " ("; + res += s2; + res += ')'; + } } showActivity(actFileTransfer, "%s MiB DL", "%.1f", MiB); @@ -522,7 +551,8 @@ public: auto s = renderActivity(actOptimiseStore, "%s paths optimised"); if (s != "") { s += fmt(", %.1f MiB / %d inodes freed", state.bytesLinked / MiB, state.filesLinked); - if (!res.empty()) res += ", "; + if (!res.empty()) + res += ", "; res += s; } } @@ -531,12 +561,14 @@ public: showActivity(actVerifyPaths, "%s paths verified"); if (state.corruptedPaths) { - if (!res.empty()) res += ", "; + if (!res.empty()) + res += ", "; res += fmt(ANSI_RED "%d corrupted" ANSI_NORMAL, state.corruptedPaths); } if (state.untrustedPaths) { - if (!res.empty()) res += ", "; + if (!res.empty()) + res += ", "; res += fmt(ANSI_RED "%d untrusted" ANSI_NORMAL, state.untrustedPaths); } @@ -558,10 +590,12 @@ public: std::optional ask(std::string_view msg) override { auto state(state_.lock()); - if (!state->active) return {}; + if (!state->active) + return {}; std::cerr << fmt("\r\e[K%s ", msg); auto s = trim(readLine(getStandardInput(), true)); - if (s.size() != 1) return {}; + if (s.size() != 1) + return {}; draw(*state); return s[0]; } @@ -577,4 +611,4 @@ std::unique_ptr makeProgressBar() return std::make_unique(isTTY()); } -} +} // namespace nix diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 0982810d1..7187e9720 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -17,7 +17,7 @@ #include #include #ifdef __linux__ -#include +# include #endif #include @@ -30,30 +30,27 @@ namespace nix { -char * * savedArgv; +char ** savedArgv; static bool gcWarning = true; void printGCWarning() { - if (!gcWarning) return; + if (!gcWarning) + return; static bool haveWarned = false; - warnOnce(haveWarned, + warnOnce( + haveWarned, "you did not specify '--add-root'; " "the result might be removed by the garbage collector"); } - void printMissing(ref store, const std::vector & paths, Verbosity 
lvl) { printMissing(store, store->queryMissing(paths), lvl); } - -void printMissing( - ref store, - const MissingPaths & missing, - Verbosity lvl) +void printMissing(ref store, const MissingPaths & missing, Verbosity lvl) { if (!missing.willBuild.empty()) { if (missing.willBuild.size() == 1) @@ -70,51 +67,53 @@ void printMissing( const float downloadSizeMiB = missing.downloadSize / (1024.f * 1024.f); const float narSizeMiB = missing.narSize / (1024.f * 1024.f); if (missing.willSubstitute.size() == 1) { - printMsg(lvl, "this path will be fetched (%.2f MiB download, %.2f MiB unpacked):", - downloadSizeMiB, - narSizeMiB); + printMsg( + lvl, "this path will be fetched (%.2f MiB download, %.2f MiB unpacked):", downloadSizeMiB, narSizeMiB); } else { - printMsg(lvl, "these %d paths will be fetched (%.2f MiB download, %.2f MiB unpacked):", + printMsg( + lvl, + "these %d paths will be fetched (%.2f MiB download, %.2f MiB unpacked):", missing.willSubstitute.size(), downloadSizeMiB, narSizeMiB); } std::vector willSubstituteSorted = {}; - std::for_each(missing.willSubstitute.begin(), missing.willSubstitute.end(), - [&](const StorePath &p) { willSubstituteSorted.push_back(&p); }); - std::sort(willSubstituteSorted.begin(), willSubstituteSorted.end(), - [](const StorePath *lhs, const StorePath *rhs) { - if (lhs->name() == rhs->name()) - return lhs->to_string() < rhs->to_string(); - else - return lhs->name() < rhs->name(); - }); + std::for_each(missing.willSubstitute.begin(), missing.willSubstitute.end(), [&](const StorePath & p) { + willSubstituteSorted.push_back(&p); + }); + std::sort( + willSubstituteSorted.begin(), willSubstituteSorted.end(), [](const StorePath * lhs, const StorePath * rhs) { + if (lhs->name() == rhs->name()) + return lhs->to_string() < rhs->to_string(); + else + return lhs->name() < rhs->name(); + }); for (auto p : willSubstituteSorted) printMsg(lvl, " %s", store->printStorePath(*p)); } if (!missing.unknown.empty()) { - printMsg(lvl, "don't know how to build these paths%s:", - (settings.readOnlyMode ? " (may be caused by read-only store access)" : "")); + printMsg( + lvl, + "don't know how to build these paths%s:", + (settings.readOnlyMode ? " (may be caused by read-only store access)" : "")); for (auto & i : missing.unknown) printMsg(lvl, " %s", store->printStorePath(i)); } } - -std::string getArg(const std::string & opt, - Strings::iterator & i, const Strings::iterator & end) +std::string getArg(const std::string & opt, Strings::iterator & i, const Strings::iterator & end) { ++i; - if (i == end) throw UsageError("'%1%' requires an argument", opt); + if (i == end) + throw UsageError("'%1%' requires an argument", opt); return *i; } #ifndef _WIN32 -static void sigHandler(int signo) { } +static void sigHandler(int signo) {} #endif - void initNix(bool loadConfig) { /* Turn on buffering for cerr. */ @@ -139,7 +138,8 @@ void initNix(bool loadConfig) /* Install a dummy SIGUSR1 handler for use with pthread_kill(). */ act.sa_handler = sigHandler; - if (sigaction(SIGUSR1, &act, 0)) throw SysError("handling SIGUSR1"); + if (sigaction(SIGUSR1, &act, 0)) + throw SysError("handling SIGUSR1"); #endif #ifdef __APPLE__ @@ -147,19 +147,26 @@ void initNix(bool loadConfig) * Instead, add a dummy sigaction handler, and signalHandlerThread * can handle the rest. 
*/ act.sa_handler = sigHandler; - if (sigaction(SIGWINCH, &act, 0)) throw SysError("handling SIGWINCH"); + if (sigaction(SIGWINCH, &act, 0)) + throw SysError("handling SIGWINCH"); /* Disable SA_RESTART for interrupts, so that system calls on this thread * error with EINTR like they do on Linux. * Most signals on BSD systems default to SA_RESTART on, but Nix * expects EINTR from syscalls to properly exit. */ act.sa_handler = SIG_DFL; - if (sigaction(SIGINT, &act, 0)) throw SysError("handling SIGINT"); - if (sigaction(SIGTERM, &act, 0)) throw SysError("handling SIGTERM"); - if (sigaction(SIGHUP, &act, 0)) throw SysError("handling SIGHUP"); - if (sigaction(SIGPIPE, &act, 0)) throw SysError("handling SIGPIPE"); - if (sigaction(SIGQUIT, &act, 0)) throw SysError("handling SIGQUIT"); - if (sigaction(SIGTRAP, &act, 0)) throw SysError("handling SIGTRAP"); + if (sigaction(SIGINT, &act, 0)) + throw SysError("handling SIGINT"); + if (sigaction(SIGTERM, &act, 0)) + throw SysError("handling SIGTERM"); + if (sigaction(SIGHUP, &act, 0)) + throw SysError("handling SIGHUP"); + if (sigaction(SIGPIPE, &act, 0)) + throw SysError("handling SIGPIPE"); + if (sigaction(SIGQUIT, &act, 0)) + throw SysError("handling SIGQUIT"); + if (sigaction(SIGTRAP, &act, 0)) + throw SysError("handling SIGTRAP"); #endif #ifndef _WIN32 @@ -176,52 +183,52 @@ void initNix(bool loadConfig) umask(0022); } - -LegacyArgs::LegacyArgs(const std::string & programName, +LegacyArgs::LegacyArgs( + const std::string & programName, std::function parseArg) - : MixCommonArgs(programName), parseArg(parseArg) + : MixCommonArgs(programName) + , parseArg(parseArg) { addFlag({ .longName = "no-build-output", .shortName = 'Q', .description = "Do not show build output.", - .handler = {[&]() {setLogFormat(LogFormat::raw); }}, + .handler = {[&]() { setLogFormat(LogFormat::raw); }}, }); addFlag({ .longName = "keep-failed", - .shortName ='K', + .shortName = 'K', .description = "Keep temporary directories of failed builds.", - .handler = {&(bool&) settings.keepFailed, true}, + .handler = {&(bool &) settings.keepFailed, true}, }); addFlag({ .longName = "keep-going", - .shortName ='k', + .shortName = 'k', .description = "Keep going after a build fails.", - .handler = {&(bool&) settings.keepGoing, true}, + .handler = {&(bool &) settings.keepGoing, true}, }); addFlag({ .longName = "fallback", .description = "Build from source if substitution fails.", - .handler = {&(bool&) settings.tryFallback, true}, + .handler = {&(bool &) settings.tryFallback, true}, }); - auto intSettingAlias = [&](char shortName, const std::string & longName, - const std::string & description, const std::string & dest) - { - addFlag({ - .longName = longName, - .shortName = shortName, - .description = description, - .labels = {"n"}, - .handler = {[=](std::string s) { - auto n = string2IntWithUnitPrefix(s); - settings.set(dest, std::to_string(n)); - }}, - }); - }; + auto intSettingAlias = + [&](char shortName, const std::string & longName, const std::string & description, const std::string & dest) { + addFlag({ + .longName = longName, + .shortName = shortName, + .description = description, + .labels = {"n"}, + .handler = {[=](std::string s) { + auto n = string2IntWithUnitPrefix(s); + settings.set(dest, std::to_string(n)); + }}, + }); + }; intSettingAlias(0, "cores", "Maximum number of CPU cores to use inside a build.", "cores"); intSettingAlias(0, "max-silent-time", "Number of seconds of silence before a build is killed.", "max-silent-time"); @@ -243,23 +250,24 @@ LegacyArgs::LegacyArgs(const 
std::string & programName, .longName = "store", .description = "The URL of the Nix store to use.", .labels = {"store-uri"}, - .handler = {&(std::string&) settings.storeUri}, + .handler = {&(std::string &) settings.storeUri}, }); } - bool LegacyArgs::processFlag(Strings::iterator & pos, Strings::iterator end) { - if (MixCommonArgs::processFlag(pos, end)) return true; + if (MixCommonArgs::processFlag(pos, end)) + return true; bool res = parseArg(pos, end); - if (res) ++pos; + if (res) + ++pos; return res; } - bool LegacyArgs::processArgs(const Strings & args, bool finish) { - if (args.empty()) return true; + if (args.empty()) + return true; assert(args.size() == 1); Strings ss(args); auto pos = ss.begin(); @@ -268,21 +276,20 @@ bool LegacyArgs::processArgs(const Strings & args, bool finish) return true; } - -void parseCmdLine(int argc, char * * argv, - std::function parseArg) +void parseCmdLine( + int argc, char ** argv, std::function parseArg) { parseCmdLine(std::string(baseNameOf(argv[0])), argvToStrings(argc, argv), parseArg); } - -void parseCmdLine(const std::string & programName, const Strings & args, +void parseCmdLine( + const std::string & programName, + const Strings & args, std::function parseArg) { LegacyArgs(programName, parseArg).parseCmdline(args); } - void printVersion(const std::string & programName) { std::cout << fmt("%1% (Nix) %2%", programName, nixVersion) << std::endl; @@ -296,9 +303,7 @@ void printVersion(const std::string & programName) std::cout << "Additional system types: " << concatStringsSep(", ", settings.extraPlatforms.get()) << "\n"; std::cout << "Features: " << concatStringsSep(", ", cfg) << "\n"; std::cout << "System configuration file: " << settings.nixConfDir + "/nix.conf" << "\n"; - std::cout << "User configuration files: " << - concatStringsSep(":", settings.nixUserConfFiles) - << "\n"; + std::cout << "User configuration files: " << concatStringsSep(":", settings.nixUserConfFiles) << "\n"; std::cout << "Store directory: " << settings.nixStore << "\n"; std::cout << "State directory: " << settings.nixStateDir << "\n"; std::cout << "Data directory: " << settings.nixDataDir << "\n"; @@ -349,13 +354,15 @@ int handleExceptions(const std::string & programName, std::function fun) return 0; } - RunPager::RunPager() { - if (!isatty(STDOUT_FILENO)) return; + if (!isatty(STDOUT_FILENO)) + return; char * pager = getenv("NIX_PAGER"); - if (!pager) pager = getenv("PAGER"); - if (pager && ((std::string) pager == "" || (std::string) pager == "cat")) return; + if (!pager) + pager = getenv("PAGER"); + if (pager && ((std::string) pager == "" || (std::string) pager == "cat")) + return; logger->stop(); @@ -386,7 +393,6 @@ RunPager::RunPager() #endif } - RunPager::~RunPager() { try { @@ -402,13 +408,10 @@ RunPager::~RunPager() } } - PrintFreed::~PrintFreed() { if (show) - std::cout << fmt("%d store paths deleted, %s freed\n", - results.paths.size(), - showBytes(results.bytesFreed)); + std::cout << fmt("%d store paths deleted, %s freed\n", results.paths.size(), showBytes(results.bytesFreed)); } -} +} // namespace nix diff --git a/src/libmain/unix/stack.cc b/src/libmain/unix/stack.cc index cee21d2a2..458693407 100644 --- a/src/libmain/unix/stack.cc +++ b/src/libmain/unix/stack.cc @@ -10,7 +10,6 @@ namespace nix { - static void sigsegvHandler(int signo, siginfo_t * info, void * ctx) { /* Detect stack overflows by comparing the faulting address with @@ -28,7 +27,8 @@ static void sigsegvHandler(int signo, siginfo_t * info, void * ctx) if (haveSP) { ptrdiff_t diff = (char *) 
info->si_addr - sp; - if (diff < 0) diff = -diff; + if (diff < 0) + diff = -diff; if (diff < 4096) { nix::stackOverflowHandler(info, ctx); } @@ -39,13 +39,13 @@ static void sigsegvHandler(int signo, siginfo_t * info, void * ctx) sigfillset(&act.sa_mask); act.sa_handler = SIG_DFL; act.sa_flags = 0; - if (sigaction(SIGSEGV, &act, 0)) abort(); + if (sigaction(SIGSEGV, &act, 0)) + abort(); } - void detectStackOverflow() { -#if defined(SA_SIGINFO) && defined (SA_ONSTACK) +#if defined(SA_SIGINFO) && defined(SA_ONSTACK) /* Install a SIGSEGV handler to detect stack overflows. This requires an alternative stack, otherwise the signal cannot be delivered when we're out of stack space. */ @@ -53,9 +53,11 @@ void detectStackOverflow() stack.ss_size = 4096 * 4 + MINSIGSTKSZ; static auto stackBuf = std::make_unique>(stack.ss_size); stack.ss_sp = stackBuf->data(); - if (!stack.ss_sp) throw Error("cannot allocate alternative stack"); + if (!stack.ss_sp) + throw Error("cannot allocate alternative stack"); stack.ss_flags = 0; - if (sigaltstack(&stack, 0) == -1) throw SysError("cannot set alternative stack"); + if (sigaltstack(&stack, 0) == -1) + throw SysError("cannot set alternative stack"); struct sigaction act; sigfillset(&act.sa_mask); @@ -68,10 +70,11 @@ void detectStackOverflow() std::function stackOverflowHandler(defaultStackOverflowHandler); -void defaultStackOverflowHandler(siginfo_t * info, void * ctx) { +void defaultStackOverflowHandler(siginfo_t * info, void * ctx) +{ char msg[] = "error: stack overflow (possible infinite recursion)\n"; [[gnu::unused]] auto res = write(2, msg, strlen(msg)); _exit(1); // maybe abort instead? } -} +} // namespace nix diff --git a/src/libstore-test-support/derived-path.cc b/src/libstore-test-support/derived-path.cc index c7714449c..225b86c79 100644 --- a/src/libstore-test-support/derived-path.cc +++ b/src/libstore-test-support/derived-path.cc @@ -68,4 +68,4 @@ Gen Arbitrary::arbitrary() }); } -} +} // namespace rc diff --git a/src/libstore-test-support/include/nix/store/tests/derived-path.hh b/src/libstore-test-support/include/nix/store/tests/derived-path.hh index 642ce557c..b3b43474a 100644 --- a/src/libstore-test-support/include/nix/store/tests/derived-path.hh +++ b/src/libstore-test-support/include/nix/store/tests/derived-path.hh @@ -12,28 +12,33 @@ namespace rc { using namespace nix; template<> -struct Arbitrary { +struct Arbitrary +{ static Gen arbitrary(); }; template<> -struct Arbitrary { +struct Arbitrary +{ static Gen arbitrary(); }; template<> -struct Arbitrary { +struct Arbitrary +{ static Gen arbitrary(); }; template<> -struct Arbitrary { +struct Arbitrary +{ static Gen arbitrary(); }; template<> -struct Arbitrary { +struct Arbitrary +{ static Gen arbitrary(); }; -} +} // namespace rc diff --git a/src/libstore-test-support/include/nix/store/tests/nix_api_store.hh b/src/libstore-test-support/include/nix/store/tests/nix_api_store.hh index e51be3dab..608aa63d6 100644 --- a/src/libstore-test-support/include/nix/store/tests/nix_api_store.hh +++ b/src/libstore-test-support/include/nix/store/tests/nix_api_store.hh @@ -44,8 +44,9 @@ protected: // no `mkdtemp` with MinGW auto tmpl = nix::defaultTempDir() + "/tests_nix-store."; for (size_t i = 0; true; ++i) { - nixDir = tmpl + std::string { i }; - if (std::filesystem::create_directory(nixDir)) break; + nixDir = tmpl + std::string{i}; + if (std::filesystem::create_directory(nixDir)) + break; } #else // resolve any symlinks in i.e. 
on macOS /tmp -> /private/tmp @@ -72,4 +73,4 @@ protected: }; } }; -} +} // namespace nixC diff --git a/src/libstore-test-support/include/nix/store/tests/outputs-spec.hh b/src/libstore-test-support/include/nix/store/tests/outputs-spec.hh index c13c992b6..865a97352 100644 --- a/src/libstore-test-support/include/nix/store/tests/outputs-spec.hh +++ b/src/libstore-test-support/include/nix/store/tests/outputs-spec.hh @@ -11,8 +11,9 @@ namespace rc { using namespace nix; template<> -struct Arbitrary { +struct Arbitrary +{ static Gen arbitrary(); }; -} +} // namespace rc diff --git a/src/libstore-test-support/include/nix/store/tests/path.hh b/src/libstore-test-support/include/nix/store/tests/path.hh index 59ff604d7..ff80b1299 100644 --- a/src/libstore-test-support/include/nix/store/tests/path.hh +++ b/src/libstore-test-support/include/nix/store/tests/path.hh @@ -7,26 +7,29 @@ namespace nix { -struct StorePathName { +struct StorePathName +{ std::string name; }; // For rapidcheck void showValue(const StorePath & p, std::ostream & os); -} +} // namespace nix namespace rc { using namespace nix; template<> -struct Arbitrary { +struct Arbitrary +{ static Gen arbitrary(); }; template<> -struct Arbitrary { +struct Arbitrary +{ static Gen arbitrary(); }; -} +} // namespace rc diff --git a/src/libstore-test-support/include/nix/store/tests/protocol.hh b/src/libstore-test-support/include/nix/store/tests/protocol.hh index acd10bf9d..3d7a9b073 100644 --- a/src/libstore-test-support/include/nix/store/tests/protocol.hh +++ b/src/libstore-test-support/include/nix/store/tests/protocol.hh @@ -14,8 +14,9 @@ class ProtoTest : public CharacterizationTest, public LibStoreTest { std::filesystem::path unitTestData = getUnitTestData() / protocolDir; - std::filesystem::path goldenMaster(std::string_view testStem) const override { - return unitTestData / (std::string { testStem + ".bin" }); + std::filesystem::path goldenMaster(std::string_view testStem) const override + { + return unitTestData / (std::string{testStem + ".bin"}); } }; @@ -31,10 +32,10 @@ public: { CharacterizationTest::readTest(testStem, [&](const auto & encoded) { T got = ({ - StringSource from { encoded }; + StringSource from{encoded}; Proto::template Serialise::read( *LibStoreTest::store, - typename Proto::ReadConn { + typename Proto::ReadConn{ .from = from, .version = version, }); @@ -54,7 +55,7 @@ public: StringSink to; Proto::template Serialise::write( *LibStoreTest::store, - typename Proto::WriteConn { + typename Proto::WriteConn{ .to = to, .version = version, }, @@ -65,11 +66,13 @@ public: }; #define VERSIONED_CHARACTERIZATION_TEST(FIXTURE, NAME, STEM, VERSION, VALUE) \ - TEST_F(FIXTURE, NAME ## _read) { \ - readProtoTest(STEM, VERSION, VALUE); \ - } \ - TEST_F(FIXTURE, NAME ## _write) { \ - writeProtoTest(STEM, VERSION, VALUE); \ + TEST_F(FIXTURE, NAME##_read) \ + { \ + readProtoTest(STEM, VERSION, VALUE); \ + } \ + TEST_F(FIXTURE, NAME##_write) \ + { \ + writeProtoTest(STEM, VERSION, VALUE); \ } -} +} // namespace nix diff --git a/src/libstore-test-support/outputs-spec.cc b/src/libstore-test-support/outputs-spec.cc index 5b5251361..d5128a8bd 100644 --- a/src/libstore-test-support/outputs-spec.cc +++ b/src/libstore-test-support/outputs-spec.cc @@ -24,4 +24,4 @@ Gen Arbitrary::arbitrary() }); } -} +} // namespace rc diff --git a/src/libstore-test-support/path.cc b/src/libstore-test-support/path.cc index 47c1d693b..5d5902cc9 100644 --- a/src/libstore-test-support/path.cc +++ b/src/libstore-test-support/path.cc @@ -16,15 +16,16 @@ void showValue(const 
StorePath & p, std::ostream & os) os << p.to_string(); } -} +} // namespace nix namespace rc { using namespace nix; Gen storePathChar() { - return rc::gen::apply([](uint8_t i) -> char { - switch (i) { + return rc::gen::apply( + [](uint8_t i) -> char { + switch (i) { case 0 ... 9: return '0' + i; case 10 ... 35: @@ -45,36 +46,23 @@ Gen storePathChar() return '='; default: assert(false); - } - }, - gen::inRange(0, 10 + 2 * 26 + 6)); + } + }, + gen::inRange(0, 10 + 2 * 26 + 6)); } Gen Arbitrary::arbitrary() { return gen::construct( - gen::suchThat( - gen::container(storePathChar()), - [](const std::string & s) { - return - !( s == "" - || s == "." - || s == ".." - || s.starts_with(".-") - || s.starts_with("..-") - ); - } - ) - ); + gen::suchThat(gen::container(storePathChar()), [](const std::string & s) { + return !(s == "" || s == "." || s == ".." || s.starts_with(".-") || s.starts_with("..-")); + })); } Gen Arbitrary::arbitrary() { - return - gen::construct( - gen::arbitrary(), - gen::apply([](StorePathName n){ return n.name; }, gen::arbitrary()) - ); + return gen::construct( + gen::arbitrary(), gen::apply([](StorePathName n) { return n.name; }, gen::arbitrary())); } } // namespace rc diff --git a/src/libstore-tests/common-protocol.cc b/src/libstore-tests/common-protocol.cc index 5164f154a..2b039180c 100644 --- a/src/libstore-tests/common-protocol.cc +++ b/src/libstore-tests/common-protocol.cc @@ -24,10 +24,8 @@ public: { CharacterizationTest::readTest(testStem, [&](const auto & encoded) { T got = ({ - StringSource from { encoded }; - CommonProto::Serialise::read( - *store, - CommonProto::ReadConn { .from = from }); + StringSource from{encoded}; + CommonProto::Serialise::read(*store, CommonProto::ReadConn{.from = from}); }); ASSERT_EQ(got, expected); @@ -42,27 +40,26 @@ public: { CharacterizationTest::writeTest(testStem, [&]() -> std::string { StringSink to; - CommonProto::Serialise::write( - *store, - CommonProto::WriteConn { .to = to }, - decoded); + CommonProto::Serialise::write(*store, CommonProto::WriteConn{.to = to}, decoded); return to.s; }); } }; #define CHARACTERIZATION_TEST(NAME, STEM, VALUE) \ - TEST_F(CommonProtoTest, NAME ## _read) { \ - readProtoTest(STEM, VALUE); \ - } \ - TEST_F(CommonProtoTest, NAME ## _write) { \ - writeProtoTest(STEM, VALUE); \ + TEST_F(CommonProtoTest, NAME##_read) \ + { \ + readProtoTest(STEM, VALUE); \ + } \ + TEST_F(CommonProtoTest, NAME##_write) \ + { \ + writeProtoTest(STEM, VALUE); \ } CHARACTERIZATION_TEST( string, "string", - (std::tuple { + (std::tuple{ "", "hi", "white rabbit", @@ -73,24 +70,24 @@ CHARACTERIZATION_TEST( CHARACTERIZATION_TEST( storePath, "store-path", - (std::tuple { - StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, - StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" }, + (std::tuple{ + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar"}, })) CHARACTERIZATION_TEST( contentAddress, "content-address", - (std::tuple { - ContentAddress { + (std::tuple{ + ContentAddress{ .method = ContentAddressMethod::Raw::Text, .hash = hashString(HashAlgorithm::SHA256, "Derive(...)"), }, - ContentAddress { + ContentAddress{ .method = ContentAddressMethod::Raw::Flat, .hash = hashString(HashAlgorithm::SHA1, "blob blob..."), }, - ContentAddress { + ContentAddress{ .method = ContentAddressMethod::Raw::NixArchive, .hash = hashString(HashAlgorithm::SHA256, "(...)"), }, @@ -99,12 +96,12 @@ CHARACTERIZATION_TEST( CHARACTERIZATION_TEST( drvOutput, "drv-output", - (std::tuple { + (std::tuple{ 
{ .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), .outputName = "baz", }, - DrvOutput { + DrvOutput{ .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), .outputName = "quux", }, @@ -113,75 +110,82 @@ CHARACTERIZATION_TEST( CHARACTERIZATION_TEST( realisation, "realisation", - (std::tuple { - Realisation { - .id = DrvOutput { - .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), - .outputName = "baz", - }, - .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, - .signatures = { "asdf", "qwer" }, - }, - Realisation { - .id = { - .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), - .outputName = "baz", - }, - .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, - .signatures = { "asdf", "qwer" }, - .dependentRealisations = { - { - DrvOutput { - .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "quux", - }, - StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + (std::tuple{ + Realisation{ + .id = + DrvOutput{ + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + .signatures = {"asdf", "qwer"}, + }, + Realisation{ + .id = + { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + .signatures = {"asdf", "qwer"}, + .dependentRealisations = + { + { + DrvOutput{ + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "quux", + }, + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, }, - }, }, })) CHARACTERIZATION_TEST( vector, "vector", - (std::tuple, std::vector, std::vector, std::vector>> { - { }, - { "" }, - { "", "foo", "bar" }, - { {}, { "" }, { "", "1", "2" } }, + (std::tuple< + std::vector, + std::vector, + std::vector, + std::vector>>{ + {}, + {""}, + {"", "foo", "bar"}, + {{}, {""}, {"", "1", "2"}}, })) CHARACTERIZATION_TEST( set, "set", - (std::tuple> { - { }, - { "" }, - { "", "foo", "bar" }, - { {}, { "" }, { "", "1", "2" } }, + (std::tuple>{ + {}, + {""}, + {"", "foo", "bar"}, + {{}, {""}, {"", "1", "2"}}, })) CHARACTERIZATION_TEST( optionalStorePath, "optional-store-path", - (std::tuple, std::optional> { + (std::tuple, std::optional>{ std::nullopt, - std::optional { - StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" }, + std::optional{ + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar"}, }, })) CHARACTERIZATION_TEST( optionalContentAddress, "optional-content-address", - (std::tuple, std::optional> { + (std::tuple, std::optional>{ std::nullopt, - std::optional { - ContentAddress { + std::optional{ + ContentAddress{ .method = ContentAddressMethod::Raw::Flat, .hash = hashString(HashAlgorithm::SHA1, "blob blob..."), }, }, })) -} +} // namespace nix diff --git a/src/libstore-tests/content-address.cc b/src/libstore-tests/content-address.cc index c208c944d..51d591c38 100644 --- a/src/libstore-tests/content-address.cc +++ b/src/libstore-tests/content-address.cc @@ -8,30 +8,33 @@ namespace nix { * ContentAddressMethod::parse, ContentAddressMethod::render * --------------------------------------------------------------------------*/ -TEST(ContentAddressMethod, testRoundTripPrintParse_1) { +TEST(ContentAddressMethod, testRoundTripPrintParse_1) +{ for (ContentAddressMethod cam : { - 
ContentAddressMethod::Raw::Text, - ContentAddressMethod::Raw::Flat, - ContentAddressMethod::Raw::NixArchive, - ContentAddressMethod::Raw::Git, - }) { + ContentAddressMethod::Raw::Text, + ContentAddressMethod::Raw::Flat, + ContentAddressMethod::Raw::NixArchive, + ContentAddressMethod::Raw::Git, + }) { EXPECT_EQ(ContentAddressMethod::parse(cam.render()), cam); } } -TEST(ContentAddressMethod, testRoundTripPrintParse_2) { +TEST(ContentAddressMethod, testRoundTripPrintParse_2) +{ for (const std::string_view camS : { - "text", - "flat", - "nar", - "git", - }) { + "text", + "flat", + "nar", + "git", + }) { EXPECT_EQ(ContentAddressMethod::parse(camS).render(), camS); } } -TEST(ContentAddressMethod, testParseContentAddressMethodOptException) { +TEST(ContentAddressMethod, testParseContentAddressMethodOptException) +{ EXPECT_THROW(ContentAddressMethod::parse("narwhal"), UsageError); } -} +} // namespace nix diff --git a/src/libstore-tests/derivation-advanced-attrs.cc b/src/libstore-tests/derivation-advanced-attrs.cc index b68134cd1..fbdf8ed29 100644 --- a/src/libstore-tests/derivation-advanced-attrs.cc +++ b/src/libstore-tests/derivation-advanced-attrs.cc @@ -497,4 +497,4 @@ TEST_F(CaDerivationAdvancedAttrsTest, advancedAttributes_structuredAttrs) }); }; -} +} // namespace nix diff --git a/src/libstore-tests/derivation.cc b/src/libstore-tests/derivation.cc index fa6711d40..7d0507a7a 100644 --- a/src/libstore-tests/derivation.cc +++ b/src/libstore-tests/derivation.cc @@ -16,7 +16,8 @@ class DerivationTest : public CharacterizationTest, public LibStoreTest std::filesystem::path unitTestData = getUnitTestData() / "derivation"; public: - std::filesystem::path goldenMaster(std::string_view testStem) const override { + std::filesystem::path goldenMaster(std::string_view testStem) const override + { return unitTestData / testStem; } @@ -51,168 +52,169 @@ class ImpureDerivationTest : public DerivationTest } }; -TEST_F(DerivationTest, BadATerm_version) { +TEST_F(DerivationTest, BadATerm_version) +{ + ASSERT_THROW( + parseDerivation(*store, readFile(goldenMaster("bad-version.drv")), "whatever", mockXpSettings), FormatError); +} + +TEST_F(DynDerivationTest, BadATerm_oldVersionDynDeps) +{ ASSERT_THROW( parseDerivation( - *store, - readFile(goldenMaster("bad-version.drv")), - "whatever", - mockXpSettings), + *store, readFile(goldenMaster("bad-old-version-dyn-deps.drv")), "dyn-dep-derivation", mockXpSettings), FormatError); } -TEST_F(DynDerivationTest, BadATerm_oldVersionDynDeps) { - ASSERT_THROW( - parseDerivation( - *store, - readFile(goldenMaster("bad-old-version-dyn-deps.drv")), - "dyn-dep-derivation", - mockXpSettings), - FormatError); -} - -#define TEST_JSON(FIXTURE, NAME, VAL, DRV_NAME, OUTPUT_NAME) \ - TEST_F(FIXTURE, DerivationOutput_ ## NAME ## _from_json) { \ - readTest("output-" #NAME ".json", [&](const auto & encoded_) { \ - auto encoded = json::parse(encoded_); \ - DerivationOutput got = DerivationOutput::fromJSON( \ - *store, \ - DRV_NAME, \ - OUTPUT_NAME, \ - encoded, \ - mockXpSettings); \ - DerivationOutput expected { VAL }; \ - ASSERT_EQ(got, expected); \ - }); \ - } \ - \ - TEST_F(FIXTURE, DerivationOutput_ ## NAME ## _to_json) { \ - writeTest("output-" #NAME ".json", [&]() -> json { \ - return DerivationOutput { (VAL) }.toJSON( \ - *store, \ - (DRV_NAME), \ - (OUTPUT_NAME)); \ - }, [](const auto & file) { \ - return json::parse(readFile(file)); \ - }, [](const auto & file, const auto & got) { \ - return writeFile(file, got.dump(2) + "\n"); \ - }); \ +#define TEST_JSON(FIXTURE, NAME, VAL, 
DRV_NAME, OUTPUT_NAME) \ + TEST_F(FIXTURE, DerivationOutput_##NAME##_from_json) \ + { \ + readTest("output-" #NAME ".json", [&](const auto & encoded_) { \ + auto encoded = json::parse(encoded_); \ + DerivationOutput got = DerivationOutput::fromJSON(*store, DRV_NAME, OUTPUT_NAME, encoded, mockXpSettings); \ + DerivationOutput expected{VAL}; \ + ASSERT_EQ(got, expected); \ + }); \ + } \ + \ + TEST_F(FIXTURE, DerivationOutput_##NAME##_to_json) \ + { \ + writeTest( \ + "output-" #NAME ".json", \ + [&]() -> json { return DerivationOutput{(VAL)}.toJSON(*store, (DRV_NAME), (OUTPUT_NAME)); }, \ + [](const auto & file) { return json::parse(readFile(file)); }, \ + [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ } -TEST_JSON(DerivationTest, inputAddressed, - (DerivationOutput::InputAddressed { +TEST_JSON( + DerivationTest, + inputAddressed, + (DerivationOutput::InputAddressed{ .path = store->parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-drv-name-output-name"), }), - "drv-name", "output-name") + "drv-name", + "output-name") -TEST_JSON(DerivationTest, caFixedFlat, - (DerivationOutput::CAFixed { - .ca = { - .method = ContentAddressMethod::Raw::Flat, - .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="), - }, +TEST_JSON( + DerivationTest, + caFixedFlat, + (DerivationOutput::CAFixed{ + .ca = + { + .method = ContentAddressMethod::Raw::Flat, + .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="), + }, }), - "drv-name", "output-name") + "drv-name", + "output-name") -TEST_JSON(DerivationTest, caFixedNAR, - (DerivationOutput::CAFixed { - .ca = { - .method = ContentAddressMethod::Raw::NixArchive, - .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="), - }, +TEST_JSON( + DerivationTest, + caFixedNAR, + (DerivationOutput::CAFixed{ + .ca = + { + .method = ContentAddressMethod::Raw::NixArchive, + .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="), + }, }), - "drv-name", "output-name") + "drv-name", + "output-name") -TEST_JSON(DynDerivationTest, caFixedText, - (DerivationOutput::CAFixed { - .ca = { - .method = ContentAddressMethod::Raw::Text, - .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="), - }, +TEST_JSON( + DynDerivationTest, + caFixedText, + (DerivationOutput::CAFixed{ + .ca = + { + .method = ContentAddressMethod::Raw::Text, + .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="), + }, }), - "drv-name", "output-name") + "drv-name", + "output-name") -TEST_JSON(CaDerivationTest, caFloating, - (DerivationOutput::CAFloating { +TEST_JSON( + CaDerivationTest, + caFloating, + (DerivationOutput::CAFloating{ .method = ContentAddressMethod::Raw::NixArchive, .hashAlgo = HashAlgorithm::SHA256, }), - "drv-name", "output-name") + "drv-name", + "output-name") -TEST_JSON(DerivationTest, deferred, - DerivationOutput::Deferred { }, - "drv-name", "output-name") +TEST_JSON(DerivationTest, deferred, DerivationOutput::Deferred{}, "drv-name", "output-name") -TEST_JSON(ImpureDerivationTest, impure, - (DerivationOutput::Impure { +TEST_JSON( + ImpureDerivationTest, + impure, + (DerivationOutput::Impure{ .method = ContentAddressMethod::Raw::NixArchive, .hashAlgo = HashAlgorithm::SHA256, }), - "drv-name", "output-name") + "drv-name", + "output-name") #undef TEST_JSON -#define TEST_JSON(FIXTURE, NAME, VAL) \ - TEST_F(FIXTURE, Derivation_ ## NAME ## _from_json) { \ - readTest(#NAME 
".json", [&](const auto & encoded_) { \ - auto encoded = json::parse(encoded_); \ - Derivation expected { VAL }; \ - Derivation got = Derivation::fromJSON( \ - *store, \ - encoded, \ - mockXpSettings); \ - ASSERT_EQ(got, expected); \ - }); \ - } \ - \ - TEST_F(FIXTURE, Derivation_ ## NAME ## _to_json) { \ - writeTest(#NAME ".json", [&]() -> json { \ - return Derivation { VAL }.toJSON(*store); \ - }, [](const auto & file) { \ - return json::parse(readFile(file)); \ - }, [](const auto & file, const auto & got) { \ - return writeFile(file, got.dump(2) + "\n"); \ - }); \ +#define TEST_JSON(FIXTURE, NAME, VAL) \ + TEST_F(FIXTURE, Derivation_##NAME##_from_json) \ + { \ + readTest(#NAME ".json", [&](const auto & encoded_) { \ + auto encoded = json::parse(encoded_); \ + Derivation expected{VAL}; \ + Derivation got = Derivation::fromJSON(*store, encoded, mockXpSettings); \ + ASSERT_EQ(got, expected); \ + }); \ + } \ + \ + TEST_F(FIXTURE, Derivation_##NAME##_to_json) \ + { \ + writeTest( \ + #NAME ".json", \ + [&]() -> json { return Derivation{VAL}.toJSON(*store); }, \ + [](const auto & file) { return json::parse(readFile(file)); }, \ + [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ } -#define TEST_ATERM(FIXTURE, NAME, VAL, DRV_NAME) \ - TEST_F(FIXTURE, Derivation_ ## NAME ## _from_aterm) { \ - readTest(#NAME ".drv", [&](auto encoded) { \ - Derivation expected { VAL }; \ - auto got = parseDerivation( \ - *store, \ - std::move(encoded), \ - DRV_NAME, \ - mockXpSettings); \ - ASSERT_EQ(got.toJSON(*store), expected.toJSON(*store)) ; \ - ASSERT_EQ(got, expected); \ - }); \ - } \ - \ - TEST_F(FIXTURE, Derivation_ ## NAME ## _to_aterm) { \ - writeTest(#NAME ".drv", [&]() -> std::string { \ - return (VAL).unparse(*store, false); \ - }); \ +#define TEST_ATERM(FIXTURE, NAME, VAL, DRV_NAME) \ + TEST_F(FIXTURE, Derivation_##NAME##_from_aterm) \ + { \ + readTest(#NAME ".drv", [&](auto encoded) { \ + Derivation expected{VAL}; \ + auto got = parseDerivation(*store, std::move(encoded), DRV_NAME, mockXpSettings); \ + ASSERT_EQ(got.toJSON(*store), expected.toJSON(*store)); \ + ASSERT_EQ(got, expected); \ + }); \ + } \ + \ + TEST_F(FIXTURE, Derivation_##NAME##_to_aterm) \ + { \ + writeTest(#NAME ".drv", [&]() -> std::string { return (VAL).unparse(*store, false); }); \ } -Derivation makeSimpleDrv(const Store & store) { +Derivation makeSimpleDrv(const Store & store) +{ Derivation drv; drv.name = "simple-derivation"; drv.inputSrcs = { store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1"), }; drv.inputDrvs = { - .map = { + .map = { - store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv"), { - .value = { - "cat", - "dog", + store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv"), + { + .value = + { + "cat", + "dog", + }, }, }, }, - }, }; drv.platform = "wasm-sel4"; drv.builder = "foo"; @@ -231,46 +233,50 @@ Derivation makeSimpleDrv(const Store & store) { TEST_JSON(DerivationTest, simple, makeSimpleDrv(*store)) -TEST_ATERM(DerivationTest, simple, - makeSimpleDrv(*store), - "simple-derivation") +TEST_ATERM(DerivationTest, simple, makeSimpleDrv(*store), "simple-derivation") -Derivation makeDynDepDerivation(const Store & store) { +Derivation makeDynDepDerivation(const Store & store) +{ Derivation drv; drv.name = "dyn-dep-derivation"; drv.inputSrcs = { store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1"), }; drv.inputDrvs = { - .map = { + .map = { - 
store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv"), - DerivedPathMap::ChildNode { - .value = { - "cat", - "dog", - }, - .childMap = { - { - "cat", - DerivedPathMap::ChildNode { - .value = { - "kitten", + { + store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv"), + DerivedPathMap::ChildNode{ + .value = + { + "cat", + "dog", + }, + .childMap = + { + { + "cat", + DerivedPathMap::ChildNode{ + .value = + { + "kitten", + }, + }, + }, + { + "goose", + DerivedPathMap::ChildNode{ + .value = + { + "gosling", + }, + }, }, }, - }, - { - "goose", - DerivedPathMap::ChildNode { - .value = { - "gosling", - }, - }, - }, }, }, }, - }, }; drv.platform = "wasm-sel4"; drv.builder = "foo"; @@ -289,11 +295,9 @@ Derivation makeDynDepDerivation(const Store & store) { TEST_JSON(DynDerivationTest, dynDerivationDeps, makeDynDepDerivation(*store)) -TEST_ATERM(DynDerivationTest, dynDerivationDeps, - makeDynDepDerivation(*store), - "dyn-dep-derivation") +TEST_ATERM(DynDerivationTest, dynDerivationDeps, makeDynDepDerivation(*store), "dyn-dep-derivation") #undef TEST_JSON #undef TEST_ATERM -} +} // namespace nix diff --git a/src/libstore-tests/derived-path.cc b/src/libstore-tests/derived-path.cc index 51df25198..c7d2c5817 100644 --- a/src/libstore-tests/derived-path.cc +++ b/src/libstore-tests/derived-path.cc @@ -9,14 +9,14 @@ namespace nix { class DerivedPathTest : public LibStoreTest -{ -}; +{}; /** * Round trip (string <-> data structure) test for * `DerivedPath::Opaque`. */ -TEST_F(DerivedPathTest, opaque) { +TEST_F(DerivedPathTest, opaque) +{ std::string_view opaque = "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x"; auto elem = DerivedPath::parse(*store, opaque); auto * p = std::get_if(&elem); @@ -29,15 +29,18 @@ TEST_F(DerivedPathTest, opaque) { * Round trip (string <-> data structure) test for a simpler * `DerivedPath::Built`. */ -TEST_F(DerivedPathTest, built_opaque) { +TEST_F(DerivedPathTest, built_opaque) +{ std::string_view built = "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv^bar,foo"; auto elem = DerivedPath::parse(*store, built); auto * p = std::get_if(&elem); ASSERT_TRUE(p); - ASSERT_EQ(p->outputs, ((OutputsSpec) OutputsSpec::Names { "foo", "bar" })); - ASSERT_EQ(*p->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque { - .path = store->parseStorePath(built.substr(0, 49)), - })); + ASSERT_EQ(p->outputs, ((OutputsSpec) OutputsSpec::Names{"foo", "bar"})); + ASSERT_EQ( + *p->drvPath, + ((SingleDerivedPath) SingleDerivedPath::Opaque{ + .path = store->parseStorePath(built.substr(0, 49)), + })); ASSERT_EQ(elem.to_string(*store), built); } @@ -45,7 +48,8 @@ TEST_F(DerivedPathTest, built_opaque) { * Round trip (string <-> data structure) test for a more complex, * inductive `DerivedPath::Built`. */ -TEST_F(DerivedPathTest, built_built) { +TEST_F(DerivedPathTest, built_built) +{ /** * We set these in tests rather than the regular globals so we don't have * to worry about race conditions if the tests run concurrently. 
@@ -57,13 +61,15 @@ TEST_F(DerivedPathTest, built_built) { auto elem = DerivedPath::parse(*store, built, mockXpSettings); auto * p = std::get_if(&elem); ASSERT_TRUE(p); - ASSERT_EQ(p->outputs, ((OutputsSpec) OutputsSpec::Names { "bar", "baz" })); + ASSERT_EQ(p->outputs, ((OutputsSpec) OutputsSpec::Names{"bar", "baz"})); auto * drvPath = std::get_if(&*p->drvPath); ASSERT_TRUE(drvPath); ASSERT_EQ(drvPath->output, "foo"); - ASSERT_EQ(*drvPath->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque { - .path = store->parseStorePath(built.substr(0, 49)), - })); + ASSERT_EQ( + *drvPath->drvPath, + ((SingleDerivedPath) SingleDerivedPath::Opaque{ + .path = store->parseStorePath(built.substr(0, 49)), + })); ASSERT_EQ(elem.to_string(*store), built); } @@ -71,7 +77,8 @@ TEST_F(DerivedPathTest, built_built) { * Without the right experimental features enabled, we cannot parse a * complex inductive derived path. */ -TEST_F(DerivedPathTest, built_built_xp) { +TEST_F(DerivedPathTest, built_built_xp) +{ ASSERT_THROW( DerivedPath::parse(*store, "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv^foo^bar,baz"), MissingExperimentalFeature); @@ -84,20 +91,14 @@ TEST_F(DerivedPathTest, built_built_xp) { path '00000000000000000000000000000000-0^0' is not a valid store path: name '0^0' contains illegal character '^' */ -RC_GTEST_FIXTURE_PROP( - DerivedPathTest, - DISABLED_prop_legacy_round_rip, - (const DerivedPath & o)) +RC_GTEST_FIXTURE_PROP(DerivedPathTest, DISABLED_prop_legacy_round_rip, (const DerivedPath & o)) { ExperimentalFeatureSettings xpSettings; xpSettings.set("experimental-features", "dynamic-derivations"); RC_ASSERT(o == DerivedPath::parseLegacy(*store, o.to_string_legacy(*store), xpSettings)); } -RC_GTEST_FIXTURE_PROP( - DerivedPathTest, - prop_round_rip, - (const DerivedPath & o)) +RC_GTEST_FIXTURE_PROP(DerivedPathTest, prop_round_rip, (const DerivedPath & o)) { ExperimentalFeatureSettings xpSettings; xpSettings.set("experimental-features", "dynamic-derivations"); @@ -106,4 +107,4 @@ RC_GTEST_FIXTURE_PROP( #endif -} +} // namespace nix diff --git a/src/libstore-tests/downstream-placeholder.cc b/src/libstore-tests/downstream-placeholder.cc index 604c80017..4659a0f81 100644 --- a/src/libstore-tests/downstream-placeholder.cc +++ b/src/libstore-tests/downstream-placeholder.cc @@ -4,7 +4,8 @@ namespace nix { -TEST(DownstreamPlaceholder, unknownCaOutput) { +TEST(DownstreamPlaceholder, unknownCaOutput) +{ /** * We set these in tests rather than the regular globals so we don't have * to worry about race conditions if the tests run concurrently. 
@@ -14,13 +15,13 @@ TEST(DownstreamPlaceholder, unknownCaOutput) { ASSERT_EQ( DownstreamPlaceholder::unknownCaOutput( - StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv" }, - "out", - mockXpSettings).render(), + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"}, "out", mockXpSettings) + .render(), "/0c6rn30q4frawknapgwq386zq358m8r6msvywcvc89n6m5p2dgbz"); } -TEST(DownstreamPlaceholder, unknownDerivation) { +TEST(DownstreamPlaceholder, unknownDerivation) +{ /** * Same reason as above */ @@ -30,12 +31,11 @@ TEST(DownstreamPlaceholder, unknownDerivation) { ASSERT_EQ( DownstreamPlaceholder::unknownDerivation( DownstreamPlaceholder::unknownCaOutput( - StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv.drv" }, - "out", - mockXpSettings), + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv.drv"}, "out", mockXpSettings), "out", - mockXpSettings).render(), + mockXpSettings) + .render(), "/0gn6agqxjyyalf0dpihgyf49xq5hqxgw100f0wydnj6yqrhqsb3w"); } -} +} // namespace nix diff --git a/src/libstore-tests/legacy-ssh-store.cc b/src/libstore-tests/legacy-ssh-store.cc index 158da2831..2ff5e69ed 100644 --- a/src/libstore-tests/legacy-ssh-store.cc +++ b/src/libstore-tests/legacy-ssh-store.cc @@ -23,4 +23,4 @@ TEST(LegacySSHStore, constructConfig) "bar", })); } -} +} // namespace nix diff --git a/src/libstore-tests/machines.cc b/src/libstore-tests/machines.cc index f11866e08..72562e6fc 100644 --- a/src/libstore-tests/machines.cc +++ b/src/libstore-tests/machines.cc @@ -13,16 +13,20 @@ using testing::Eq; using testing::Field; using testing::SizeIs; -namespace nix::fs { using namespace std::filesystem; } +namespace nix::fs { +using namespace std::filesystem; +} using namespace nix; -TEST(machines, getMachinesWithEmptyBuilders) { +TEST(machines, getMachinesWithEmptyBuilders) +{ auto actual = Machine::parseConfig({}, ""); ASSERT_THAT(actual, SizeIs(0)); } -TEST(machines, getMachinesUriOnly) { +TEST(machines, getMachinesUriOnly) +{ auto actual = Machine::parseConfig({"TEST_ARCH-TEST_OS"}, "nix@scratchy.labs.cs.uu.nl"); ASSERT_THAT(actual, SizeIs(1)); EXPECT_THAT(actual[0], Field(&Machine::storeUri, Eq(StoreReference::parse("ssh://nix@scratchy.labs.cs.uu.nl")))); @@ -35,7 +39,8 @@ TEST(machines, getMachinesUriOnly) { EXPECT_THAT(actual[0], Field(&Machine::sshPublicHostKey, SizeIs(0))); } -TEST(machines, getMachinesDefaults) { +TEST(machines, getMachinesDefaults) +{ auto actual = Machine::parseConfig({"TEST_ARCH-TEST_OS"}, "nix@scratchy.labs.cs.uu.nl - - - - - - -"); ASSERT_THAT(actual, SizeIs(1)); EXPECT_THAT(actual[0], Field(&Machine::storeUri, Eq(StoreReference::parse("ssh://nix@scratchy.labs.cs.uu.nl")))); @@ -48,33 +53,35 @@ TEST(machines, getMachinesDefaults) { EXPECT_THAT(actual[0], Field(&Machine::sshPublicHostKey, SizeIs(0))); } -MATCHER_P(AuthorityMatches, authority, "") { - *result_listener - << "where the authority of " - << arg.render() - << " is " - << authority; +MATCHER_P(AuthorityMatches, authority, "") +{ + *result_listener << "where the authority of " << arg.render() << " is " << authority; auto * generic = std::get_if(&arg.variant); - if (!generic) return false; + if (!generic) + return false; return generic->authority == authority; } -TEST(machines, getMachinesWithNewLineSeparator) { +TEST(machines, getMachinesWithNewLineSeparator) +{ auto actual = Machine::parseConfig({}, "nix@scratchy.labs.cs.uu.nl\nnix@itchy.labs.cs.uu.nl"); ASSERT_THAT(actual, SizeIs(2)); EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, AuthorityMatches("nix@scratchy.labs.cs.uu.nl")))); EXPECT_THAT(actual, 
Contains(Field(&Machine::storeUri, AuthorityMatches("nix@itchy.labs.cs.uu.nl")))); } -TEST(machines, getMachinesWithSemicolonSeparator) { +TEST(machines, getMachinesWithSemicolonSeparator) +{ auto actual = Machine::parseConfig({}, "nix@scratchy.labs.cs.uu.nl ; nix@itchy.labs.cs.uu.nl"); EXPECT_THAT(actual, SizeIs(2)); EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, AuthorityMatches("nix@scratchy.labs.cs.uu.nl")))); EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, AuthorityMatches("nix@itchy.labs.cs.uu.nl")))); } -TEST(machines, getMachinesWithCommentsAndSemicolonSeparator) { - auto actual = Machine::parseConfig({}, +TEST(machines, getMachinesWithCommentsAndSemicolonSeparator) +{ + auto actual = Machine::parseConfig( + {}, "# This is a comment ; this is still that comment\n" "nix@scratchy.labs.cs.uu.nl ; nix@itchy.labs.cs.uu.nl\n" "# This is also a comment ; this also is still that comment\n" @@ -85,8 +92,10 @@ TEST(machines, getMachinesWithCommentsAndSemicolonSeparator) { EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, AuthorityMatches("nix@scabby.labs.cs.uu.nl")))); } -TEST(machines, getMachinesWithFunnyWhitespace) { - auto actual = Machine::parseConfig({}, +TEST(machines, getMachinesWithFunnyWhitespace) +{ + auto actual = Machine::parseConfig( + {}, " # comment ; comment\n" " nix@scratchy.labs.cs.uu.nl ; nix@itchy.labs.cs.uu.nl \n" "\n \n" @@ -99,8 +108,10 @@ TEST(machines, getMachinesWithFunnyWhitespace) { EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, AuthorityMatches("nix@scabby.labs.cs.uu.nl")))); } -TEST(machines, getMachinesWithCorrectCompleteSingleBuilder) { - auto actual = Machine::parseConfig({}, +TEST(machines, getMachinesWithCorrectCompleteSingleBuilder) +{ + auto actual = Machine::parseConfig( + {}, "nix@scratchy.labs.cs.uu.nl i686-linux " "/home/nix/.ssh/id_scratchy_auto 8 3 kvm " "benchmark SSH+HOST+PUBLIC+KEY+BASE64+ENCODED=="); @@ -115,9 +126,10 @@ TEST(machines, getMachinesWithCorrectCompleteSingleBuilder) { EXPECT_THAT(actual[0], Field(&Machine::sshPublicHostKey, Eq("SSH+HOST+PUBLIC+KEY+BASE64+ENCODED=="))); } -TEST(machines, - getMachinesWithCorrectCompleteSingleBuilderWithTabColumnDelimiter) { - auto actual = Machine::parseConfig({}, +TEST(machines, getMachinesWithCorrectCompleteSingleBuilderWithTabColumnDelimiter) +{ + auto actual = Machine::parseConfig( + {}, "nix@scratchy.labs.cs.uu.nl\ti686-linux\t/home/nix/.ssh/" "id_scratchy_auto\t8\t3\tkvm\tbenchmark\tSSH+HOST+PUBLIC+" "KEY+BASE64+ENCODED=="); @@ -132,8 +144,10 @@ TEST(machines, EXPECT_THAT(actual[0], Field(&Machine::sshPublicHostKey, Eq("SSH+HOST+PUBLIC+KEY+BASE64+ENCODED=="))); } -TEST(machines, getMachinesWithMultiOptions) { - auto actual = Machine::parseConfig({}, +TEST(machines, getMachinesWithMultiOptions) +{ + auto actual = Machine::parseConfig( + {}, "nix@scratchy.labs.cs.uu.nl Arch1,Arch2 - - - " "SupportedFeature1,SupportedFeature2 " "MandatoryFeature1,MandatoryFeature2"); @@ -144,25 +158,17 @@ TEST(machines, getMachinesWithMultiOptions) { EXPECT_THAT(actual[0], Field(&Machine::mandatoryFeatures, ElementsAre("MandatoryFeature1", "MandatoryFeature2"))); } -TEST(machines, getMachinesWithIncorrectFormat) { - EXPECT_THROW( - Machine::parseConfig({}, "nix@scratchy.labs.cs.uu.nl - - eight"), - FormatError); - EXPECT_THROW( - Machine::parseConfig({}, "nix@scratchy.labs.cs.uu.nl - - -1"), - FormatError); - EXPECT_THROW( - Machine::parseConfig({}, "nix@scratchy.labs.cs.uu.nl - - 8 three"), - FormatError); - EXPECT_THROW( - Machine::parseConfig({}, "nix@scratchy.labs.cs.uu.nl - - 8 
-3"), - UsageError); - EXPECT_THROW( - Machine::parseConfig({}, "nix@scratchy.labs.cs.uu.nl - - 8 3 - - BAD_BASE64"), - FormatError); +TEST(machines, getMachinesWithIncorrectFormat) +{ + EXPECT_THROW(Machine::parseConfig({}, "nix@scratchy.labs.cs.uu.nl - - eight"), FormatError); + EXPECT_THROW(Machine::parseConfig({}, "nix@scratchy.labs.cs.uu.nl - - -1"), FormatError); + EXPECT_THROW(Machine::parseConfig({}, "nix@scratchy.labs.cs.uu.nl - - 8 three"), FormatError); + EXPECT_THROW(Machine::parseConfig({}, "nix@scratchy.labs.cs.uu.nl - - 8 -3"), UsageError); + EXPECT_THROW(Machine::parseConfig({}, "nix@scratchy.labs.cs.uu.nl - - 8 3 - - BAD_BASE64"), FormatError); } -TEST(machines, getMachinesWithCorrectFileReference) { +TEST(machines, getMachinesWithCorrectFileReference) +{ auto path = std::filesystem::weakly_canonical(getUnitTestData() / "machines/valid"); ASSERT_TRUE(std::filesystem::exists(path)); @@ -173,7 +179,8 @@ TEST(machines, getMachinesWithCorrectFileReference) { EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, AuthorityMatches("nix@poochie.labs.cs.uu.nl")))); } -TEST(machines, getMachinesWithCorrectFileReferenceToEmptyFile) { +TEST(machines, getMachinesWithCorrectFileReferenceToEmptyFile) +{ std::filesystem::path path = "/dev/null"; ASSERT_TRUE(std::filesystem::exists(path)); @@ -181,15 +188,18 @@ TEST(machines, getMachinesWithCorrectFileReferenceToEmptyFile) { ASSERT_THAT(actual, SizeIs(0)); } -TEST(machines, getMachinesWithIncorrectFileReference) { +TEST(machines, getMachinesWithIncorrectFileReference) +{ auto path = std::filesystem::weakly_canonical("/not/a/file"); ASSERT_TRUE(!std::filesystem::exists(path)); auto actual = Machine::parseConfig({}, "@" + path.string()); ASSERT_THAT(actual, SizeIs(0)); } -TEST(machines, getMachinesWithCorrectFileReferenceToIncorrectFile) { +TEST(machines, getMachinesWithCorrectFileReferenceToIncorrectFile) +{ EXPECT_THROW( - Machine::parseConfig({}, "@" + std::filesystem::weakly_canonical(getUnitTestData() / "machines" / "bad_format").string()), + Machine::parseConfig( + {}, "@" + std::filesystem::weakly_canonical(getUnitTestData() / "machines" / "bad_format").string()), FormatError); } diff --git a/src/libstore-tests/nar-info-disk-cache.cc b/src/libstore-tests/nar-info-disk-cache.cc index 4c7354c0c..98a94b91e 100644 --- a/src/libstore-tests/nar-info-disk-cache.cc +++ b/src/libstore-tests/nar-info-disk-cache.cc @@ -5,10 +5,10 @@ #include "nix/store/sqlite.hh" #include - namespace nix { -TEST(NarInfoDiskCacheImpl, create_and_read) { +TEST(NarInfoDiskCacheImpl, create_and_read) +{ // This is a large single test to avoid some setup overhead. int prio = 12345; @@ -36,7 +36,8 @@ TEST(NarInfoDiskCacheImpl, create_and_read) { // Check that the fields are saved and returned correctly. This does not test // the select statement yet, because of in-memory caching. 
- savedId = cache->createCache("http://foo", "/nix/storedir", wantMassQuery, prio);; + savedId = cache->createCache("http://foo", "/nix/storedir", wantMassQuery, prio); + ; { auto r = cache->upToDateCacheExists("http://foo"); ASSERT_TRUE(r); @@ -120,4 +121,4 @@ TEST(NarInfoDiskCacheImpl, create_and_read) { } } -} +} // namespace nix diff --git a/src/libstore-tests/nar-info.cc b/src/libstore-tests/nar-info.cc index 1979deef8..a73df1190 100644 --- a/src/libstore-tests/nar-info.cc +++ b/src/libstore-tests/nar-info.cc @@ -15,38 +15,42 @@ class NarInfoTest : public CharacterizationTest, public LibStoreTest { std::filesystem::path unitTestData = getUnitTestData() / "nar-info"; - std::filesystem::path goldenMaster(PathView testStem) const override { + std::filesystem::path goldenMaster(PathView testStem) const override + { return unitTestData / (testStem + ".json"); } }; -static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) { - NarInfo info = ValidPathInfo { +static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) +{ + NarInfo info = ValidPathInfo{ store, "foo", - FixedOutputInfo { + FixedOutputInfo{ .method = FileIngestionMethod::NixArchive, .hash = hashString(HashAlgorithm::SHA256, "(...)"), - .references = { - .others = { - StorePath { - "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", - }, + .references = + { + .others = + { + StorePath{ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + }, + }, + .self = true, }, - .self = true, - }, }, Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }; info.narSize = 34878; if (includeImpureInfo) { - info.deriver = StorePath { + info.deriver = StorePath{ "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", }; info.registrationTime = 23423; info.ultimate = true; - info.sigs = { "asdf", "qwer" }; + info.sigs = {"asdf", "qwer"}; info.url = "nar/1w1fff338fvdw53sqgamddn1b2xgds473pv6y13gizdbqjv4i5p3.nar.xz"; info.compression = "xz"; @@ -56,31 +60,27 @@ static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) { return info; } -#define JSON_TEST(STEM, PURE) \ - TEST_F(NarInfoTest, NarInfo_ ## STEM ## _from_json) { \ - readTest(#STEM, [&](const auto & encoded_) { \ - auto encoded = json::parse(encoded_); \ - auto expected = makeNarInfo(*store, PURE); \ - NarInfo got = NarInfo::fromJSON( \ - *store, \ - expected.path, \ - encoded); \ - ASSERT_EQ(got, expected); \ - }); \ - } \ - \ - TEST_F(NarInfoTest, NarInfo_ ## STEM ## _to_json) { \ - writeTest(#STEM, [&]() -> json { \ - return makeNarInfo(*store, PURE) \ - .toJSON(*store, PURE, HashFormat::SRI); \ - }, [](const auto & file) { \ - return json::parse(readFile(file)); \ - }, [](const auto & file, const auto & got) { \ - return writeFile(file, got.dump(2) + "\n"); \ - }); \ +#define JSON_TEST(STEM, PURE) \ + TEST_F(NarInfoTest, NarInfo_##STEM##_from_json) \ + { \ + readTest(#STEM, [&](const auto & encoded_) { \ + auto encoded = json::parse(encoded_); \ + auto expected = makeNarInfo(*store, PURE); \ + NarInfo got = NarInfo::fromJSON(*store, expected.path, encoded); \ + ASSERT_EQ(got, expected); \ + }); \ + } \ + \ + TEST_F(NarInfoTest, NarInfo_##STEM##_to_json) \ + { \ + writeTest( \ + #STEM, \ + [&]() -> json { return makeNarInfo(*store, PURE).toJSON(*store, PURE, HashFormat::SRI); }, \ + [](const auto & file) { return json::parse(readFile(file)); }, \ + [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ } JSON_TEST(pure, false) JSON_TEST(impure, true) -} +} // namespace nix diff --git a/src/libstore-tests/outputs-spec.cc 
b/src/libstore-tests/outputs-spec.cc index 12f285e0d..b0b80e7c4 100644 --- a/src/libstore-tests/outputs-spec.cc +++ b/src/libstore-tests/outputs-spec.cc @@ -6,15 +6,16 @@ namespace nix { -TEST(OutputsSpec, no_empty_names) { - ASSERT_DEATH(OutputsSpec::Names { StringSet { } }, ""); +TEST(OutputsSpec, no_empty_names) +{ + ASSERT_DEATH(OutputsSpec::Names{StringSet{}}, ""); } -#define TEST_DONT_PARSE(NAME, STR) \ - TEST(OutputsSpec, bad_ ## NAME) { \ - std::optional OutputsSpecOpt = \ - OutputsSpec::parseOpt(STR); \ - ASSERT_FALSE(OutputsSpecOpt); \ +#define TEST_DONT_PARSE(NAME, STR) \ + TEST(OutputsSpec, bad_##NAME) \ + { \ + std::optional OutputsSpecOpt = OutputsSpec::parseOpt(STR); \ + ASSERT_FALSE(OutputsSpecOpt); \ } TEST_DONT_PARSE(empty, "") @@ -25,96 +26,109 @@ TEST_DONT_PARSE(star_second, "foo,*") #undef TEST_DONT_PARSE -TEST(OutputsSpec, all) { +TEST(OutputsSpec, all) +{ std::string_view str = "*"; - OutputsSpec expected = OutputsSpec::All { }; + OutputsSpec expected = OutputsSpec::All{}; ASSERT_EQ(OutputsSpec::parse(str), expected); ASSERT_EQ(expected.to_string(), str); } -TEST(OutputsSpec, names_out) { +TEST(OutputsSpec, names_out) +{ std::string_view str = "out"; - OutputsSpec expected = OutputsSpec::Names { "out" }; + OutputsSpec expected = OutputsSpec::Names{"out"}; ASSERT_EQ(OutputsSpec::parse(str), expected); ASSERT_EQ(expected.to_string(), str); } -TEST(OutputsSpec, names_underscore) { +TEST(OutputsSpec, names_underscore) +{ std::string_view str = "a_b"; - OutputsSpec expected = OutputsSpec::Names { "a_b" }; + OutputsSpec expected = OutputsSpec::Names{"a_b"}; ASSERT_EQ(OutputsSpec::parse(str), expected); ASSERT_EQ(expected.to_string(), str); } -TEST(OutputsSpec, names_numeric) { +TEST(OutputsSpec, names_numeric) +{ std::string_view str = "01"; - OutputsSpec expected = OutputsSpec::Names { "01" }; + OutputsSpec expected = OutputsSpec::Names{"01"}; ASSERT_EQ(OutputsSpec::parse(str), expected); ASSERT_EQ(expected.to_string(), str); } -TEST(OutputsSpec, names_out_bin) { - OutputsSpec expected = OutputsSpec::Names { "out", "bin" }; +TEST(OutputsSpec, names_out_bin) +{ + OutputsSpec expected = OutputsSpec::Names{"out", "bin"}; ASSERT_EQ(OutputsSpec::parse("out,bin"), expected); // N.B. This normalization is OK. 
ASSERT_EQ(expected.to_string(), "bin,out"); } -#define TEST_SUBSET(X, THIS, THAT) \ - X((OutputsSpec { THIS }).isSubsetOf(THAT)); +#define TEST_SUBSET(X, THIS, THAT) X((OutputsSpec{THIS}).isSubsetOf(THAT)); -TEST(OutputsSpec, subsets_all_all) { - TEST_SUBSET(ASSERT_TRUE, OutputsSpec::All { }, OutputsSpec::All { }); +TEST(OutputsSpec, subsets_all_all) +{ + TEST_SUBSET(ASSERT_TRUE, OutputsSpec::All{}, OutputsSpec::All{}); } -TEST(OutputsSpec, subsets_names_all) { - TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names { "a" }, OutputsSpec::All { }); +TEST(OutputsSpec, subsets_names_all) +{ + TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names{"a"}, OutputsSpec::All{}); } -TEST(OutputsSpec, subsets_names_names_eq) { - TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names { "a" }, OutputsSpec::Names { "a" }); +TEST(OutputsSpec, subsets_names_names_eq) +{ + TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names{"a"}, OutputsSpec::Names{"a"}); } -TEST(OutputsSpec, subsets_names_names_noneq) { - TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names { "a" }, (OutputsSpec::Names { "a", "b" })); +TEST(OutputsSpec, subsets_names_names_noneq) +{ + TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names{"a"}, (OutputsSpec::Names{"a", "b"})); } -TEST(OutputsSpec, not_subsets_all_names) { - TEST_SUBSET(ASSERT_FALSE, OutputsSpec::All { }, OutputsSpec::Names { "a" }); +TEST(OutputsSpec, not_subsets_all_names) +{ + TEST_SUBSET(ASSERT_FALSE, OutputsSpec::All{}, OutputsSpec::Names{"a"}); } -TEST(OutputsSpec, not_subsets_names_names) { - TEST_SUBSET(ASSERT_FALSE, (OutputsSpec::Names { "a", "b" }), (OutputsSpec::Names { "a" })); +TEST(OutputsSpec, not_subsets_names_names) +{ + TEST_SUBSET(ASSERT_FALSE, (OutputsSpec::Names{"a", "b"}), (OutputsSpec::Names{"a"})); } #undef TEST_SUBSET -#define TEST_UNION(RES, THIS, THAT) \ - ASSERT_EQ(OutputsSpec { RES }, (OutputsSpec { THIS }).union_(THAT)); +#define TEST_UNION(RES, THIS, THAT) ASSERT_EQ(OutputsSpec{RES}, (OutputsSpec{THIS}).union_(THAT)); -TEST(OutputsSpec, union_all_all) { - TEST_UNION(OutputsSpec::All { }, OutputsSpec::All { }, OutputsSpec::All { }); +TEST(OutputsSpec, union_all_all) +{ + TEST_UNION(OutputsSpec::All{}, OutputsSpec::All{}, OutputsSpec::All{}); } -TEST(OutputsSpec, union_all_names) { - TEST_UNION(OutputsSpec::All { }, OutputsSpec::All { }, OutputsSpec::Names { "a" }); +TEST(OutputsSpec, union_all_names) +{ + TEST_UNION(OutputsSpec::All{}, OutputsSpec::All{}, OutputsSpec::Names{"a"}); } -TEST(OutputsSpec, union_names_all) { - TEST_UNION(OutputsSpec::All { }, OutputsSpec::Names { "a" }, OutputsSpec::All { }); +TEST(OutputsSpec, union_names_all) +{ + TEST_UNION(OutputsSpec::All{}, OutputsSpec::Names{"a"}, OutputsSpec::All{}); } -TEST(OutputsSpec, union_names_names) { - TEST_UNION((OutputsSpec::Names { "a", "b" }), OutputsSpec::Names { "a" }, OutputsSpec::Names { "b" }); +TEST(OutputsSpec, union_names_names) +{ + TEST_UNION((OutputsSpec::Names{"a", "b"}), OutputsSpec::Names{"a"}, OutputsSpec::Names{"b"}); } #undef TEST_UNION -#define TEST_DONT_PARSE(NAME, STR) \ - TEST(ExtendedOutputsSpec, bad_ ## NAME) { \ - std::optional extendedOutputsSpecOpt = \ - ExtendedOutputsSpec::parseOpt(STR); \ - ASSERT_FALSE(extendedOutputsSpecOpt); \ +#define TEST_DONT_PARSE(NAME, STR) \ + TEST(ExtendedOutputsSpec, bad_##NAME) \ + { \ + std::optional extendedOutputsSpecOpt = ExtendedOutputsSpec::parseOpt(STR); \ + ASSERT_FALSE(extendedOutputsSpecOpt); \ } TEST_DONT_PARSE(carot_empty, "^") @@ -126,87 +140,86 @@ TEST_DONT_PARSE(star_second, "^foo,*") #undef TEST_DONT_PARSE -TEST(ExtendedOutputsSpec, default) { 
+TEST(ExtendedOutputsSpec, default) +{ std::string_view str = "foo"; auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(str); ASSERT_EQ(prefix, "foo"); - ExtendedOutputsSpec expected = ExtendedOutputsSpec::Default { }; + ExtendedOutputsSpec expected = ExtendedOutputsSpec::Default{}; ASSERT_EQ(extendedOutputsSpec, expected); - ASSERT_EQ(std::string { prefix } + expected.to_string(), str); + ASSERT_EQ(std::string{prefix} + expected.to_string(), str); } -TEST(ExtendedOutputsSpec, all) { +TEST(ExtendedOutputsSpec, all) +{ std::string_view str = "foo^*"; auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(str); ASSERT_EQ(prefix, "foo"); - ExtendedOutputsSpec expected = OutputsSpec::All { }; + ExtendedOutputsSpec expected = OutputsSpec::All{}; ASSERT_EQ(extendedOutputsSpec, expected); - ASSERT_EQ(std::string { prefix } + expected.to_string(), str); + ASSERT_EQ(std::string{prefix} + expected.to_string(), str); } -TEST(ExtendedOutputsSpec, out) { +TEST(ExtendedOutputsSpec, out) +{ std::string_view str = "foo^out"; auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(str); ASSERT_EQ(prefix, "foo"); - ExtendedOutputsSpec expected = OutputsSpec::Names { "out" }; + ExtendedOutputsSpec expected = OutputsSpec::Names{"out"}; ASSERT_EQ(extendedOutputsSpec, expected); - ASSERT_EQ(std::string { prefix } + expected.to_string(), str); + ASSERT_EQ(std::string{prefix} + expected.to_string(), str); } -TEST(ExtendedOutputsSpec, out_bin) { +TEST(ExtendedOutputsSpec, out_bin) +{ auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse("foo^out,bin"); ASSERT_EQ(prefix, "foo"); - ExtendedOutputsSpec expected = OutputsSpec::Names { "out", "bin" }; + ExtendedOutputsSpec expected = OutputsSpec::Names{"out", "bin"}; ASSERT_EQ(extendedOutputsSpec, expected); - ASSERT_EQ(std::string { prefix } + expected.to_string(), "foo^bin,out"); + ASSERT_EQ(std::string{prefix} + expected.to_string(), "foo^bin,out"); } -TEST(ExtendedOutputsSpec, many_carrot) { +TEST(ExtendedOutputsSpec, many_carrot) +{ auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse("foo^bar^out,bin"); ASSERT_EQ(prefix, "foo^bar"); - ExtendedOutputsSpec expected = OutputsSpec::Names { "out", "bin" }; + ExtendedOutputsSpec expected = OutputsSpec::Names{"out", "bin"}; ASSERT_EQ(extendedOutputsSpec, expected); - ASSERT_EQ(std::string { prefix } + expected.to_string(), "foo^bar^bin,out"); + ASSERT_EQ(std::string{prefix} + expected.to_string(), "foo^bar^bin,out"); } - -#define TEST_JSON(TYPE, NAME, STR, VAL) \ - \ - TEST(TYPE, NAME ## _to_json) { \ - using nlohmann::literals::operator "" _json; \ - ASSERT_EQ( \ - STR ## _json, \ - ((nlohmann::json) TYPE { VAL })); \ - } \ - \ - TEST(TYPE, NAME ## _from_json) { \ - using nlohmann::literals::operator "" _json; \ - ASSERT_EQ( \ - TYPE { VAL }, \ - (STR ## _json).get()); \ +#define TEST_JSON(TYPE, NAME, STR, VAL) \ + \ + TEST(TYPE, NAME##_to_json) \ + { \ + using nlohmann::literals::operator"" _json; \ + ASSERT_EQ(STR##_json, ((nlohmann::json) TYPE{VAL})); \ + } \ + \ + TEST(TYPE, NAME##_from_json) \ + { \ + using nlohmann::literals::operator"" _json; \ + ASSERT_EQ(TYPE{VAL}, (STR##_json).get()); \ } -TEST_JSON(OutputsSpec, all, R"(["*"])", OutputsSpec::All { }) -TEST_JSON(OutputsSpec, name, R"(["a"])", OutputsSpec::Names { "a" }) -TEST_JSON(OutputsSpec, names, R"(["a","b"])", (OutputsSpec::Names { "a", "b" })) +TEST_JSON(OutputsSpec, all, R"(["*"])", OutputsSpec::All{}) +TEST_JSON(OutputsSpec, name, R"(["a"])", OutputsSpec::Names{"a"}) +TEST_JSON(OutputsSpec, names, 
R"(["a","b"])", (OutputsSpec::Names{"a", "b"})) -TEST_JSON(ExtendedOutputsSpec, def, R"(null)", ExtendedOutputsSpec::Default { }) -TEST_JSON(ExtendedOutputsSpec, all, R"(["*"])", ExtendedOutputsSpec::Explicit { OutputsSpec::All { } }) -TEST_JSON(ExtendedOutputsSpec, name, R"(["a"])", ExtendedOutputsSpec::Explicit { OutputsSpec::Names { "a" } }) -TEST_JSON(ExtendedOutputsSpec, names, R"(["a","b"])", (ExtendedOutputsSpec::Explicit { OutputsSpec::Names { "a", "b" } })) +TEST_JSON(ExtendedOutputsSpec, def, R"(null)", ExtendedOutputsSpec::Default{}) +TEST_JSON(ExtendedOutputsSpec, all, R"(["*"])", ExtendedOutputsSpec::Explicit{OutputsSpec::All{}}) +TEST_JSON(ExtendedOutputsSpec, name, R"(["a"])", ExtendedOutputsSpec::Explicit{OutputsSpec::Names{"a"}}) +TEST_JSON(ExtendedOutputsSpec, names, R"(["a","b"])", (ExtendedOutputsSpec::Explicit{OutputsSpec::Names{"a", "b"}})) #undef TEST_JSON #ifndef COVERAGE -RC_GTEST_PROP( - OutputsSpec, - prop_round_rip, - (const OutputsSpec & o)) +RC_GTEST_PROP(OutputsSpec, prop_round_rip, (const OutputsSpec & o)) { RC_ASSERT(o == OutputsSpec::parse(o.to_string())); } #endif -} +} // namespace nix diff --git a/src/libstore-tests/path-info.cc b/src/libstore-tests/path-info.cc index a7699f7ad..de5c95150 100644 --- a/src/libstore-tests/path-info.cc +++ b/src/libstore-tests/path-info.cc @@ -14,7 +14,8 @@ class PathInfoTest : public CharacterizationTest, public LibStoreTest { std::filesystem::path unitTestData = getUnitTestData() / "path-info"; - std::filesystem::path goldenMaster(PathView testStem) const override { + std::filesystem::path goldenMaster(PathView testStem) const override + { return unitTestData / (testStem + ".json"); } }; @@ -28,59 +29,61 @@ static UnkeyedValidPathInfo makeEmpty() static ValidPathInfo makeFullKeyed(const Store & store, bool includeImpureInfo) { - ValidPathInfo info = ValidPathInfo { + ValidPathInfo info = ValidPathInfo{ store, "foo", - FixedOutputInfo { + FixedOutputInfo{ .method = FileIngestionMethod::NixArchive, .hash = hashString(HashAlgorithm::SHA256, "(...)"), - .references = { - .others = { - StorePath { - "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", - }, + .references = + { + .others = + { + StorePath{ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + }, + }, + .self = true, }, - .self = true, - }, }, Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }; info.narSize = 34878; if (includeImpureInfo) { - info.deriver = StorePath { + info.deriver = StorePath{ "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", }; info.registrationTime = 23423; info.ultimate = true; - info.sigs = { "asdf", "qwer" }; + info.sigs = {"asdf", "qwer"}; } return info; } -static UnkeyedValidPathInfo makeFull(const Store & store, bool includeImpureInfo) { + +static UnkeyedValidPathInfo makeFull(const Store & store, bool includeImpureInfo) +{ return makeFullKeyed(store, includeImpureInfo); } -#define JSON_TEST(STEM, OBJ, PURE) \ - TEST_F(PathInfoTest, PathInfo_ ## STEM ## _from_json) { \ - readTest(#STEM, [&](const auto & encoded_) { \ - auto encoded = json::parse(encoded_); \ - UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON( \ - *store, \ - encoded); \ - auto expected = OBJ; \ - ASSERT_EQ(got, expected); \ - }); \ - } \ - \ - TEST_F(PathInfoTest, PathInfo_ ## STEM ## _to_json) { \ - writeTest(#STEM, [&]() -> json { \ - return OBJ.toJSON(*store, PURE, HashFormat::SRI); \ - }, [](const auto & file) { \ - return json::parse(readFile(file)); \ - }, [](const auto & file, const auto & got) { \ - return writeFile(file, got.dump(2) + "\n"); \ - }); \ 
+#define JSON_TEST(STEM, OBJ, PURE) \ + TEST_F(PathInfoTest, PathInfo_##STEM##_from_json) \ + { \ + readTest(#STEM, [&](const auto & encoded_) { \ + auto encoded = json::parse(encoded_); \ + UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON(*store, encoded); \ + auto expected = OBJ; \ + ASSERT_EQ(got, expected); \ + }); \ + } \ + \ + TEST_F(PathInfoTest, PathInfo_##STEM##_to_json) \ + { \ + writeTest( \ + #STEM, \ + [&]() -> json { return OBJ.toJSON(*store, PURE, HashFormat::SRI); }, \ + [](const auto & file) { return json::parse(readFile(file)); }, \ + [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ } JSON_TEST(empty_pure, makeEmpty(), false) @@ -89,7 +92,8 @@ JSON_TEST(empty_impure, makeEmpty(), true) JSON_TEST(pure, makeFull(*store, false), false) JSON_TEST(impure, makeFull(*store, true), true) -TEST_F(PathInfoTest, PathInfo_full_shortRefs) { +TEST_F(PathInfoTest, PathInfo_full_shortRefs) +{ ValidPathInfo it = makeFullKeyed(*store, true); // it.references = unkeyed.references; auto refs = it.shortRefs(); diff --git a/src/libstore-tests/path.cc b/src/libstore-tests/path.cc index 4da73a0ad..01d1ca792 100644 --- a/src/libstore-tests/path.cc +++ b/src/libstore-tests/path.cc @@ -17,29 +17,20 @@ namespace nix { #define HASH_PART "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q" class StorePathTest : public LibStoreTest -{ -}; +{}; -static std::regex nameRegex { std::string { nameRegexStr } }; +static std::regex nameRegex{std::string{nameRegexStr}}; -#define TEST_DONT_PARSE(NAME, STR) \ - TEST_F(StorePathTest, bad_ ## NAME) { \ - std::string_view str = \ - STORE_DIR HASH_PART "-" STR; \ - /* ASSERT_THROW generates a duplicate goto label */ \ - /* A lambda isolates those labels. */ \ - [&](){ \ - ASSERT_THROW( \ - store->parseStorePath(str), \ - BadStorePath); \ - }(); \ - std::string name { STR }; \ - [&](){ \ - ASSERT_THROW( \ - nix::checkName(name), \ - BadStorePathName); \ - }(); \ - EXPECT_FALSE(std::regex_match(name, nameRegex)); \ +#define TEST_DONT_PARSE(NAME, STR) \ + TEST_F(StorePathTest, bad_##NAME) \ + { \ + std::string_view str = STORE_DIR HASH_PART "-" STR; \ + /* ASSERT_THROW generates a duplicate goto label */ \ + /* A lambda isolates those labels. */ \ + [&]() { ASSERT_THROW(store->parseStorePath(str), BadStorePath); }(); \ + std::string name{STR}; \ + [&]() { ASSERT_THROW(nix::checkName(name), BadStorePathName); }(); \ + EXPECT_FALSE(std::regex_match(name, nameRegex)); \ } TEST_DONT_PARSE(empty, "") @@ -57,14 +48,14 @@ TEST_DONT_PARSE(dot_dash_a, ".-a") #undef TEST_DONT_PARSE -#define TEST_DO_PARSE(NAME, STR) \ - TEST_F(StorePathTest, good_ ## NAME) { \ - std::string_view str = \ - STORE_DIR HASH_PART "-" STR; \ - auto p = store->parseStorePath(str); \ - std::string name { p.name() }; \ - EXPECT_EQ(p.name(), STR); \ - EXPECT_TRUE(std::regex_match(name, nameRegex)); \ +#define TEST_DO_PARSE(NAME, STR) \ + TEST_F(StorePathTest, good_##NAME) \ + { \ + std::string_view str = STORE_DIR HASH_PART "-" STR; \ + auto p = store->parseStorePath(str); \ + std::string name{p.name()}; \ + EXPECT_EQ(p.name(), STR); \ + EXPECT_TRUE(std::regex_match(name, nameRegex)); \ } // 0-9 a-z A-Z + - . _ ? 
= @@ -88,67 +79,46 @@ TEST_DO_PARSE(triple_dot, "...") #ifndef COVERAGE -RC_GTEST_FIXTURE_PROP( - StorePathTest, - prop_regex_accept, - (const StorePath & p)) +RC_GTEST_FIXTURE_PROP(StorePathTest, prop_regex_accept, (const StorePath & p)) { - RC_ASSERT(std::regex_match(std::string { p.name() }, nameRegex)); + RC_ASSERT(std::regex_match(std::string{p.name()}, nameRegex)); } -RC_GTEST_FIXTURE_PROP( - StorePathTest, - prop_round_rip, - (const StorePath & p)) +RC_GTEST_FIXTURE_PROP(StorePathTest, prop_round_rip, (const StorePath & p)) { RC_ASSERT(p == store->parseStorePath(store->printStorePath(p))); } - -RC_GTEST_FIXTURE_PROP( - StorePathTest, - prop_check_regex_eq_parse, - ()) +RC_GTEST_FIXTURE_PROP(StorePathTest, prop_check_regex_eq_parse, ()) { - static auto nameFuzzer = - rc::gen::container( - rc::gen::oneOf( - // alphanum, repeated to weigh heavier - rc::gen::oneOf( - rc::gen::inRange('0', '9'), - rc::gen::inRange('a', 'z'), - rc::gen::inRange('A', 'Z') - ), - // valid symbols - rc::gen::oneOf( - rc::gen::just('+'), - rc::gen::just('-'), - rc::gen::just('.'), - rc::gen::just('_'), - rc::gen::just('?'), - rc::gen::just('=') - ), - // symbols for scary .- and ..- cases, repeated for weight - rc::gen::just('.'), rc::gen::just('.'), - rc::gen::just('.'), rc::gen::just('.'), - rc::gen::just('-'), rc::gen::just('-'), - // ascii symbol ranges - rc::gen::oneOf( - rc::gen::inRange(' ', '/'), - rc::gen::inRange(':', '@'), - rc::gen::inRange('[', '`'), - rc::gen::inRange('{', '~') - ), - // typical whitespace - rc::gen::oneOf( - rc::gen::just(' '), - rc::gen::just('\t'), - rc::gen::just('\n'), - rc::gen::just('\r') - ), - // some chance of control codes, non-ascii or other garbage we missed - rc::gen::inRange('\0', '\xff') - )); + static auto nameFuzzer = rc::gen::container(rc::gen::oneOf( + // alphanum, repeated to weigh heavier + rc::gen::oneOf(rc::gen::inRange('0', '9'), rc::gen::inRange('a', 'z'), rc::gen::inRange('A', 'Z')), + // valid symbols + rc::gen::oneOf( + rc::gen::just('+'), + rc::gen::just('-'), + rc::gen::just('.'), + rc::gen::just('_'), + rc::gen::just('?'), + rc::gen::just('=')), + // symbols for scary .- and ..- cases, repeated for weight + rc::gen::just('.'), + rc::gen::just('.'), + rc::gen::just('.'), + rc::gen::just('.'), + rc::gen::just('-'), + rc::gen::just('-'), + // ascii symbol ranges + rc::gen::oneOf( + rc::gen::inRange(' ', '/'), + rc::gen::inRange(':', '@'), + rc::gen::inRange('[', '`'), + rc::gen::inRange('{', '~')), + // typical whitespace + rc::gen::oneOf(rc::gen::just(' '), rc::gen::just('\t'), rc::gen::just('\n'), rc::gen::just('\r')), + // some chance of control codes, non-ascii or other garbage we missed + rc::gen::inRange('\0', '\xff'))); auto name = *nameFuzzer; @@ -159,9 +129,9 @@ RC_GTEST_FIXTURE_PROP( parsed = true; } catch (const BadStorePath &) { } - RC_ASSERT(parsed == std::regex_match(std::string { name }, nameRegex)); + RC_ASSERT(parsed == std::regex_match(std::string{name}, nameRegex)); } #endif -} +} // namespace nix diff --git a/src/libstore-tests/references.cc b/src/libstore-tests/references.cc index 59993727d..c7b706c68 100644 --- a/src/libstore-tests/references.cc +++ b/src/libstore-tests/references.cc @@ -42,4 +42,4 @@ TEST(references, scan) } } -} +} // namespace nix diff --git a/src/libstore-tests/serve-protocol.cc b/src/libstore-tests/serve-protocol.cc index 69dab5488..62acb061d 100644 --- a/src/libstore-tests/serve-protocol.cc +++ b/src/libstore-tests/serve-protocol.cc @@ -30,7 +30,7 @@ VERSIONED_CHARACTERIZATION_TEST( string, "string", 
defaultVersion, - (std::tuple { + (std::tuple{ "", "hi", "white rabbit", @@ -45,9 +45,9 @@ VERSIONED_CHARACTERIZATION_TEST( storePath, "store-path", defaultVersion, - (std::tuple { - StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, - StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" }, + (std::tuple{ + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar"}, })) VERSIONED_CHARACTERIZATION_TEST( @@ -55,16 +55,16 @@ VERSIONED_CHARACTERIZATION_TEST( contentAddress, "content-address", defaultVersion, - (std::tuple { - ContentAddress { + (std::tuple{ + ContentAddress{ .method = ContentAddressMethod::Raw::Text, .hash = hashString(HashAlgorithm::SHA256, "Derive(...)"), }, - ContentAddress { + ContentAddress{ .method = ContentAddressMethod::Raw::Flat, .hash = hashString(HashAlgorithm::SHA1, "blob blob..."), }, - ContentAddress { + ContentAddress{ .method = ContentAddressMethod::Raw::NixArchive, .hash = hashString(HashAlgorithm::SHA256, "(...)"), }, @@ -75,12 +75,12 @@ VERSIONED_CHARACTERIZATION_TEST( drvOutput, "drv-output", defaultVersion, - (std::tuple { + (std::tuple{ { .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), .outputName = "baz", }, - DrvOutput { + DrvOutput{ .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), .outputName = "quux", }, @@ -93,70 +93,88 @@ VERSIONED_CHARACTERIZATION_TEST( realisation, "realisation", defaultVersion, - (std::tuple { - Realisation { - .id = DrvOutput { - .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), - .outputName = "baz", - }, - .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, - .signatures = { "asdf", "qwer" }, - }, - Realisation { - .id = { - .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), - .outputName = "baz", - }, - .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, - .signatures = { "asdf", "qwer" }, - .dependentRealisations = { - { - DrvOutput { - .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "quux", - }, - StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + (std::tuple{ + Realisation{ + .id = + DrvOutput{ + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + .signatures = {"asdf", "qwer"}, + }, + Realisation{ + .id = + { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + .signatures = {"asdf", "qwer"}, + .dependentRealisations = + { + { + DrvOutput{ + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "quux", + }, + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, }, - }, }, })) -VERSIONED_CHARACTERIZATION_TEST( - ServeProtoTest, - buildResult_2_2, - "build-result-2.2", - 2 << 8 | 2, - ({ - using namespace std::literals::chrono_literals; - std::tuple t { - BuildResult { - .status = BuildResult::OutputRejected, - .errorMsg = "no idea why", - }, - BuildResult { - .status = BuildResult::NotDeterministic, - .errorMsg = "no idea why", - }, - BuildResult { - .status = BuildResult::Built, - }, - }; - t; - })) +VERSIONED_CHARACTERIZATION_TEST(ServeProtoTest, buildResult_2_2, "build-result-2.2", 2 << 8 | 2, ({ + using namespace std::literals::chrono_literals; + std::tuple t{ + 
BuildResult{ + .status = BuildResult::OutputRejected, + .errorMsg = "no idea why", + }, + BuildResult{ + .status = BuildResult::NotDeterministic, + .errorMsg = "no idea why", + }, + BuildResult{ + .status = BuildResult::Built, + }, + }; + t; + })) + +VERSIONED_CHARACTERIZATION_TEST(ServeProtoTest, buildResult_2_3, "build-result-2.3", 2 << 8 | 3, ({ + using namespace std::literals::chrono_literals; + std::tuple t{ + BuildResult{ + .status = BuildResult::OutputRejected, + .errorMsg = "no idea why", + }, + BuildResult{ + .status = BuildResult::NotDeterministic, + .errorMsg = "no idea why", + .timesBuilt = 3, + .isNonDeterministic = true, + .startTime = 30, + .stopTime = 50, + }, + BuildResult{ + .status = BuildResult::Built, + .startTime = 30, + .stopTime = 50, + }, + }; + t; + })) VERSIONED_CHARACTERIZATION_TEST( - ServeProtoTest, - buildResult_2_3, - "build-result-2.3", - 2 << 8 | 3, - ({ + ServeProtoTest, buildResult_2_6, "build-result-2.6", 2 << 8 | 6, ({ using namespace std::literals::chrono_literals; - std::tuple t { - BuildResult { + std::tuple t{ + BuildResult{ .status = BuildResult::OutputRejected, .errorMsg = "no idea why", }, - BuildResult { + BuildResult{ .status = BuildResult::NotDeterministic, .errorMsg = "no idea why", .timesBuilt = 3, @@ -164,60 +182,36 @@ VERSIONED_CHARACTERIZATION_TEST( .startTime = 30, .stopTime = 50, }, - BuildResult { - .status = BuildResult::Built, - .startTime = 30, - .stopTime = 50, - }, - }; - t; - })) - -VERSIONED_CHARACTERIZATION_TEST( - ServeProtoTest, - buildResult_2_6, - "build-result-2.6", - 2 << 8 | 6, - ({ - using namespace std::literals::chrono_literals; - std::tuple t { - BuildResult { - .status = BuildResult::OutputRejected, - .errorMsg = "no idea why", - }, - BuildResult { - .status = BuildResult::NotDeterministic, - .errorMsg = "no idea why", - .timesBuilt = 3, - .isNonDeterministic = true, - .startTime = 30, - .stopTime = 50, - }, - BuildResult { + BuildResult{ .status = BuildResult::Built, .timesBuilt = 1, - .builtOutputs = { + .builtOutputs = { - "foo", { - .id = DrvOutput { - .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "foo", + "foo", + { + .id = + DrvOutput{ + .drvHash = + Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "foo", + }, + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, + }, + { + "bar", + { + .id = + DrvOutput{ + .drvHash = + Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "bar", + }, + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"}, }, - .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, }, }, - { - "bar", - { - .id = DrvOutput { - .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "bar", - }, - .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar" }, - }, - }, - }, .startTime = 30, .stopTime = 50, #if 0 @@ -237,19 +231,19 @@ VERSIONED_CHARACTERIZATION_TEST( unkeyedValidPathInfo_2_3, "unkeyed-valid-path-info-2.3", 2 << 8 | 3, - (std::tuple { + (std::tuple{ ({ - UnkeyedValidPathInfo info { Hash::dummy }; + UnkeyedValidPathInfo info{Hash::dummy}; info.narSize = 34878; info; }), ({ - UnkeyedValidPathInfo info { Hash::dummy }; - info.deriver = StorePath { + UnkeyedValidPathInfo info{Hash::dummy}; + info.deriver = StorePath{ "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", }; info.references = { - StorePath { + StorePath{ "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo.drv", }, }; @@ -263,16 +257,16 @@ 
VERSIONED_CHARACTERIZATION_TEST( unkeyedValidPathInfo_2_4, "unkeyed-valid-path-info-2.4", 2 << 8 | 4, - (std::tuple { + (std::tuple{ ({ - UnkeyedValidPathInfo info { + UnkeyedValidPathInfo info{ Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }; - info.deriver = StorePath { + info.deriver = StorePath{ "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", }; info.references = { - StorePath { + StorePath{ "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo.drv", }, }; @@ -280,31 +274,34 @@ VERSIONED_CHARACTERIZATION_TEST( info; }), ({ - ValidPathInfo info { + ValidPathInfo info{ *LibStoreTest::store, "foo", - FixedOutputInfo { + FixedOutputInfo{ .method = FileIngestionMethod::NixArchive, .hash = hashString(HashAlgorithm::SHA256, "(...)"), - .references = { - .others = { - StorePath { - "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", - }, + .references = + { + .others = + { + StorePath{ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + }, + }, + .self = true, }, - .self = true, - }, }, Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }; - info.deriver = StorePath { + info.deriver = StorePath{ "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", }; info.narSize = 34878; - info.sigs = { - "fake-sig-1", - "fake-sig-2", - }, + info.sigs = + { + "fake-sig-1", + "fake-sig-2", + }, static_cast(std::move(info)); }), })) @@ -314,7 +311,7 @@ VERSIONED_CHARACTERIZATION_TEST( build_options_2_1, "build-options-2.1", 2 << 8 | 1, - (ServeProto::BuildOptions { + (ServeProto::BuildOptions{ .maxSilentTime = 5, .buildTimeout = 6, })) @@ -324,7 +321,7 @@ VERSIONED_CHARACTERIZATION_TEST( build_options_2_2, "build-options-2.2", 2 << 8 | 2, - (ServeProto::BuildOptions { + (ServeProto::BuildOptions{ .maxSilentTime = 5, .buildTimeout = 6, .maxLogSize = 7, @@ -335,7 +332,7 @@ VERSIONED_CHARACTERIZATION_TEST( build_options_2_3, "build-options-2.3", 2 << 8 | 3, - (ServeProto::BuildOptions { + (ServeProto::BuildOptions{ .maxSilentTime = 5, .buildTimeout = 6, .maxLogSize = 7, @@ -348,7 +345,7 @@ VERSIONED_CHARACTERIZATION_TEST( build_options_2_7, "build-options-2.7", 2 << 8 | 7, - (ServeProto::BuildOptions { + (ServeProto::BuildOptions{ .maxSilentTime = 5, .buildTimeout = 6, .maxLogSize = 7, @@ -362,11 +359,15 @@ VERSIONED_CHARACTERIZATION_TEST( vector, "vector", defaultVersion, - (std::tuple, std::vector, std::vector, std::vector>> { - { }, - { "" }, - { "", "foo", "bar" }, - { {}, { "" }, { "", "1", "2" } }, + (std::tuple< + std::vector, + std::vector, + std::vector, + std::vector>>{ + {}, + {""}, + {"", "foo", "bar"}, + {{}, {""}, {"", "1", "2"}}, })) VERSIONED_CHARACTERIZATION_TEST( @@ -374,11 +375,11 @@ VERSIONED_CHARACTERIZATION_TEST( set, "set", defaultVersion, - (std::tuple> { - { }, - { "" }, - { "", "foo", "bar" }, - { {}, { "" }, { "", "1", "2" } }, + (std::tuple>{ + {}, + {""}, + {"", "foo", "bar"}, + {{}, {""}, {"", "1", "2"}}, })) VERSIONED_CHARACTERIZATION_TEST( @@ -386,10 +387,10 @@ VERSIONED_CHARACTERIZATION_TEST( optionalStorePath, "optional-store-path", defaultVersion, - (std::tuple, std::optional> { + (std::tuple, std::optional>{ std::nullopt, - std::optional { - StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" }, + std::optional{ + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar"}, }, })) @@ -398,10 +399,10 @@ VERSIONED_CHARACTERIZATION_TEST( optionalContentAddress, "optional-content-address", defaultVersion, - (std::tuple, std::optional> { + (std::tuple, std::optional>{ std::nullopt, - std::optional { - ContentAddress { + std::optional{ + ContentAddress{ .method = 
ContentAddressMethod::Raw::Flat, .hash = hashString(HashAlgorithm::SHA1, "blob blob..."), }, @@ -420,18 +421,16 @@ TEST_F(ServeProtoTest, handshake_log) ServeProto::Version clientResult; auto thread = std::thread([&]() { - FdSink out { toServer.writeSide.get() }; - FdSource in0 { toClient.readSide.get() }; - TeeSource in { in0, toClientLog }; - clientResult = ServeProto::BasicClientConnection::handshake( - out, in, defaultVersion, "blah"); + FdSink out{toServer.writeSide.get()}; + FdSource in0{toClient.readSide.get()}; + TeeSource in{in0, toClientLog}; + clientResult = ServeProto::BasicClientConnection::handshake(out, in, defaultVersion, "blah"); }); { - FdSink out { toClient.writeSide.get() }; - FdSource in { toServer.readSide.get() }; - ServeProto::BasicServerConnection::handshake( - out, in, defaultVersion); + FdSink out{toClient.writeSide.get()}; + FdSource in{toServer.readSide.get()}; + ServeProto::BasicServerConnection::handshake(out, in, defaultVersion); }; thread.join(); @@ -441,8 +440,9 @@ TEST_F(ServeProtoTest, handshake_log) } /// Has to be a `BufferedSink` for handshake. -struct NullBufferedSink : BufferedSink { - void writeUnbuffered(std::string_view data) override { } +struct NullBufferedSink : BufferedSink +{ + void writeUnbuffered(std::string_view data) override {} }; TEST_F(ServeProtoTest, handshake_client_replay) @@ -450,9 +450,8 @@ TEST_F(ServeProtoTest, handshake_client_replay) CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) { NullBufferedSink nullSink; - StringSource in { toClientLog }; - auto clientResult = ServeProto::BasicClientConnection::handshake( - nullSink, in, defaultVersion, "blah"); + StringSource in{toClientLog}; + auto clientResult = ServeProto::BasicClientConnection::handshake(nullSink, in, defaultVersion, "blah"); EXPECT_EQ(clientResult, defaultVersion); }); @@ -486,23 +485,18 @@ TEST_F(ServeProtoTest, handshake_client_corrupted_throws) ++toClientLogCorrupt[idx]; NullBufferedSink nullSink; - StringSource in { toClientLogCorrupt }; + StringSource in{toClientLogCorrupt}; if (idx < 4 || idx == 9) { // magic bytes don't match - EXPECT_THROW( - ServeProto::BasicClientConnection::handshake( - nullSink, in, defaultVersion, "blah"), - Error); + EXPECT_THROW(ServeProto::BasicClientConnection::handshake(nullSink, in, defaultVersion, "blah"), Error); } else if (idx < 8 || idx >= 12) { // Number out of bounds EXPECT_THROW( - ServeProto::BasicClientConnection::handshake( - nullSink, in, defaultVersion, "blah"), + ServeProto::BasicClientConnection::handshake(nullSink, in, defaultVersion, "blah"), SerialisationError); } else { - auto ver = ServeProto::BasicClientConnection::handshake( - nullSink, in, defaultVersion, "blah"); + auto ver = ServeProto::BasicClientConnection::handshake(nullSink, in, defaultVersion, "blah"); // `std::min` of this and the other version saves us EXPECT_EQ(ver, defaultVersion); } @@ -510,4 +504,4 @@ TEST_F(ServeProtoTest, handshake_client_corrupted_throws) }); } -} +} // namespace nix diff --git a/src/libstore-tests/store-reference.cc b/src/libstore-tests/store-reference.cc index dd1b83090..f8c3587d2 100644 --- a/src/libstore-tests/store-reference.cc +++ b/src/libstore-tests/store-reference.cc @@ -120,4 +120,4 @@ URI_TEST( .params = {}, })) -} +} // namespace nix diff --git a/src/libstore-tests/worker-protocol.cc b/src/libstore-tests/worker-protocol.cc index 4baf8a325..28190cc9d 100644 --- a/src/libstore-tests/worker-protocol.cc +++ b/src/libstore-tests/worker-protocol.cc @@ -25,13 +25,12 @@ struct 
WorkerProtoTest : VersionedProtoTest WorkerProto::Version defaultVersion = 1 << 8 | 10; }; - VERSIONED_CHARACTERIZATION_TEST( WorkerProtoTest, string, "string", defaultVersion, - (std::tuple { + (std::tuple{ "", "hi", "white rabbit", @@ -46,9 +45,9 @@ VERSIONED_CHARACTERIZATION_TEST( storePath, "store-path", defaultVersion, - (std::tuple { - StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, - StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" }, + (std::tuple{ + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar"}, })) VERSIONED_CHARACTERIZATION_TEST( @@ -56,16 +55,16 @@ VERSIONED_CHARACTERIZATION_TEST( contentAddress, "content-address", defaultVersion, - (std::tuple { - ContentAddress { + (std::tuple{ + ContentAddress{ .method = ContentAddressMethod::Raw::Text, .hash = hashString(HashAlgorithm::SHA256, "Derive(...)"), }, - ContentAddress { + ContentAddress{ .method = ContentAddressMethod::Raw::Flat, .hash = hashString(HashAlgorithm::SHA1, "blob blob..."), }, - ContentAddress { + ContentAddress{ .method = ContentAddressMethod::Raw::NixArchive, .hash = hashString(HashAlgorithm::SHA256, "(...)"), }, @@ -78,21 +77,23 @@ VERSIONED_CHARACTERIZATION_TEST( derivedPath_1_29, "derived-path-1.29", 1 << 8 | 29, - (std::tuple { - DerivedPath::Opaque { - .path = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + (std::tuple{ + DerivedPath::Opaque{ + .path = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, }, - DerivedPath::Built { - .drvPath = makeConstantStorePathRef(StorePath { - "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", - }), - .outputs = OutputsSpec::All { }, + DerivedPath::Built{ + .drvPath = makeConstantStorePathRef( + StorePath{ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }), + .outputs = OutputsSpec::All{}, }, - DerivedPath::Built { - .drvPath = makeConstantStorePathRef(StorePath { - "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", - }), - .outputs = OutputsSpec::Names { "x", "y" }, + DerivedPath::Built{ + .drvPath = makeConstantStorePathRef( + StorePath{ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }), + .outputs = OutputsSpec::Names{"x", "y"}, }, })) @@ -101,24 +102,26 @@ VERSIONED_CHARACTERIZATION_TEST( derivedPath_1_30, "derived-path-1.30", 1 << 8 | 30, - (std::tuple { - DerivedPath::Opaque { - .path = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + (std::tuple{ + DerivedPath::Opaque{ + .path = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, }, - DerivedPath::Opaque { - .path = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv" }, + DerivedPath::Opaque{ + .path = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"}, }, - DerivedPath::Built { - .drvPath = makeConstantStorePathRef(StorePath { - "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", - }), - .outputs = OutputsSpec::All { }, + DerivedPath::Built{ + .drvPath = makeConstantStorePathRef( + StorePath{ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }), + .outputs = OutputsSpec::All{}, }, - DerivedPath::Built { - .drvPath = makeConstantStorePathRef(StorePath { - "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", - }), - .outputs = OutputsSpec::Names { "x", "y" }, + DerivedPath::Built{ + .drvPath = makeConstantStorePathRef( + StorePath{ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }), + .outputs = OutputsSpec::Names{"x", "y"}, }, })) @@ -127,12 +130,12 @@ VERSIONED_CHARACTERIZATION_TEST( drvOutput, "drv-output", defaultVersion, - (std::tuple { + (std::tuple{ { .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), .outputName = "baz", 
}, - DrvOutput { + DrvOutput{ .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), .outputName = "quux", }, @@ -143,115 +146,110 @@ VERSIONED_CHARACTERIZATION_TEST( realisation, "realisation", defaultVersion, - (std::tuple { - Realisation { - .id = DrvOutput { - .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), - .outputName = "baz", - }, - .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, - .signatures = { "asdf", "qwer" }, + (std::tuple{ + Realisation{ + .id = + DrvOutput{ + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + .signatures = {"asdf", "qwer"}, }, - Realisation { - .id = { - .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), - .outputName = "baz", - }, - .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, - .signatures = { "asdf", "qwer" }, - .dependentRealisations = { + Realisation{ + .id = { - DrvOutput { - .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "quux", - }, - StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + .signatures = {"asdf", "qwer"}, + .dependentRealisations = + { + { + DrvOutput{ + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "quux", + }, + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, }, - }, }, })) +VERSIONED_CHARACTERIZATION_TEST(WorkerProtoTest, buildResult_1_27, "build-result-1.27", 1 << 8 | 27, ({ + using namespace std::literals::chrono_literals; + std::tuple t{ + BuildResult{ + .status = BuildResult::OutputRejected, + .errorMsg = "no idea why", + }, + BuildResult{ + .status = BuildResult::NotDeterministic, + .errorMsg = "no idea why", + }, + BuildResult{ + .status = BuildResult::Built, + }, + }; + t; + })) + VERSIONED_CHARACTERIZATION_TEST( - WorkerProtoTest, - buildResult_1_27, - "build-result-1.27", - 1 << 8 | 27, - ({ + WorkerProtoTest, buildResult_1_28, "build-result-1.28", 1 << 8 | 28, ({ using namespace std::literals::chrono_literals; - std::tuple t { - BuildResult { + std::tuple t{ + BuildResult{ .status = BuildResult::OutputRejected, .errorMsg = "no idea why", }, - BuildResult { + BuildResult{ .status = BuildResult::NotDeterministic, .errorMsg = "no idea why", }, - BuildResult { + BuildResult{ .status = BuildResult::Built, + .builtOutputs = + { + { + "foo", + { + .id = + DrvOutput{ + .drvHash = + Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "foo", + }, + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, + }, + { + "bar", + { + .id = + DrvOutput{ + .drvHash = + Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "bar", + }, + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"}, + }, + }, + }, }, }; t; })) VERSIONED_CHARACTERIZATION_TEST( - WorkerProtoTest, - buildResult_1_28, - "build-result-1.28", - 1 << 8 | 28, - ({ + WorkerProtoTest, buildResult_1_29, "build-result-1.29", 1 << 8 | 29, ({ using namespace std::literals::chrono_literals; - std::tuple t { - BuildResult { + std::tuple t{ + BuildResult{ .status = BuildResult::OutputRejected, .errorMsg = "no idea why", }, - BuildResult { - .status = 
BuildResult::NotDeterministic, - .errorMsg = "no idea why", - }, - BuildResult { - .status = BuildResult::Built, - .builtOutputs = { - { - "foo", - { - .id = DrvOutput { - .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "foo", - }, - .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, - }, - }, - { - "bar", - { - .id = DrvOutput { - .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "bar", - }, - .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar" }, - }, - }, - }, - }, - }; - t; - })) - -VERSIONED_CHARACTERIZATION_TEST( - WorkerProtoTest, - buildResult_1_29, - "build-result-1.29", - 1 << 8 | 29, - ({ - using namespace std::literals::chrono_literals; - std::tuple t { - BuildResult { - .status = BuildResult::OutputRejected, - .errorMsg = "no idea why", - }, - BuildResult { + BuildResult{ .status = BuildResult::NotDeterministic, .errorMsg = "no idea why", .timesBuilt = 3, @@ -259,31 +257,36 @@ VERSIONED_CHARACTERIZATION_TEST( .startTime = 30, .stopTime = 50, }, - BuildResult { + BuildResult{ .status = BuildResult::Built, .timesBuilt = 1, - .builtOutputs = { + .builtOutputs = { - "foo", { - .id = DrvOutput { - .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "foo", + "foo", + { + .id = + DrvOutput{ + .drvHash = + Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "foo", + }, + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, + }, + { + "bar", + { + .id = + DrvOutput{ + .drvHash = + Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "bar", + }, + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"}, }, - .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, }, }, - { - "bar", - { - .id = DrvOutput { - .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "bar", - }, - .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar" }, - }, - }, - }, .startTime = 30, .stopTime = 50, }, @@ -292,18 +295,14 @@ VERSIONED_CHARACTERIZATION_TEST( })) VERSIONED_CHARACTERIZATION_TEST( - WorkerProtoTest, - buildResult_1_37, - "build-result-1.37", - 1 << 8 | 37, - ({ + WorkerProtoTest, buildResult_1_37, "build-result-1.37", 1 << 8 | 37, ({ using namespace std::literals::chrono_literals; - std::tuple t { - BuildResult { + std::tuple t{ + BuildResult{ .status = BuildResult::OutputRejected, .errorMsg = "no idea why", }, - BuildResult { + BuildResult{ .status = BuildResult::NotDeterministic, .errorMsg = "no idea why", .timesBuilt = 3, @@ -311,31 +310,36 @@ VERSIONED_CHARACTERIZATION_TEST( .startTime = 30, .stopTime = 50, }, - BuildResult { + BuildResult{ .status = BuildResult::Built, .timesBuilt = 1, - .builtOutputs = { + .builtOutputs = { - "foo", { - .id = DrvOutput { - .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "foo", + "foo", + { + .id = + DrvOutput{ + .drvHash = + Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "foo", + }, + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, + }, + { + "bar", + { + .id = + DrvOutput{ + .drvHash = + Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "bar", + }, + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"}, }, - .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, }, }, - { - "bar", - { 
- .id = DrvOutput { - .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), - .outputName = "bar", - }, - .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar" }, - }, - }, - }, .startTime = 30, .stopTime = 50, .cpuUser = std::chrono::microseconds(500s), @@ -345,51 +349,49 @@ VERSIONED_CHARACTERIZATION_TEST( t; })) -VERSIONED_CHARACTERIZATION_TEST( - WorkerProtoTest, - keyedBuildResult_1_29, - "keyed-build-result-1.29", - 1 << 8 | 29, - ({ - using namespace std::literals::chrono_literals; - std::tuple t { - KeyedBuildResult { - { - .status = KeyedBuildResult::OutputRejected, - .errorMsg = "no idea why", - }, - /* .path = */ DerivedPath::Opaque { - StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-xxx" }, - }, - }, - KeyedBuildResult { - { - .status = KeyedBuildResult::NotDeterministic, - .errorMsg = "no idea why", - .timesBuilt = 3, - .isNonDeterministic = true, - .startTime = 30, - .stopTime = 50, - }, - /* .path = */ DerivedPath::Built { - .drvPath = makeConstantStorePathRef(StorePath { - "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", - }), - .outputs = OutputsSpec::Names { "out" }, - }, - }, - }; - t; - })) +VERSIONED_CHARACTERIZATION_TEST(WorkerProtoTest, keyedBuildResult_1_29, "keyed-build-result-1.29", 1 << 8 | 29, ({ + using namespace std::literals::chrono_literals; + std::tuple t{ + KeyedBuildResult{ + { + .status = KeyedBuildResult::OutputRejected, + .errorMsg = "no idea why", + }, + /* .path = */ + DerivedPath::Opaque{ + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-xxx"}, + }, + }, + KeyedBuildResult{ + { + .status = KeyedBuildResult::NotDeterministic, + .errorMsg = "no idea why", + .timesBuilt = 3, + .isNonDeterministic = true, + .startTime = 30, + .stopTime = 50, + }, + /* .path = */ + DerivedPath::Built{ + .drvPath = makeConstantStorePathRef( + StorePath{ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }), + .outputs = OutputsSpec::Names{"out"}, + }, + }, + }; + t; + })) VERSIONED_CHARACTERIZATION_TEST( WorkerProtoTest, unkeyedValidPathInfo_1_15, "unkeyed-valid-path-info-1.15", 1 << 8 | 15, - (std::tuple { + (std::tuple{ ({ - UnkeyedValidPathInfo info { + UnkeyedValidPathInfo info{ Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }; info.registrationTime = 23423; @@ -397,14 +399,14 @@ VERSIONED_CHARACTERIZATION_TEST( info; }), ({ - UnkeyedValidPathInfo info { + UnkeyedValidPathInfo info{ Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }; - info.deriver = StorePath { + info.deriver = StorePath{ "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", }; info.references = { - StorePath { + StorePath{ "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo.drv", }, }; @@ -419,13 +421,13 @@ VERSIONED_CHARACTERIZATION_TEST( validPathInfo_1_15, "valid-path-info-1.15", 1 << 8 | 15, - (std::tuple { + (std::tuple{ ({ - ValidPathInfo info { - StorePath { + ValidPathInfo info{ + StorePath{ "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", }, - UnkeyedValidPathInfo { + UnkeyedValidPathInfo{ Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }, }; @@ -434,24 +436,24 @@ VERSIONED_CHARACTERIZATION_TEST( info; }), ({ - ValidPathInfo info { - StorePath { + ValidPathInfo info{ + StorePath{ "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", }, - UnkeyedValidPathInfo { + UnkeyedValidPathInfo{ Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }, }; - info.deriver = StorePath { + info.deriver = StorePath{ "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", }; info.references = { // other reference - StorePath { + StorePath{ 
"g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo", }, // self reference - StorePath { + StorePath{ "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", }, }; @@ -466,13 +468,13 @@ VERSIONED_CHARACTERIZATION_TEST( validPathInfo_1_16, "valid-path-info-1.16", 1 << 8 | 16, - (std::tuple { + (std::tuple{ ({ - ValidPathInfo info { - StorePath { + ValidPathInfo info{ + StorePath{ "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", }, - UnkeyedValidPathInfo { + UnkeyedValidPathInfo{ Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }, }; @@ -482,50 +484,53 @@ VERSIONED_CHARACTERIZATION_TEST( info; }), ({ - ValidPathInfo info { - StorePath { + ValidPathInfo info{ + StorePath{ "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", }, - UnkeyedValidPathInfo { + UnkeyedValidPathInfo{ Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }, }; - info.deriver = StorePath { + info.deriver = StorePath{ "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", }; info.references = { // other reference - StorePath { + StorePath{ "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo", }, // self reference - StorePath { + StorePath{ "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", }, }; info.registrationTime = 23423; info.narSize = 34878; - info.sigs = { - "fake-sig-1", - "fake-sig-2", - }, + info.sigs = + { + "fake-sig-1", + "fake-sig-2", + }, info; }), ({ - ValidPathInfo info { + ValidPathInfo info{ *LibStoreTest::store, "foo", - FixedOutputInfo { + FixedOutputInfo{ .method = FileIngestionMethod::NixArchive, .hash = hashString(HashAlgorithm::SHA256, "(...)"), - .references = { - .others = { - StorePath { - "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", - }, + .references = + { + .others = + { + StorePath{ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + }, + }, + .self = true, }, - .self = true, - }, }, Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), }; @@ -540,7 +545,7 @@ VERSIONED_CHARACTERIZATION_TEST( buildMode, "build-mode", defaultVersion, - (std::tuple { + (std::tuple{ bmNormal, bmRepair, bmCheck, @@ -551,10 +556,10 @@ VERSIONED_CHARACTERIZATION_TEST( optionalTrustedFlag, "optional-trusted-flag", defaultVersion, - (std::tuple, std::optional, std::optional> { + (std::tuple, std::optional, std::optional>{ std::nullopt, - std::optional { Trusted }, - std::optional { NotTrusted }, + std::optional{Trusted}, + std::optional{NotTrusted}, })) VERSIONED_CHARACTERIZATION_TEST( @@ -562,11 +567,15 @@ VERSIONED_CHARACTERIZATION_TEST( vector, "vector", defaultVersion, - (std::tuple, std::vector, std::vector, std::vector>> { - { }, - { "" }, - { "", "foo", "bar" }, - { {}, { "" }, { "", "1", "2" } }, + (std::tuple< + std::vector, + std::vector, + std::vector, + std::vector>>{ + {}, + {""}, + {"", "foo", "bar"}, + {{}, {""}, {"", "1", "2"}}, })) VERSIONED_CHARACTERIZATION_TEST( @@ -574,11 +583,11 @@ VERSIONED_CHARACTERIZATION_TEST( set, "set", defaultVersion, - (std::tuple> { - { }, - { "" }, - { "", "foo", "bar" }, - { {}, { "" }, { "", "1", "2" } }, + (std::tuple>{ + {}, + {""}, + {"", "foo", "bar"}, + {{}, {""}, {"", "1", "2"}}, })) VERSIONED_CHARACTERIZATION_TEST( @@ -586,10 +595,10 @@ VERSIONED_CHARACTERIZATION_TEST( optionalStorePath, "optional-store-path", defaultVersion, - (std::tuple, std::optional> { + (std::tuple, std::optional>{ std::nullopt, - std::optional { - StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" }, + std::optional{ + StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar"}, }, })) @@ -598,10 +607,10 @@ VERSIONED_CHARACTERIZATION_TEST( optionalContentAddress, "optional-content-address", defaultVersion, - (std::tuple, 
std::optional> { + (std::tuple, std::optional>{ std::nullopt, - std::optional { - ContentAddress { + std::optional{ + ContentAddress{ .method = ContentAddressMethod::Raw::Flat, .hash = hashString(HashAlgorithm::SHA1, "blob blob..."), }, @@ -613,7 +622,7 @@ VERSIONED_CHARACTERIZATION_TEST( clientHandshakeInfo_1_30, "client-handshake-info_1_30", 1 << 8 | 30, - (std::tuple { + (std::tuple{ {}, })) @@ -622,12 +631,12 @@ VERSIONED_CHARACTERIZATION_TEST( clientHandshakeInfo_1_33, "client-handshake-info_1_33", 1 << 8 | 33, - (std::tuple { + (std::tuple{ { - .daemonNixVersion = std::optional { "foo" }, + .daemonNixVersion = std::optional{"foo"}, }, { - .daemonNixVersion = std::optional { "bar" }, + .daemonNixVersion = std::optional{"bar"}, }, })) @@ -636,14 +645,14 @@ VERSIONED_CHARACTERIZATION_TEST( clientHandshakeInfo_1_35, "client-handshake-info_1_35", 1 << 8 | 35, - (std::tuple { + (std::tuple{ { - .daemonNixVersion = std::optional { "foo" }, - .remoteTrustsUs = std::optional { NotTrusted }, + .daemonNixVersion = std::optional{"foo"}, + .remoteTrustsUs = std::optional{NotTrusted}, }, { - .daemonNixVersion = std::optional { "bar" }, - .remoteTrustsUs = std::optional { Trusted }, + .daemonNixVersion = std::optional{"bar"}, + .remoteTrustsUs = std::optional{Trusted}, }, })) @@ -659,18 +668,16 @@ TEST_F(WorkerProtoTest, handshake_log) WorkerProto::Version clientResult; auto thread = std::thread([&]() { - FdSink out { toServer.writeSide.get() }; - FdSource in0 { toClient.readSide.get() }; - TeeSource in { in0, toClientLog }; - clientResult = std::get<0>(WorkerProto::BasicClientConnection::handshake( - out, in, defaultVersion, {})); + FdSink out{toServer.writeSide.get()}; + FdSource in0{toClient.readSide.get()}; + TeeSource in{in0, toClientLog}; + clientResult = std::get<0>(WorkerProto::BasicClientConnection::handshake(out, in, defaultVersion, {})); }); { - FdSink out { toClient.writeSide.get() }; - FdSource in { toServer.readSide.get() }; - WorkerProto::BasicServerConnection::handshake( - out, in, defaultVersion, {}); + FdSink out{toClient.writeSide.get()}; + FdSource in{toServer.readSide.get()}; + WorkerProto::BasicServerConnection::handshake(out, in, defaultVersion, {}); }; thread.join(); @@ -688,16 +695,14 @@ TEST_F(WorkerProtoTest, handshake_features) std::tuple clientResult; auto clientThread = std::thread([&]() { - FdSink out { toServer.writeSide.get() }; - FdSource in { toClient.readSide.get() }; - clientResult = WorkerProto::BasicClientConnection::handshake( - out, in, 123, {"bar", "aap", "mies", "xyzzy"}); + FdSink out{toServer.writeSide.get()}; + FdSource in{toClient.readSide.get()}; + clientResult = WorkerProto::BasicClientConnection::handshake(out, in, 123, {"bar", "aap", "mies", "xyzzy"}); }); - FdSink out { toClient.writeSide.get() }; - FdSource in { toServer.readSide.get() }; - auto daemonResult = WorkerProto::BasicServerConnection::handshake( - out, in, 456, {"foo", "bar", "xyzzy"}); + FdSink out{toClient.writeSide.get()}; + FdSource in{toServer.readSide.get()}; + auto daemonResult = WorkerProto::BasicServerConnection::handshake(out, in, 456, {"foo", "bar", "xyzzy"}); clientThread.join(); @@ -707,8 +712,9 @@ TEST_F(WorkerProtoTest, handshake_features) } /// Has to be a `BufferedSink` for handshake. 
-struct NullBufferedSink : BufferedSink { - void writeUnbuffered(std::string_view data) override { } +struct NullBufferedSink : BufferedSink +{ + void writeUnbuffered(std::string_view data) override {} }; TEST_F(WorkerProtoTest, handshake_client_replay) @@ -716,9 +722,9 @@ TEST_F(WorkerProtoTest, handshake_client_replay) CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) { NullBufferedSink nullSink; - StringSource in { toClientLog }; - auto clientResult = std::get<0>(WorkerProto::BasicClientConnection::handshake( - nullSink, in, defaultVersion, {})); + StringSource in{toClientLog}; + auto clientResult = + std::get<0>(WorkerProto::BasicClientConnection::handshake(nullSink, in, defaultVersion, {})); EXPECT_EQ(clientResult, defaultVersion); }); @@ -752,23 +758,18 @@ TEST_F(WorkerProtoTest, handshake_client_corrupted_throws) ++toClientLogCorrupt[idx]; NullBufferedSink nullSink; - StringSource in { toClientLogCorrupt }; + StringSource in{toClientLogCorrupt}; if (idx < 4 || idx == 9) { // magic bytes don't match - EXPECT_THROW( - WorkerProto::BasicClientConnection::handshake( - nullSink, in, defaultVersion, {}), - Error); + EXPECT_THROW(WorkerProto::BasicClientConnection::handshake(nullSink, in, defaultVersion, {}), Error); } else if (idx < 8 || idx >= 12) { // Number out of bounds EXPECT_THROW( - WorkerProto::BasicClientConnection::handshake( - nullSink, in, defaultVersion, {}), + WorkerProto::BasicClientConnection::handshake(nullSink, in, defaultVersion, {}), SerialisationError); } else { - auto ver = std::get<0>(WorkerProto::BasicClientConnection::handshake( - nullSink, in, defaultVersion, {})); + auto ver = std::get<0>(WorkerProto::BasicClientConnection::handshake(nullSink, in, defaultVersion, {})); // `std::min` of this and the other version saves us EXPECT_EQ(ver, defaultVersion); } @@ -776,4 +777,4 @@ TEST_F(WorkerProtoTest, handshake_client_corrupted_throws) }); } -} +} // namespace nix diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 4df9651f0..5ac446639 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -28,15 +28,13 @@ BinaryCacheStore::BinaryCacheStore(Config & config) : config{config} { if (config.secretKeyFile != "") - signers.push_back(std::make_unique( - SecretKey { readFile(config.secretKeyFile) })); + signers.push_back(std::make_unique(SecretKey{readFile(config.secretKeyFile)})); if (config.secretKeyFiles != "") { std::stringstream ss(config.secretKeyFiles); Path keyPath; while (std::getline(ss, keyPath, ',')) { - signers.push_back(std::make_unique( - SecretKey { readFile(keyPath) })); + signers.push_back(std::make_unique(SecretKey{readFile(keyPath)})); } } @@ -53,13 +51,14 @@ void BinaryCacheStore::init() } else { for (auto & line : tokenizeString(*cacheInfo, "\n")) { size_t colon = line.find(':'); - if (colon == std::string::npos) continue; + if (colon == std::string::npos) + continue; auto name = line.substr(0, colon); auto value = trim(line.substr(colon + 1, std::string::npos)); if (name == "StoreDir") { if (value != storeDir) - throw Error("binary cache '%s' is for Nix stores with prefix '%s', not '%s'", - getUri(), value, storeDir); + throw Error( + "binary cache '%s' is for Nix stores with prefix '%s', not '%s'", getUri(), value, storeDir); } else if (name == "WantMassQuery") { config.wantMassQuery.setDefault(value == "1"); } else if (name == "Priority") { @@ -74,32 +73,30 @@ std::optional BinaryCacheStore::getNixCacheInfo() return getFile(cacheInfoFile); } 
-void BinaryCacheStore::upsertFile(const std::string & path, - std::string && data, - const std::string & mimeType) +void BinaryCacheStore::upsertFile(const std::string & path, std::string && data, const std::string & mimeType) { upsertFile(path, std::make_shared(std::move(data)), mimeType); } -void BinaryCacheStore::getFile(const std::string & path, - Callback> callback) noexcept +void BinaryCacheStore::getFile(const std::string & path, Callback> callback) noexcept { try { callback(getFile(path)); - } catch (...) { callback.rethrow(); } + } catch (...) { + callback.rethrow(); + } } void BinaryCacheStore::getFile(const std::string & path, Sink & sink) { std::promise> promise; - getFile(path, - {[&](std::future> result) { - try { - promise.set_value(result.get()); - } catch (...) { - promise.set_exception(std::current_exception()); - } - }}); + getFile(path, {[&](std::future> result) { + try { + promise.set_value(result.get()); + } catch (...) { + promise.set_exception(std::current_exception()); + } + }}); sink(*promise.get_future().get()); } @@ -128,8 +125,7 @@ void BinaryCacheStore::writeNarInfo(ref narInfo) { auto state_(state.lock()); state_->pathInfoCache.upsert( - std::string(narInfo->path.to_string()), - PathInfoCacheValue { .value = std::shared_ptr(narInfo) }); + std::string(narInfo->path.to_string()), PathInfoCacheValue{.value = std::shared_ptr(narInfo)}); } if (diskCache) @@ -137,8 +133,7 @@ void BinaryCacheStore::writeNarInfo(ref narInfo) } ref BinaryCacheStore::addToStoreCommon( - Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs, - std::function mkInfo) + Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs, std::function mkInfo) { auto [fdTemp, fnTemp] = createTempFile(); @@ -149,22 +144,19 @@ ref BinaryCacheStore::addToStoreCommon( /* Read the NAR simultaneously into a CompressionSink+FileSink (to write the compressed NAR to disk), into a HashSink (to get the NAR hash), and into a NarAccessor (to get the NAR listing). */ - HashSink fileHashSink { HashAlgorithm::SHA256 }; + HashSink fileHashSink{HashAlgorithm::SHA256}; std::shared_ptr narAccessor; - HashSink narHashSink { HashAlgorithm::SHA256 }; + HashSink narHashSink{HashAlgorithm::SHA256}; { - FdSink fileSink(fdTemp.get()); - TeeSink teeSinkCompressed { fileSink, fileHashSink }; - auto compressionSink = makeCompressionSink( - config.compression, - teeSinkCompressed, - config.parallelCompression, - config.compressionLevel); - TeeSink teeSinkUncompressed { *compressionSink, narHashSink }; - TeeSource teeSource { narSource, teeSinkUncompressed }; - narAccessor = makeNarAccessor(teeSource); - compressionSink->finish(); - fileSink.flush(); + FdSink fileSink(fdTemp.get()); + TeeSink teeSinkCompressed{fileSink, fileHashSink}; + auto compressionSink = makeCompressionSink( + config.compression, teeSinkCompressed, config.parallelCompression, config.compressionLevel); + TeeSink teeSinkUncompressed{*compressionSink, narHashSink}; + TeeSource teeSource{narSource, teeSinkUncompressed}; + narAccessor = makeNarAccessor(teeSource); + compressionSink->finish(); + fileSink.flush(); } auto now2 = std::chrono::steady_clock::now(); @@ -176,17 +168,20 @@ ref BinaryCacheStore::addToStoreCommon( narInfo->fileHash = fileHash; narInfo->fileSize = fileSize; narInfo->url = "nar/" + narInfo->fileHash->to_string(HashFormat::Nix32, false) + ".nar" - + (config.compression == "xz" ? ".xz" : - config.compression == "bzip2" ? ".bz2" : - config.compression == "zstd" ? ".zst" : - config.compression == "lzip" ? 
".lzip" : - config.compression == "lz4" ? ".lz4" : - config.compression == "br" ? ".br" : - ""); + + (config.compression == "xz" ? ".xz" + : config.compression == "bzip2" ? ".bz2" + : config.compression == "zstd" ? ".zst" + : config.compression == "lzip" ? ".lzip" + : config.compression == "lz4" ? ".lz4" + : config.compression == "br" ? ".br" + : ""); auto duration = std::chrono::duration_cast(now2 - now1).count(); - printMsg(lvlTalkative, "copying path '%1%' (%2% bytes, compressed %3$.1f%% in %4% ms) to binary cache", - printStorePath(narInfo->path), info.narSize, + printMsg( + lvlTalkative, + "copying path '%1%' (%2% bytes, compressed %3$.1f%% in %4% ms) to binary cache", + printStorePath(narInfo->path), + info.narSize, ((1.0 - (double) fileSize / info.narSize) * 100.0), duration); @@ -197,8 +192,10 @@ ref BinaryCacheStore::addToStoreCommon( if (ref != info.path) queryPathInfo(ref); } catch (InvalidPath &) { - throw Error("cannot add '%s' to the binary cache because the reference '%s' is not valid", - printStorePath(info.path), printStorePath(ref)); + throw Error( + "cannot add '%s' to the binary cache because the reference '%s' is not valid", + printStorePath(info.path), + printStorePath(ref)); } /* Optionally write a JSON file containing a listing of the @@ -232,7 +229,8 @@ ref BinaryCacheStore::addToStoreCommon( // FIXME: or should we overwrite? The previous link may point // to a GC'ed file, so overwriting might be useful... - if (fileExists(key)) return; + if (fileExists(key)) + return; printMsg(lvlTalkative, "creating debuginfo link from '%s' to '%s'", key, target); @@ -245,15 +243,13 @@ ref BinaryCacheStore::addToStoreCommon( for (auto & [s1, _type] : narAccessor->readDirectory(buildIdDir)) { auto dir = buildIdDir / s1; - if (narAccessor->lstat(dir).type != SourceAccessor::tDirectory - || !std::regex_match(s1, regex1)) + if (narAccessor->lstat(dir).type != SourceAccessor::tDirectory || !std::regex_match(s1, regex1)) continue; for (auto & [s2, _type] : narAccessor->readDirectory(dir)) { auto debugPath = dir / s2; - if (narAccessor->lstat(debugPath).type != SourceAccessor::tRegular - || !std::regex_match(s2, regex2)) + if (narAccessor->lstat(debugPath).type != SourceAccessor::tRegular || !std::regex_match(s2, regex2)) continue; auto buildId = s1 + s2; @@ -272,7 +268,8 @@ ref BinaryCacheStore::addToStoreCommon( /* Atomically write the NAR file. */ if (repair || !fileExists(narInfo->url)) { stats.narWrite++; - upsertFile(narInfo->url, + upsertFile( + narInfo->url, std::make_shared(fnTemp, std::ios_base::in | std::ios_base::binary), "application/x-nix-nar"); } else @@ -292,8 +289,8 @@ ref BinaryCacheStore::addToStoreCommon( return narInfo; } -void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource, - RepairFlag repair, CheckSigsFlag checkSigs) +void BinaryCacheStore::addToStore( + const ValidPathInfo & info, Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs) { if (!repair && isValidPath(info.path)) { // FIXME: copyNAR -> null sink @@ -302,12 +299,12 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource } addToStoreCommon(narSource, repair, checkSigs, {[&](HashResult nar) { - /* FIXME reinstate these, once we can correctly do hash modulo sink as - needed. We need to throw here in case we uploaded a corrupted store path. */ - // assert(info.narHash == nar.first); - // assert(info.narSize == nar.second); - return info; - }}); + /* FIXME reinstate these, once we can correctly do hash modulo sink as + needed. 
We need to throw here in case we uploaded a corrupted store path. */ + // assert(info.narHash == nar.first); + // assert(info.narSize == nar.second); + return info; + }}); } StorePath BinaryCacheStore::addToStoreFromDump( @@ -341,8 +338,7 @@ StorePath BinaryCacheStore::addToStoreFromDump( // The dump is already NAR in this case, just use it. nar = dump2.s; break; - case FileSerialisationMethod::Flat: - { + case FileSerialisationMethod::Flat: { // The dump is Flat, so we need to convert it to NAR with a // single file. StringSink s; @@ -357,30 +353,34 @@ StorePath BinaryCacheStore::addToStoreFromDump( if (dumpMethod != FileSerialisationMethod::NixArchive || hashAlgo != HashAlgorithm::SHA256) unsupported("addToStoreFromDump"); } - StringSource narDump { nar }; + StringSource narDump{nar}; // Use `narDump` if we wrote to `nar`. - Source & narDump2 = nar.size() > 0 - ? static_cast(narDump) - : dump; + Source & narDump2 = nar.size() > 0 ? static_cast(narDump) : dump; - return addToStoreCommon(narDump2, repair, CheckSigs, [&](HashResult nar) { - ValidPathInfo info { - *this, - name, - ContentAddressWithReferences::fromParts( - hashMethod, - caHash ? *caHash : nar.first, - { - .others = references, - // caller is not capable of creating a self-reference, because this is content-addressed without modulus - .self = false, - }), - nar.first, - }; - info.narSize = nar.second; - return info; - })->path; + return addToStoreCommon( + narDump2, + repair, + CheckSigs, + [&](HashResult nar) { + ValidPathInfo info{ + *this, + name, + ContentAddressWithReferences::fromParts( + hashMethod, + caHash ? *caHash : nar.first, + { + .others = references, + // caller is not capable of creating a self-reference, because this is content-addressed + // without modulus + .self = false, + }), + nar.first, + }; + info.narSize = nar.second; + return info; + }) + ->path; } bool BinaryCacheStore::isValidPathUncached(const StorePath & storePath) @@ -407,7 +407,7 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink) auto info = queryPathInfo(storePath).cast(); LengthSink narSize; - TeeSink tee { sink, narSize }; + TeeSink tee{sink, narSize}; auto decompressor = makeDecompressionSink(info->compression, tee); @@ -420,40 +420,44 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink) decompressor->finish(); stats.narRead++; - //stats.narReadCompressedBytes += nar->size(); // FIXME + // stats.narReadCompressedBytes += nar->size(); // FIXME stats.narReadBytes += narSize.length; } -void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath, - Callback> callback) noexcept +void BinaryCacheStore::queryPathInfoUncached( + const StorePath & storePath, Callback> callback) noexcept { auto uri = getUri(); auto storePathS = printStorePath(storePath); - auto act = std::make_shared(*logger, lvlTalkative, actQueryPathInfo, - fmt("querying info about '%s' on '%s'", storePathS, uri), Logger::Fields{storePathS, uri}); + auto act = std::make_shared( + *logger, + lvlTalkative, + actQueryPathInfo, + fmt("querying info about '%s' on '%s'", storePathS, uri), + Logger::Fields{storePathS, uri}); PushActivity pact(act->id); auto narInfoFile = narInfoFileFor(storePath); auto callbackPtr = std::make_shared(std::move(callback)); - getFile(narInfoFile, - {[=,this](std::future> fut) { - try { - auto data = fut.get(); + getFile(narInfoFile, {[=, this](std::future> fut) { + try { + auto data = fut.get(); - if (!data) return (*callbackPtr)({}); + if (!data) + return (*callbackPtr)({}); - 
stats.narInfoRead++; + stats.narInfoRead++; - (*callbackPtr)((std::shared_ptr) - std::make_shared(*this, *data, narInfoFile)); + (*callbackPtr)( + (std::shared_ptr) std::make_shared(*this, *data, narInfoFile)); - (void) act; // force Activity into this lambda to ensure it stays alive - } catch (...) { - callbackPtr->rethrow(); - } - }}); + (void) act; // force Activity into this lambda to ensure it stays alive + } catch (...) { + callbackPtr->rethrow(); + } + }}); } StorePath BinaryCacheStore::addToStore( @@ -471,54 +475,57 @@ StorePath BinaryCacheStore::addToStore( auto h = hashPath(path, method.getFileIngestionMethod(), hashAlgo, filter).first; - auto source = sinkToSource([&](Sink & sink) { - path.dumpPath(sink, filter); - }); - return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) { - ValidPathInfo info { - *this, - name, - ContentAddressWithReferences::fromParts( - method, - h, - { - .others = references, - // caller is not capable of creating a self-reference, because this is content-addressed without modulus - .self = false, - }), - nar.first, - }; - info.narSize = nar.second; - return info; - })->path; + auto source = sinkToSource([&](Sink & sink) { path.dumpPath(sink, filter); }); + return addToStoreCommon( + *source, + repair, + CheckSigs, + [&](HashResult nar) { + ValidPathInfo info{ + *this, + name, + ContentAddressWithReferences::fromParts( + method, + h, + { + .others = references, + // caller is not capable of creating a self-reference, because this is content-addressed + // without modulus + .self = false, + }), + nar.first, + }; + info.narSize = nar.second; + return info; + }) + ->path; } -void BinaryCacheStore::queryRealisationUncached(const DrvOutput & id, - Callback> callback) noexcept +void BinaryCacheStore::queryRealisationUncached( + const DrvOutput & id, Callback> callback) noexcept { auto outputInfoFilePath = realisationsPrefix + "/" + id.to_string() + ".doi"; auto callbackPtr = std::make_shared(std::move(callback)); - Callback> newCallback = { - [=](std::future> fut) { - try { - auto data = fut.get(); - if (!data) return (*callbackPtr)({}); + Callback> newCallback = {[=](std::future> fut) { + try { + auto data = fut.get(); + if (!data) + return (*callbackPtr)({}); - auto realisation = Realisation::fromJSON( - nlohmann::json::parse(*data), outputInfoFilePath); - return (*callbackPtr)(std::make_shared(realisation)); - } catch (...) { - callbackPtr->rethrow(); - } + auto realisation = Realisation::fromJSON(nlohmann::json::parse(*data), outputInfoFilePath); + return (*callbackPtr)(std::make_shared(realisation)); + } catch (...) 
{ + callbackPtr->rethrow(); } - }; + }}; getFile(outputInfoFilePath, std::move(newCallback)); } -void BinaryCacheStore::registerDrvOutput(const Realisation& info) { +void BinaryCacheStore::registerDrvOutput(const Realisation & info) +{ if (diskCache) diskCache->upsertRealisation(getUri(), info); auto filePath = realisationsPrefix + "/" + info.id.to_string() + ".doi"; @@ -563,4 +570,4 @@ void BinaryCacheStore::addBuildLog(const StorePath & drvPath, std::string_view l "text/plain; charset=utf-8"); } -} +} // namespace nix diff --git a/src/libstore/build-result.cc b/src/libstore/build-result.cc index 091661337..43c7adb11 100644 --- a/src/libstore/build-result.cc +++ b/src/libstore/build-result.cc @@ -5,4 +5,4 @@ namespace nix { bool BuildResult::operator==(const BuildResult &) const noexcept = default; std::strong_ordering BuildResult::operator<=>(const BuildResult &) const noexcept = default; -} +} // namespace nix diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 3019d9d72..e68e60250 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -24,8 +24,8 @@ namespace nix { -DerivationBuildingGoal::DerivationBuildingGoal(const StorePath & drvPath, const Derivation & drv_, - Worker & worker, BuildMode buildMode) +DerivationBuildingGoal::DerivationBuildingGoal( + const StorePath & drvPath, const Derivation & drv_, Worker & worker, BuildMode buildMode) : Goal(worker, gaveUpOnSubstitution()) , drvPath(drvPath) , buildMode(buildMode) @@ -36,8 +36,8 @@ DerivationBuildingGoal::DerivationBuildingGoal(const StorePath & drvPath, const parsedDrv = std::make_unique(*parsedOpt); } try { - drvOptions = std::make_unique( - DerivationOptions::fromStructuredAttrs(drv->env, parsedDrv.get())); + drvOptions = + std::make_unique(DerivationOptions::fromStructuredAttrs(drv->env, parsedDrv.get())); } catch (Error & e) { e.addTrace({}, "while parsing derivation '%s'", worker.store.printStorePath(drvPath)); throw; @@ -51,22 +51,36 @@ DerivationBuildingGoal::DerivationBuildingGoal(const StorePath & drvPath, const worker.store.addTempRoot(this->drvPath); } - DerivationBuildingGoal::~DerivationBuildingGoal() { /* Careful: we should never ever throw an exception from a destructor. */ - try { killChild(); } catch (...) { ignoreExceptionInDestructor(); } + try { + killChild(); + } catch (...) { + ignoreExceptionInDestructor(); + } #ifndef _WIN32 // TODO enable `DerivationBuilder` on Windows if (builder) { - try { builder->stopDaemon(); } catch (...) { ignoreExceptionInDestructor(); } - try { builder->deleteTmpDir(false); } catch (...) { ignoreExceptionInDestructor(); } + try { + builder->stopDaemon(); + } catch (...) { + ignoreExceptionInDestructor(); + } + try { + builder->deleteTmpDir(false); + } catch (...) { + ignoreExceptionInDestructor(); + } } #endif - try { closeLogFile(); } catch (...) { ignoreExceptionInDestructor(); } + try { + closeLogFile(); + } catch (...) 
{ + ignoreExceptionInDestructor(); + } } - std::string DerivationBuildingGoal::key() { /* Ensure that derivations get built in order of their name, @@ -76,7 +90,6 @@ std::string DerivationBuildingGoal::key() return "bd$" + std::string(drvPath.name()) + "$" + worker.store.printStorePath(drvPath); } - void DerivationBuildingGoal::killChild() { #ifndef _WIN32 // TODO enable build hook on Windows @@ -102,7 +115,6 @@ void DerivationBuildingGoal::killChild() #endif } - void DerivationBuildingGoal::timedOut(Error && ex) { killChild(); @@ -111,19 +123,18 @@ void DerivationBuildingGoal::timedOut(Error && ex) [[maybe_unused]] Done _ = done(BuildResult::TimedOut, {}, std::move(ex)); } - /** * Used for `inputGoals` local variable below */ struct value_comparison { - template - bool operator()(const ref & lhs, const ref & rhs) const { + template + bool operator()(const ref & lhs, const ref & rhs) const + { return *lhs < *rhs; } }; - std::string showKnownOutputs(Store & store, const Derivation & drv) { std::string msg; @@ -139,7 +150,6 @@ std::string showKnownOutputs(Store & store, const Derivation & drv) return msg; } - /* At least one of the output paths could not be produced using a substitute. So we have to build instead. */ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() @@ -149,12 +159,14 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() std::map, GoalPtr, value_comparison> inputGoals; { - std::function, const DerivedPathMap::ChildNode &)> addWaiteeDerivedPath; + std::function, const DerivedPathMap::ChildNode &)> + addWaiteeDerivedPath; - addWaiteeDerivedPath = [&](ref inputDrv, const DerivedPathMap::ChildNode & inputNode) { + addWaiteeDerivedPath = [&](ref inputDrv, + const DerivedPathMap::ChildNode & inputNode) { if (!inputNode.value.empty()) { auto g = worker.makeGoal( - DerivedPath::Built { + DerivedPath::Built{ .drvPath = inputDrv, .outputs = inputNode.value, }, @@ -164,17 +176,18 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() } for (const auto & [outputName, childNode] : inputNode.childMap) addWaiteeDerivedPath( - make_ref(SingleDerivedPath::Built { inputDrv, outputName }), - childNode); + make_ref(SingleDerivedPath::Built{inputDrv, outputName}), childNode); }; for (const auto & [inputDrvPath, inputNode] : drv->inputDrvs.map) { /* Ensure that pure, non-fixed-output derivations don't depend on impure derivations. 
*/ - if (experimentalFeatureSettings.isEnabled(Xp::ImpureDerivations) && !drv->type().isImpure() && !drv->type().isFixed()) { + if (experimentalFeatureSettings.isEnabled(Xp::ImpureDerivations) && !drv->type().isImpure() + && !drv->type().isFixed()) { auto inputDrv = worker.evalStore.readDerivation(inputDrvPath); if (inputDrv.type().isImpure()) - throw Error("pure derivation '%s' depends on impure derivation '%s'", + throw Error( + "pure derivation '%s' depends on impure derivation '%s'", worker.store.printStorePath(drvPath), worker.store.printStorePath(inputDrvPath)); } @@ -197,25 +210,27 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() } for (auto & i : drv->inputSrcs) { - if (worker.store.isValidPath(i)) continue; + if (worker.store.isValidPath(i)) + continue; if (!settings.useSubstitutes) - throw Error("dependency '%s' of '%s' does not exist, and substitution is disabled", - worker.store.printStorePath(i), worker.store.printStorePath(drvPath)); + throw Error( + "dependency '%s' of '%s' does not exist, and substitution is disabled", + worker.store.printStorePath(i), + worker.store.printStorePath(drvPath)); waitees.insert(upcast_goal(worker.makePathSubstitutionGoal(i))); } co_await await(std::move(waitees)); - trace("all inputs realised"); if (nrFailed != 0) { - auto msg = fmt( - "Cannot build '%s'.\n" - "Reason: " ANSI_RED "%d %s failed" ANSI_NORMAL ".", - Magenta(worker.store.printStorePath(drvPath)), - nrFailed, - nrFailed == 1 ? "dependency" : "dependencies"); + auto msg = + fmt("Cannot build '%s'.\n" + "Reason: " ANSI_RED "%d %s failed" ANSI_NORMAL ".", + Magenta(worker.store.printStorePath(drvPath)), + nrFailed, + nrFailed == 1 ? "dependency" : "dependencies"); msg += showKnownOutputs(worker.store, *drv); co_return done(BuildResult::DependencyFailed, {}, Error(msg)); } @@ -230,30 +245,29 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() auto & fullDrv = *drv; auto drvType = fullDrv.type(); - bool resolveDrv = std::visit(overloaded { - [&](const DerivationType::InputAddressed & ia) { - /* must resolve if deferred. */ - return ia.deferred; - }, - [&](const DerivationType::ContentAddressed & ca) { - return !fullDrv.inputDrvs.map.empty() && ( - ca.fixed - /* Can optionally resolve if fixed, which is good - for avoiding unnecessary rebuilds. */ - ? experimentalFeatureSettings.isEnabled(Xp::CaDerivations) - /* Must resolve if floating and there are any inputs - drvs. */ - : true); - }, - [&](const DerivationType::Impure &) { - return true; - } - }, drvType.raw) + bool resolveDrv = + std::visit( + overloaded{ + [&](const DerivationType::InputAddressed & ia) { + /* must resolve if deferred. */ + return ia.deferred; + }, + [&](const DerivationType::ContentAddressed & ca) { + return !fullDrv.inputDrvs.map.empty() + && (ca.fixed + /* Can optionally resolve if fixed, which is good + for avoiding unnecessary rebuilds. */ + ? experimentalFeatureSettings.isEnabled(Xp::CaDerivations) + /* Must resolve if floating and there are any inputs + drvs. 
*/ + : true); + }, + [&](const DerivationType::Impure &) { return true; }}, + drvType.raw) /* no inputs are outputs of dynamic derivations */ - || std::ranges::any_of( - fullDrv.inputDrvs.map.begin(), - fullDrv.inputDrvs.map.end(), - [](auto & pair) { return !pair.second.childMap.empty(); }); + || std::ranges::any_of(fullDrv.inputDrvs.map.begin(), fullDrv.inputDrvs.map.end(), [](auto & pair) { + return !pair.second.childMap.empty(); + }); if (resolveDrv && !fullDrv.inputDrvs.map.empty()) { experimentalFeatureSettings.require(Xp::CaDerivations); @@ -261,46 +275,55 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() /* We are be able to resolve this derivation based on the now-known results of dependencies. If so, we become a stub goal aliasing that resolved derivation goal. */ - std::optional attempt = fullDrv.tryResolve(worker.store, + std::optional attempt = fullDrv.tryResolve( + worker.store, [&](ref drvPath, const std::string & outputName) -> std::optional { auto mEntry = get(inputGoals, drvPath); - if (!mEntry) return std::nullopt; + if (!mEntry) + return std::nullopt; auto & buildResult = (*mEntry)->buildResult; - if (!buildResult.success()) return std::nullopt; + if (!buildResult.success()) + return std::nullopt; auto i = get(buildResult.builtOutputs, outputName); - if (!i) return std::nullopt; + if (!i) + return std::nullopt; return i->outPath; }); if (!attempt) { - /* TODO (impure derivations-induced tech debt) (see below): - The above attempt should have found it, but because we manage - inputDrvOutputs statefully, sometimes it gets out of sync with - the real source of truth (store). So we query the store - directly if there's a problem. */ - attempt = fullDrv.tryResolve(worker.store, &worker.evalStore); + /* TODO (impure derivations-induced tech debt) (see below): + The above attempt should have found it, but because we manage + inputDrvOutputs statefully, sometimes it gets out of sync with + the real source of truth (store). So we query the store + directly if there's a problem. */ + attempt = fullDrv.tryResolve(worker.store, &worker.evalStore); } assert(attempt); - Derivation drvResolved { std::move(*attempt) }; + Derivation drvResolved{std::move(*attempt)}; auto pathResolved = writeDerivation(worker.store, drvResolved); - auto msg = fmt("resolved derivation: '%s' -> '%s'", - worker.store.printStorePath(drvPath), - worker.store.printStorePath(pathResolved)); - act = std::make_unique(*logger, lvlInfo, actBuildWaiting, msg, - Logger::Fields { - worker.store.printStorePath(drvPath), - worker.store.printStorePath(pathResolved), - }); + auto msg = + fmt("resolved derivation: '%s' -> '%s'", + worker.store.printStorePath(drvPath), + worker.store.printStorePath(pathResolved)); + act = std::make_unique( + *logger, + lvlInfo, + actBuildWaiting, + msg, + Logger::Fields{ + worker.store.printStorePath(drvPath), + worker.store.printStorePath(pathResolved), + }); /* TODO https://github.com/NixOS/nix/issues/13247 we should let the calling goal do this, so it has a change to pass just the output(s) it cares about. 
*/ - auto resolvedDrvGoal = worker.makeDerivationTrampolineGoal( - pathResolved, OutputsSpec::All{}, drvResolved, buildMode); + auto resolvedDrvGoal = + worker.makeDerivationTrampolineGoal(pathResolved, OutputsSpec::All{}, drvResolved, buildMode); { Goals waitees{resolvedDrvGoal}; co_await await(std::move(waitees)); @@ -323,33 +346,36 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() if ((!initialOutput) || (!resolvedHash)) throw Error( "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolve)", - worker.store.printStorePath(drvPath), outputName); + worker.store.printStorePath(drvPath), + outputName); - auto realisation = [&]{ - auto take1 = get(resolvedResult.builtOutputs, outputName); - if (take1) return *take1; + auto realisation = [&] { + auto take1 = get(resolvedResult.builtOutputs, outputName); + if (take1) + return *take1; - /* The above `get` should work. But stateful tracking of - outputs in resolvedResult, this can get out of sync with the - store, which is our actual source of truth. For now we just - check the store directly if it fails. */ - auto take2 = worker.evalStore.queryRealisation(DrvOutput { *resolvedHash, outputName }); - if (take2) return *take2; + /* The above `get` should work. But stateful tracking of + outputs in resolvedResult, this can get out of sync with the + store, which is our actual source of truth. For now we just + check the store directly if it fails. */ + auto take2 = worker.evalStore.queryRealisation(DrvOutput{*resolvedHash, outputName}); + if (take2) + return *take2; - throw Error( - "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/realisation)", - worker.store.printStorePath(pathResolved), outputName); + throw Error( + "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/realisation)", + worker.store.printStorePath(pathResolved), + outputName); }(); if (!drv->type().isImpure()) { auto newRealisation = realisation; - newRealisation.id = DrvOutput { initialOutput->outputHash, outputName }; + newRealisation.id = DrvOutput{initialOutput->outputHash, outputName}; newRealisation.signatures.clear(); if (!drv->type().isFixed()) { - auto & drvStore = worker.evalStore.isValidPath(drvPath) - ? worker.evalStore - : worker.store; - newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation.outPath, &drvStore); + auto & drvStore = worker.evalStore.isValidPath(drvPath) ? worker.evalStore : worker.store; + newRealisation.dependentRealisations = + drvOutputReferences(worker.store, *drv, realisation.outPath, &drvStore); } worker.store.signRealisation(newRealisation); worker.store.registerDrvOutput(newRealisation); @@ -358,12 +384,7 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() builtOutputs.emplace(outputName, realisation); } - runPostBuildHook( - worker.store, - *logger, - drvPath, - outputPaths - ); + runPostBuildHook(worker.store, *logger, drvPath, outputPaths); } auto status = resolvedResult.status; @@ -381,8 +402,8 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() impure derivations are always resolved above. Can just use DB. This case only happens in the (older) input addressed and fixed output derivation cases. 
*/ - auto outMap = [&]{ - for (auto * drvStore : { &worker.evalStore, &worker.store }) + auto outMap = [&] { + for (auto * drvStore : {&worker.evalStore, &worker.store}) if (drvStore->isValidPath(depDrvPath)) return worker.store.queryDerivationOutputMap(depDrvPath, drvStore); assert(false); @@ -392,7 +413,9 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() if (outMapPath == outMap.end()) { throw Error( "derivation '%s' requires non-existent output '%s' from input derivation '%s'", - worker.store.printStorePath(drvPath), outputName, worker.store.printStorePath(depDrvPath)); + worker.store.printStorePath(drvPath), + outputName, + worker.store.printStorePath(depDrvPath)); } worker.store.computeFSClosure(outMapPath->second, inputPaths); @@ -414,22 +437,29 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() void DerivationBuildingGoal::started() { - auto msg = fmt( - buildMode == bmRepair ? "repairing outputs of '%s'" : - buildMode == bmCheck ? "checking outputs of '%s'" : - "building '%s'", worker.store.printStorePath(drvPath)); + auto msg = + fmt(buildMode == bmRepair ? "repairing outputs of '%s'" + : buildMode == bmCheck ? "checking outputs of '%s'" + : "building '%s'", + worker.store.printStorePath(drvPath)); fmt("building '%s'", worker.store.printStorePath(drvPath)); #ifndef _WIN32 // TODO enable build hook on Windows - if (hook) msg += fmt(" on '%s'", machineName); + if (hook) + msg += fmt(" on '%s'", machineName); #endif - act = std::make_unique(*logger, lvlInfo, actBuild, msg, - Logger::Fields{worker.store.printStorePath(drvPath), + act = std::make_unique( + *logger, + lvlInfo, + actBuild, + msg, + Logger::Fields{ + worker.store.printStorePath(drvPath), #ifndef _WIN32 // TODO enable build hook on Windows - hook ? machineName : + hook ? machineName : #endif - "", - 1, - 1}); + "", + 1, + 1}); mcRunningBuilds = std::make_unique>(worker.runningBuilds); worker.updateProgress(); } @@ -459,16 +489,12 @@ Goal::Co DerivationBuildingGoal::tryToBuild() if (i.second.second) lockFiles.insert(worker.store.Store::toRealPath(*i.second.second)); else - lockFiles.insert( - worker.store.Store::toRealPath(drvPath) + "." + i.first - ); + lockFiles.insert(worker.store.Store::toRealPath(drvPath) + "." + i.first); } } - if (!outputLocks.lockPaths(lockFiles, "", false)) - { - Activity act(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for lock on %s", Magenta(showPaths(lockFiles)))); + if (!outputLocks.lockPaths(lockFiles, "", false)) { + Activity act(*logger, lvlWarn, actBuildWaiting, fmt("waiting for lock on %s", Magenta(showPaths(lockFiles)))); /* Wait then try locking again, repeat until success (returned boolean is true). */ @@ -496,7 +522,8 @@ Goal::Co DerivationBuildingGoal::tryToBuild() /* If any of the outputs already exist but are not valid, delete them. */ for (auto & [_, status] : initialOutputs) { - if (!status.known || status.known->isValid()) continue; + if (!status.known || status.known->isValid()) + continue; auto storePath = status.known->path; debug("removing invalid path '%s'", worker.store.printStorePath(status.known->path)); deletePath(worker.store.Store::toRealPath(storePath)); @@ -506,31 +533,33 @@ Goal::Co DerivationBuildingGoal::tryToBuild() `preferLocalBuild' set. Also, check and repair modes are only supported for local builds. 
*/ bool buildLocally = - (buildMode != bmNormal || drvOptions->willBuildLocally(worker.store, *drv)) - && settings.maxBuildJobs.get() != 0; + (buildMode != bmNormal || drvOptions->willBuildLocally(worker.store, *drv)) && settings.maxBuildJobs.get() != 0; if (!buildLocally) { switch (tryBuildHook()) { - case rpAccept: - /* Yes, it has started doing so. Wait until we get - EOF from the hook. */ - actLock.reset(); - buildResult.startTime = time(0); // inexact - started(); - co_await Suspend{}; - co_return hookDone(); - case rpPostpone: - /* Not now; wait until at least one child finishes or - the wake-up timeout expires. */ - if (!actLock) - actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for a machine to build '%s'", Magenta(worker.store.printStorePath(drvPath)))); - outputLocks.unlock(); - co_await waitForAWhile(); - co_return tryToBuild(); - case rpDecline: - /* We should do it ourselves. */ - break; + case rpAccept: + /* Yes, it has started doing so. Wait until we get + EOF from the hook. */ + actLock.reset(); + buildResult.startTime = time(0); // inexact + started(); + co_await Suspend{}; + co_return hookDone(); + case rpPostpone: + /* Not now; wait until at least one child finishes or + the wake-up timeout expires. */ + if (!actLock) + actLock = std::make_unique( + *logger, + lvlWarn, + actBuildWaiting, + fmt("waiting for a machine to build '%s'", Magenta(worker.store.printStorePath(drvPath)))); + outputLocks.unlock(); + co_await waitForAWhile(); + co_return tryToBuild(); + case rpDecline: + /* We should do it ourselves. */ + break; } } @@ -545,8 +574,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() either pass a different '--store' or enable remote builds. For more information check 'man nix.conf' and search for '/machines'. 
- )" - ); + )"); } #ifdef _WIN32 // TODO enable `DerivationBuilder` on Windows @@ -574,9 +602,11 @@ Goal::Co DerivationBuildingGoal::tryToBuild() { DerivationBuildingGoal & goal; - DerivationBuildingGoalCallbacks(DerivationBuildingGoal & goal, std::unique_ptr & builder) + DerivationBuildingGoalCallbacks( + DerivationBuildingGoal & goal, std::unique_ptr & builder) : goal{goal} - {} + { + } ~DerivationBuildingGoalCallbacks() override = default; @@ -605,13 +635,18 @@ Goal::Co DerivationBuildingGoal::tryToBuild() goal.worker.markContentsGood(path); } - Path openLogFile() override { + Path openLogFile() override + { return goal.openLogFile(); } - void closeLogFile() override { + + void closeLogFile() override + { goal.closeLogFile(); } - void appendLogTailErrorMsg(std::string & msg) override { + + void appendLogTailErrorMsg(std::string & msg) override + { goal.appendLogTailErrorMsg(msg); } }; @@ -621,7 +656,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() builder = makeDerivationBuilder( worker.store, std::make_unique(*this, builder), - DerivationBuilderParams { + DerivationBuilderParams{ drvPath, buildMode, buildResult, @@ -635,7 +670,10 @@ Goal::Co DerivationBuildingGoal::tryToBuild() if (!builder->prepareBuild()) { if (!actLock) - actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, + actLock = std::make_unique( + *logger, + lvlWarn, + actBuildWaiting, fmt("waiting for a free build user ID for '%s'", Magenta(worker.store.printStorePath(drvPath)))); co_await waitForAWhile(); continue; @@ -682,20 +720,18 @@ Goal::Co DerivationBuildingGoal::tryToBuild() #endif } - -void runPostBuildHook( - Store & store, - Logger & logger, - const StorePath & drvPath, - const StorePathSet & outputPaths) +void runPostBuildHook(Store & store, Logger & logger, const StorePath & drvPath, const StorePathSet & outputPaths) { auto hook = settings.postBuildHook; if (hook == "") return; - Activity act(logger, lvlTalkative, actPostBuildHook, - fmt("running post-build-hook '%s'", settings.postBuildHook), - Logger::Fields{store.printStorePath(drvPath)}); + Activity act( + logger, + lvlTalkative, + actPostBuildHook, + fmt("running post-build-hook '%s'", settings.postBuildHook), + Logger::Fields{store.printStorePath(drvPath)}); PushActivity pact(act.id); StringMap hookEnvironment = getEnv(); @@ -703,13 +739,18 @@ void runPostBuildHook( hookEnvironment.emplace("OUT_PATHS", chomp(concatStringsSep(" ", store.printStorePathSet(outputPaths)))); hookEnvironment.emplace("NIX_CONFIG", globalConfig.toKeyValue()); - struct LogSink : Sink { + struct LogSink : Sink + { Activity & act; std::string currentLine; - LogSink(Activity & act) : act(act) { } + LogSink(Activity & act) + : act(act) + { + } - void operator() (std::string_view data) override { + void operator()(std::string_view data) override + { for (auto c : data) { if (c == '\n') { flushLine(); @@ -719,18 +760,21 @@ void runPostBuildHook( } } - void flushLine() { + void flushLine() + { act.result(resPostBuildLogLine, currentLine); currentLine.clear(); } - ~LogSink() { + ~LogSink() + { if (currentLine != "") { currentLine += '\n'; flushLine(); } } }; + LogSink sink(act); runProgram2({ @@ -741,7 +785,6 @@ void runPostBuildHook( }); } - void DerivationBuildingGoal::appendLogTailErrorMsg(std::string & msg) { if (!logger->isVerbose() && !logTail.empty()) { @@ -751,19 +794,17 @@ void DerivationBuildingGoal::appendLogTailErrorMsg(std::string & msg) msg += line; msg += "\n"; } - auto nixLogCommand = experimentalFeatureSettings.isEnabled(Xp::NixCommand) - ? 
"nix log" - : "nix-store -l"; + auto nixLogCommand = experimentalFeatureSettings.isEnabled(Xp::NixCommand) ? "nix log" : "nix-store -l"; // The command is on a separate line for easy copying, such as with triple click. // This message will be indented elsewhere, so removing the indentation before the // command will not put it at the start of the line unfortunately. - msg += fmt("For full logs, run:\n " ANSI_BOLD "%s %s" ANSI_NORMAL, - nixLogCommand, - worker.store.printStorePath(drvPath)); + msg += + fmt("For full logs, run:\n " ANSI_BOLD "%s %s" ANSI_NORMAL, + nixLogCommand, + worker.store.printStorePath(drvPath)); } } - Goal::Co DerivationBuildingGoal::hookDone() { #ifndef _WIN32 @@ -802,11 +843,11 @@ Goal::Co DerivationBuildingGoal::hookDone() /* Check the exit status. */ if (!statusOk(status)) { - auto msg = fmt( - "Cannot build '%s'.\n" - "Reason: " ANSI_RED "builder %s" ANSI_NORMAL ".", - Magenta(worker.store.printStorePath(drvPath)), - statusToString(status)); + auto msg = + fmt("Cannot build '%s'.\n" + "Reason: " ANSI_RED "builder %s" ANSI_NORMAL ".", + Magenta(worker.store.printStorePath(drvPath)), + statusToString(status)); msg += showKnownOutputs(worker.store, *drv); @@ -834,12 +875,7 @@ Goal::Co DerivationBuildingGoal::hookDone() StorePathSet outputPaths; for (auto & [_, output] : builtOutputs) outputPaths.insert(output.outPath); - runPostBuildHook( - worker.store, - *logger, - drvPath, - outputPaths - ); + runPostBuildHook(worker.store, *logger, drvPath, outputPaths); /* It is now safe to delete the lock files, since all future lockers will see that the output paths are valid; they will @@ -858,7 +894,8 @@ HookReply DerivationBuildingGoal::tryBuildHook() #else /* This should use `worker.evalStore`, but per #13179 the build hook doesn't work with eval store anyways. */ - if (settings.buildHook.get().empty() || !worker.tryBuildHook || !worker.store.isValidPath(drvPath)) return rpDecline; + if (settings.buildHook.get().empty() || !worker.tryBuildHook || !worker.store.isValidPath(drvPath)) + return rpDecline; if (!worker.hook) worker.hook = std::make_unique(); @@ -866,12 +903,8 @@ HookReply DerivationBuildingGoal::tryBuildHook() try { /* Send the request to the hook. */ - worker.hook->sink - << "try" - << (worker.getNrLocalBuilds() < settings.maxBuildJobs ? 1 : 0) - << drv->platform - << worker.store.printStorePath(drvPath) - << drvOptions->getRequiredSystemFeatures(*drv); + worker.hook->sink << "try" << (worker.getNrLocalBuilds() < settings.maxBuildJobs ? 
1 : 0) << drv->platform + << worker.store.printStorePath(drvPath) << drvOptions->getRequiredSystemFeatures(*drv); worker.hook->sink.flush(); /* Read the first line of input, which should be a word indicating @@ -891,8 +924,7 @@ HookReply DerivationBuildingGoal::tryBuildHook() else if (s.substr(0, 2) == "# ") { reply = s.substr(2); break; - } - else { + } else { s += "\n"; writeToStderr(s); } @@ -906,17 +938,14 @@ HookReply DerivationBuildingGoal::tryBuildHook() worker.tryBuildHook = false; worker.hook = 0; return rpDecline; - } - else if (reply == "postpone") + } else if (reply == "postpone") return rpPostpone; else if (reply != "accept") throw Error("bad hook reply '%s'", reply); } catch (SysError & e) { if (e.errNo == EPIPE) { - printError( - "build hook died unexpectedly: %s", - chomp(drainFD(worker.hook->fromHook.readSide.get()))); + printError("build hook died unexpectedly: %s", chomp(drainFD(worker.hook->fromHook.readSide.get()))); worker.hook = 0; return rpDecline; } else @@ -932,7 +961,7 @@ HookReply DerivationBuildingGoal::tryBuildHook() throw; } - CommonProto::WriteConn conn { hook->sink }; + CommonProto::WriteConn conn{hook->sink}; /* Tell the hook all the inputs that have to be copied to the remote system. */ @@ -944,7 +973,8 @@ HookReply DerivationBuildingGoal::tryBuildHook() StringSet missingOutputs; for (auto & [outputName, status] : initialOutputs) { // XXX: Does this include known CA outputs? - if (buildMode != bmCheck && status.known && status.known->isValid()) continue; + if (buildMode != bmCheck && status.known && status.known->isValid()) + continue; missingOutputs.insert(outputName); } CommonProto::write(worker.store, conn, missingOutputs); @@ -965,12 +995,12 @@ HookReply DerivationBuildingGoal::tryBuildHook() #endif } - Path DerivationBuildingGoal::openLogFile() { logSize = 0; - if (!settings.keepLog) return ""; + if (!settings.keepLog) + return ""; auto baseName = std::string(baseNameOf(worker.store.printStorePath(drvPath))); @@ -983,15 +1013,18 @@ Path DerivationBuildingGoal::openLogFile() Path dir = fmt("%s/%s/%s/", logDir, LocalFSStore::drvsLogDir, baseName.substr(0, 2)); createDirs(dir); - Path logFileName = fmt("%s/%s%s", dir, baseName.substr(2), - settings.compressLog ? ".bz2" : ""); + Path logFileName = fmt("%s/%s%s", dir, baseName.substr(2), settings.compressLog ? 
".bz2" : ""); - fdLogFile = toDescriptor(open(logFileName.c_str(), O_CREAT | O_WRONLY | O_TRUNC + fdLogFile = toDescriptor(open( + logFileName.c_str(), + O_CREAT | O_WRONLY | O_TRUNC #ifndef _WIN32 - | O_CLOEXEC + | O_CLOEXEC #endif - , 0666)); - if (!fdLogFile) throw SysError("creating log file '%1%'", logFileName); + , + 0666)); + if (!fdLogFile) + throw SysError("creating log file '%1%'", logFileName); logFileSink = std::make_shared(fdLogFile.get()); @@ -1003,26 +1036,23 @@ Path DerivationBuildingGoal::openLogFile() return logFileName; } - void DerivationBuildingGoal::closeLogFile() { auto logSink2 = std::dynamic_pointer_cast(logSink); - if (logSink2) logSink2->finish(); - if (logFileSink) logFileSink->flush(); + if (logSink2) + logSink2->finish(); + if (logFileSink) + logFileSink->flush(); logSink = logFileSink = 0; fdLogFile.close(); } - bool DerivationBuildingGoal::isReadDesc(Descriptor fd) { #ifdef _WIN32 // TODO enable build hook on Windows return false; #else - return - (hook && fd == hook->builderOut.readSide.get()) - || - (builder && fd == builder->builderOut.get()); + return (hook && fd == hook->builderOut.readSide.get()) || (builder && fd == builder->builderOut.get()); #endif } @@ -1030,17 +1060,16 @@ void DerivationBuildingGoal::handleChildOutput(Descriptor fd, std::string_view d { // local & `ssh://`-builds are dealt with here. auto isWrittenToLog = isReadDesc(fd); - if (isWrittenToLog) - { + if (isWrittenToLog) { logSize += data.size(); if (settings.maxLogSize && logSize > settings.maxLogSize) { killChild(); // We're not inside a coroutine, hence we can't use co_return here. // Thus we ignore the return value. [[maybe_unused]] Done _ = done( - BuildResult::LogLimitExceeded, {}, - Error("%s killed after writing more than %d bytes of log output", - getName(), settings.maxLogSize)); + BuildResult::LogLimitExceeded, + {}, + Error("%s killed after writing more than %d bytes of log output", getName(), settings.maxLogSize)); return; } @@ -1055,7 +1084,8 @@ void DerivationBuildingGoal::handleChildOutput(Descriptor fd, std::string_view d currentLogLine[currentLogLinePos++] = c; } - if (logSink) (*logSink)(data); + if (logSink) + (*logSink)(data); } #ifndef _WIN32 // TODO enable build hook on Windows @@ -1072,19 +1102,18 @@ void DerivationBuildingGoal::handleChildOutput(Descriptor fd, std::string_view d const auto fields = (*json)["fields"]; if (type == resBuildLogLine) { (*logSink)((fields.size() > 0 ? fields[0].get() : "") + "\n"); - } else if (type == resSetPhase && ! fields.is_null()) { + } else if (type == resSetPhase && !fields.is_null()) { const auto phase = fields[0]; - if (! phase.is_null()) { + if (!phase.is_null()) { // nixpkgs' stdenv produces lines in the log to signal // phase changes. // We want to get the same lines in case of remote builds. 
// The format is: // @nix { "action": "setPhase", "phase": "$curPhase" } - const auto logLine = nlohmann::json::object({ - {"action", "setPhase"}, - {"phase", phase} - }); - (*logSink)("@nix " + logLine.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace) + "\n"); + const auto logLine = nlohmann::json::object({{"action", "setPhase"}, {"phase", phase}}); + (*logSink)( + "@nix " + logLine.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace) + + "\n"); } } } @@ -1096,14 +1125,13 @@ void DerivationBuildingGoal::handleChildOutput(Descriptor fd, std::string_view d #endif } - void DerivationBuildingGoal::handleEOF(Descriptor fd) { - if (!currentLogLine.empty()) flushLine(); + if (!currentLogLine.empty()) + flushLine(); worker.wakeUp(shared_from_this()); } - void DerivationBuildingGoal::flushLine() { if (handleJSONLogMessage(currentLogLine, *act, builderActivities, "the derivation builder", false)) @@ -1111,7 +1139,8 @@ void DerivationBuildingGoal::flushLine() else { logTail.push_back(currentLogLine); - if (logTail.size() > settings.logLines) logTail.pop_front(); + if (logTail.size() > settings.logLines) + logTail.pop_front(); act->result(resBuildLogLine, currentLogLine); } @@ -1120,12 +1149,11 @@ void DerivationBuildingGoal::flushLine() currentLogLinePos = 0; } - std::map> DerivationBuildingGoal::queryPartialDerivationOutputMap() { assert(!drv->type().isImpure()); - for (auto * drvStore : { &worker.evalStore, &worker.store }) + for (auto * drvStore : {&worker.evalStore, &worker.store}) if (drvStore->isValidPath(drvPath)) return worker.store.queryPartialDerivationOutputMap(drvPath, drvStore); @@ -1139,7 +1167,8 @@ std::map> DerivationBuildingGoal::queryPar std::pair DerivationBuildingGoal::checkPathValidity() { - if (drv->type().isImpure()) return { false, {} }; + if (drv->type().isImpure()) + return {false, {}}; bool checkHash = buildMode == bmRepair; SingleDrvOutputs validOutputs; @@ -1155,11 +1184,9 @@ std::pair DerivationBuildingGoal::checkPathValidity() auto outputPath = *i.second; info.known = { .path = outputPath, - .status = !worker.store.isValidPath(outputPath) - ? PathStatus::Absent - : !checkHash || worker.pathContentsGood(outputPath) - ? PathStatus::Valid - : PathStatus::Corrupt, + .status = !worker.store.isValidPath(outputPath) ? PathStatus::Absent + : !checkHash || worker.pathContentsGood(outputPath) ? PathStatus::Valid + : PathStatus::Corrupt, }; } auto drvOutput = DrvOutput{info.outputHash, i.first}; @@ -1175,30 +1202,29 @@ std::pair DerivationBuildingGoal::checkPathValidity() // its realisation stored (probably because it has been built // without the `ca-derivations` experimental flag). 
worker.store.registerDrvOutput( - Realisation { + Realisation{ drvOutput, info.known->path, - } - ); + }); } } if (info.known && info.known->isValid()) - validOutputs.emplace(i.first, Realisation { drvOutput, info.known->path }); + validOutputs.emplace(i.first, Realisation{drvOutput, info.known->path}); } bool allValid = true; for (auto & [_, status] : initialOutputs) { - if (!status.wanted) continue; + if (!status.wanted) + continue; if (!status.known || !status.known->isValid()) { allValid = false; break; } } - return { allValid, validOutputs }; + return {allValid, validOutputs}; } - SingleDrvOutputs DerivationBuildingGoal::assertPathValidity() { auto [allValid, validOutputs] = checkPathValidity(); @@ -1207,11 +1233,8 @@ SingleDrvOutputs DerivationBuildingGoal::assertPathValidity() return validOutputs; } - -Goal::Done DerivationBuildingGoal::done( - BuildResult::Status status, - SingleDrvOutputs builtOutputs, - std::optional ex) +Goal::Done +DerivationBuildingGoal::done(BuildResult::Status status, SingleDrvOutputs builtOutputs, std::optional ex) { outputLocks.unlock(); buildResult.status = status; @@ -1245,4 +1268,4 @@ Goal::Done DerivationBuildingGoal::done( return amDone(buildResult.success() ? ecSuccess : ecFailed, std::move(ex)); } -} +} // namespace nix diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 128d360c9..55111e378 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -11,7 +11,7 @@ #include "nix/util/compression.hh" #include "nix/store/common-protocol.hh" #include "nix/store/common-protocol-impl.hh" // Don't remove is actually needed -#include "nix/store/local-store.hh" // TODO remove, along with remaining downcasts +#include "nix/store/local-store.hh" // TODO remove, along with remaining downcasts #include #include @@ -24,8 +24,12 @@ namespace nix { -DerivationGoal::DerivationGoal(const StorePath & drvPath, const Derivation & drv, - const OutputName & wantedOutput, Worker & worker, BuildMode buildMode) +DerivationGoal::DerivationGoal( + const StorePath & drvPath, + const Derivation & drv, + const OutputName & wantedOutput, + Worker & worker, + BuildMode buildMode) : Goal(worker, haveDerivation()) , drvPath(drvPath) , wantedOutput(wantedOutput) @@ -33,17 +37,15 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const Derivation & drv { this->drv = std::make_unique(drv); - name = fmt( - "building of '%s' from in-memory derivation", - DerivedPath::Built { makeConstantStorePathRef(drvPath), drv.outputNames() }.to_string(worker.store)); + name = + fmt("building of '%s' from in-memory derivation", + DerivedPath::Built{makeConstantStorePathRef(drvPath), drv.outputNames()}.to_string(worker.store)); trace("created"); mcExpectedBuilds = std::make_unique>(worker.expectedBuilds); worker.updateProgress(); - } - std::string DerivationGoal::key() { /* Ensure that derivations get built in order of their name, @@ -56,7 +58,6 @@ std::string DerivationGoal::key() }.to_string(worker.store); } - Goal::Co DerivationGoal::haveDerivation() { trace("have derivation"); @@ -76,8 +77,7 @@ Goal::Co DerivationGoal::haveDerivation() /* At least one of the output paths could not be produced using a substitute. So we have to build instead. */ - auto gaveUpOnSubstitution = [&]() -> Goal::Co - { + auto gaveUpOnSubstitution = [&]() -> Goal::Co { auto g = worker.makeDerivationBuildingGoal(drvPath, *drv, buildMode); /* We will finish with it ourselves, as if we were the derivational goal. 
*/ @@ -103,7 +103,7 @@ Goal::Co DerivationGoal::haveDerivation() buildResult.builtOutputs = assertPathValidity(); } - for (auto it = buildResult.builtOutputs.begin(); it != buildResult.builtOutputs.end(); ) { + for (auto it = buildResult.builtOutputs.begin(); it != buildResult.builtOutputs.end();) { if (it->first != wantedOutput) { it = buildResult.builtOutputs.erase(it); } else { @@ -124,20 +124,20 @@ Goal::Co DerivationGoal::haveDerivation() { bool impure = drv->type().isImpure(); - if (impure) experimentalFeatureSettings.require(Xp::ImpureDerivations); + if (impure) + experimentalFeatureSettings.require(Xp::ImpureDerivations); auto outputHashes = staticOutputHashes(worker.evalStore, *drv); for (auto & [outputName, outputHash] : outputHashes) { InitialOutput v{ .wanted = true, // Will be refined later - .outputHash = outputHash - }; + .outputHash = outputHash}; /* TODO we might want to also allow randomizing the paths for regular CA derivations, e.g. for sake of checking determinism. */ if (impure) { - v.known = InitialOutputStatus { + v.known = InitialOutputStatus{ .path = StorePath::random(outputPathName(drv->name, outputName)), .status = PathStatus::Absent, }; @@ -173,22 +173,17 @@ Goal::Co DerivationGoal::haveDerivation() them. */ if (settings.useSubstitutes && drvOptions.substitutesAllowed()) for (auto & [outputName, status] : initialOutputs) { - if (!status.wanted) continue; + if (!status.wanted) + continue; if (!status.known) - waitees.insert( - upcast_goal( - worker.makeDrvOutputSubstitutionGoal( - DrvOutput{status.outputHash, outputName}, - buildMode == bmRepair ? Repair : NoRepair - ) - ) - ); + waitees.insert(upcast_goal(worker.makeDrvOutputSubstitutionGoal( + DrvOutput{status.outputHash, outputName}, buildMode == bmRepair ? Repair : NoRepair))); else { auto * cap = getDerivationCA(*drv); waitees.insert(upcast_goal(worker.makePathSubstitutionGoal( status.known->path, buildMode == bmRepair ? Repair : NoRepair, - cap ? std::optional { *cap } : std::nullopt))); + cap ? 
std::optional{*cap} : std::nullopt))); } } @@ -199,8 +194,11 @@ Goal::Co DerivationGoal::haveDerivation() assert(!drv->type().isImpure()); if (nrFailed > 0 && nrFailed > nrNoSubstituters && !settings.tryFallback) { - co_return done(BuildResult::TransientFailure, {}, - Error("some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ", + co_return done( + BuildResult::TransientFailure, + {}, + Error( + "some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ", worker.store.printStorePath(drvPath))); } @@ -215,26 +213,25 @@ Goal::Co DerivationGoal::haveDerivation() co_return repairClosure(); } if (buildMode == bmCheck && !allValid) - throw Error("some outputs of '%s' are not valid, so checking is not possible", - worker.store.printStorePath(drvPath)); + throw Error( + "some outputs of '%s' are not valid, so checking is not possible", worker.store.printStorePath(drvPath)); /* Nothing to wait for; tail call */ co_return gaveUpOnSubstitution(); } - /** * Used for `inputGoals` local variable below */ struct value_comparison { - template - bool operator()(const ref & lhs, const ref & rhs) const { + template + bool operator()(const ref & lhs, const ref & rhs) const + { return *lhs < *rhs; } }; - Goal::Co DerivationGoal::repairClosure() { assert(!drv->type().isImpure()); @@ -278,18 +275,20 @@ Goal::Co DerivationGoal::repairClosure() /* Check each path (slow!). */ for (auto & i : outputClosure) { - if (worker.pathContentsGood(i)) continue; + if (worker.pathContentsGood(i)) + continue; printError( "found corrupted or missing path '%s' in the output closure of '%s'", - worker.store.printStorePath(i), worker.store.printStorePath(drvPath)); + worker.store.printStorePath(i), + worker.store.printStorePath(drvPath)); auto drvPath2 = outputsToDrv.find(i); if (drvPath2 == outputsToDrv.end()) waitees.insert(upcast_goal(worker.makePathSubstitutionGoal(i, Repair))); else waitees.insert(worker.makeGoal( - DerivedPath::Built { + DerivedPath::Built{ .drvPath = makeConstantStorePathRef(drvPath2->second), - .outputs = OutputsSpec::All { }, + .outputs = OutputsSpec::All{}, }, bmRepair)); } @@ -299,18 +298,18 @@ Goal::Co DerivationGoal::repairClosure() if (!waitees.empty()) { trace("closure repaired"); if (nrFailed > 0) - throw Error("some paths in the output closure of derivation '%s' could not be repaired", + throw Error( + "some paths in the output closure of derivation '%s' could not be repaired", worker.store.printStorePath(drvPath)); } co_return done(BuildResult::AlreadyValid, assertPathValidity()); } - std::map> DerivationGoal::queryPartialDerivationOutputMap() { assert(!drv->type().isImpure()); - for (auto * drvStore : { &worker.evalStore, &worker.store }) + for (auto * drvStore : {&worker.evalStore, &worker.store}) if (drvStore->isValidPath(drvPath)) return worker.store.queryPartialDerivationOutputMap(drvPath, drvStore); @@ -326,7 +325,7 @@ OutputPathMap DerivationGoal::queryDerivationOutputMap() { assert(!drv->type().isImpure()); - for (auto * drvStore : { &worker.evalStore, &worker.store }) + for (auto * drvStore : {&worker.evalStore, &worker.store}) if (drvStore->isValidPath(drvPath)) return worker.store.queryDerivationOutputMap(drvPath, drvStore); @@ -337,10 +336,10 @@ OutputPathMap DerivationGoal::queryDerivationOutputMap() return res; } - std::pair DerivationGoal::checkPathValidity() { - if (drv->type().isImpure()) 
return { false, {} }; + if (drv->type().isImpure()) + return {false, {}}; bool checkHash = buildMode == bmRepair; StringSet wantedOutputsLeft{wantedOutput}; @@ -359,11 +358,9 @@ std::pair DerivationGoal::checkPathValidity() auto outputPath = *i.second; info.known = { .path = outputPath, - .status = !worker.store.isValidPath(outputPath) - ? PathStatus::Absent - : !checkHash || worker.pathContentsGood(outputPath) - ? PathStatus::Valid - : PathStatus::Corrupt, + .status = !worker.store.isValidPath(outputPath) ? PathStatus::Absent + : !checkHash || worker.pathContentsGood(outputPath) ? PathStatus::Valid + : PathStatus::Corrupt, }; } auto drvOutput = DrvOutput{info.outputHash, i.first}; @@ -379,38 +376,38 @@ std::pair DerivationGoal::checkPathValidity() // its realisation stored (probably because it has been built // without the `ca-derivations` experimental flag). worker.store.registerDrvOutput( - Realisation { + Realisation{ drvOutput, info.known->path, - } - ); + }); } } if (info.known && info.known->isValid()) - validOutputs.emplace(i.first, Realisation { drvOutput, info.known->path }); + validOutputs.emplace(i.first, Realisation{drvOutput, info.known->path}); } // If we requested all the outputs, we are always fine. // If we requested specific elements, the loop above removes all the valid // ones, so any that are left must be invalid. if (!wantedOutputsLeft.empty()) - throw Error("derivation '%s' does not have wanted outputs %s", + throw Error( + "derivation '%s' does not have wanted outputs %s", worker.store.printStorePath(drvPath), concatStringsSep(", ", quoteStrings(wantedOutputsLeft))); bool allValid = true; for (auto & [_, status] : initialOutputs) { - if (!status.wanted) continue; + if (!status.wanted) + continue; if (!status.known || !status.known->isValid()) { allValid = false; break; } } - return { allValid, validOutputs }; + return {allValid, validOutputs}; } - SingleDrvOutputs DerivationGoal::assertPathValidity() { auto [allValid, validOutputs] = checkPathValidity(); @@ -419,11 +416,7 @@ SingleDrvOutputs DerivationGoal::assertPathValidity() return validOutputs; } - -Goal::Done DerivationGoal::done( - BuildResult::Status status, - SingleDrvOutputs builtOutputs, - std::optional ex) +Goal::Done DerivationGoal::done(BuildResult::Status status, SingleDrvOutputs builtOutputs, std::optional ex) { buildResult.status = status; if (ex) @@ -458,4 +451,4 @@ Goal::Done DerivationGoal::done( return amDone(buildResult.success() ? 
ecSuccess : ecFailed, std::move(ex)); } -} +} // namespace nix diff --git a/src/libstore/build/derivation-trampoline-goal.cc b/src/libstore/build/derivation-trampoline-goal.cc index e8ca47dfe..5038a4ea0 100644 --- a/src/libstore/build/derivation-trampoline-goal.cc +++ b/src/libstore/build/derivation-trampoline-goal.cc @@ -172,4 +172,4 @@ Goal::Co DerivationTrampolineGoal::haveDerivation(StorePath drvPath, Derivation co_return amDone(g->exitCode, g->ex); } -} +} // namespace nix diff --git a/src/libstore/build/drv-output-substitution-goal.cc b/src/libstore/build/drv-output-substitution-goal.cc index e87a796f6..0ddd1c438 100644 --- a/src/libstore/build/drv-output-substitution-goal.cc +++ b/src/libstore/build/drv-output-substitution-goal.cc @@ -8,10 +8,7 @@ namespace nix { DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal( - const DrvOutput & id, - Worker & worker, - RepairFlag repair, - std::optional ca) + const DrvOutput & id, Worker & worker, RepairFlag repair, std::optional ca) : Goal(worker, init()) , id(id) { @@ -19,7 +16,6 @@ DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal( trace("created"); } - Goal::Co DrvOutputSubstitutionGoal::init() { trace("init"); @@ -40,32 +36,35 @@ Goal::Co DrvOutputSubstitutionGoal::init() some other error occurs), so it must not touch `this`. So put the shared state in a separate refcounted object. */ auto outPipe = std::make_shared(); - #ifndef _WIN32 +#ifndef _WIN32 outPipe->create(); - #else +#else outPipe->createAsyncPipe(worker.ioport.get()); - #endif +#endif auto promise = std::make_shared>>(); sub->queryRealisation( - id, - { [outPipe(outPipe), promise(promise)](std::future> res) { + id, {[outPipe(outPipe), promise(promise)](std::future> res) { try { Finally updateStats([&]() { outPipe->writeSide.close(); }); promise->set_value(res.get()); } catch (...) { promise->set_exception(std::current_exception()); } - } }); + }}); - worker.childStarted(shared_from_this(), { - #ifndef _WIN32 - outPipe->readSide.get() - #else - &*outPipe - #endif - }, true, false); + worker.childStarted( + shared_from_this(), + { +#ifndef _WIN32 + outPipe->readSide.get() +#else + &*outPipe +#endif + }, + true, + false); co_await Suspend{}; @@ -84,7 +83,8 @@ Goal::Co DrvOutputSubstitutionGoal::init() substituterFailed = true; } - if (!outputInfo) continue; + if (!outputInfo) + continue; bool failed = false; @@ -101,8 +101,7 @@ Goal::Co DrvOutputSubstitutionGoal::init() sub->getUri(), depId.to_string(), worker.store.printStorePath(localOutputInfo->outPath), - worker.store.printStorePath(depPath) - ); + worker.store.printStorePath(depPath)); failed = true; break; } @@ -110,7 +109,8 @@ Goal::Co DrvOutputSubstitutionGoal::init() } } - if (failed) continue; + if (failed) + continue; co_return realisationFetched(std::move(waitees), outputInfo, sub); } @@ -130,7 +130,9 @@ Goal::Co DrvOutputSubstitutionGoal::init() co_return amDone(substituterFailed ? 
ecFailed : ecNoSubstituters); } -Goal::Co DrvOutputSubstitutionGoal::realisationFetched(Goals waitees, std::shared_ptr outputInfo, nix::ref sub) { +Goal::Co DrvOutputSubstitutionGoal::realisationFetched( + Goals waitees, std::shared_ptr outputInfo, nix::ref sub) +{ waitees.insert(worker.makePathSubstitutionGoal(outputInfo->outPath)); co_await await(std::move(waitees)); @@ -160,5 +162,4 @@ void DrvOutputSubstitutionGoal::handleEOF(Descriptor fd) worker.wakeUp(shared_from_this()); } - -} +} // namespace nix diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc index 6c842554c..1dd540265 100644 --- a/src/libstore/build/entry-points.cc +++ b/src/libstore/build/entry-points.cc @@ -38,15 +38,14 @@ void Store::buildPaths(const std::vector & reqs, BuildMode buildMod ex->withExitStatus(worker.failingExitStatus()); throw std::move(*ex); } else if (!failed.empty()) { - if (ex) logError(ex->info()); + if (ex) + logError(ex->info()); throw Error(worker.failingExitStatus(), "build of %s failed", concatStringsSep(", ", quoteStrings(failed))); } } std::vector Store::buildPathsWithResults( - const std::vector & reqs, - BuildMode buildMode, - std::shared_ptr evalStore) + const std::vector & reqs, BuildMode buildMode, std::shared_ptr evalStore) { Worker worker(*this, evalStore ? *evalStore : *this); @@ -65,36 +64,36 @@ std::vector Store::buildPathsWithResults( results.reserve(state.size()); for (auto & [req, goalPtr] : state) - results.emplace_back(KeyedBuildResult { - goalPtr->buildResult, - /* .path = */ req, - }); + results.emplace_back( + KeyedBuildResult{ + goalPtr->buildResult, + /* .path = */ req, + }); return results; } -BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, - BuildMode buildMode) +BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode) { Worker worker(*this, *this); - auto goal = worker.makeDerivationTrampolineGoal(drvPath, OutputsSpec::All {}, drv, buildMode); + auto goal = worker.makeDerivationTrampolineGoal(drvPath, OutputsSpec::All{}, drv, buildMode); try { worker.run(Goals{goal}); return goal->buildResult; } catch (Error & e) { - return BuildResult { + return BuildResult{ .status = BuildResult::MiscFailure, .errorMsg = e.msg(), }; }; } - void Store::ensurePath(const StorePath & path) { /* If the path is already valid, we're done. */ - if (isValidPath(path)) return; + if (isValidPath(path)) + return; Worker worker(*this, *this); GoalPtr goal = worker.makePathSubstitutionGoal(path); @@ -107,11 +106,11 @@ void Store::ensurePath(const StorePath & path) goal->ex->withExitStatus(worker.failingExitStatus()); throw std::move(*goal->ex); } else - throw Error(worker.failingExitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path)); + throw Error( + worker.failingExitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path)); } } - void Store::repairPath(const StorePath & path) { Worker worker(*this, *this); @@ -126,15 +125,17 @@ void Store::repairPath(const StorePath & path) auto info = queryPathInfo(path); if (info->deriver && isValidPath(*info->deriver)) { goals.clear(); - goals.insert(worker.makeGoal(DerivedPath::Built { - .drvPath = makeConstantStorePathRef(*info->deriver), - // FIXME: Should just build the specific output we need. 
- .outputs = OutputsSpec::All { }, - }, bmRepair)); + goals.insert(worker.makeGoal( + DerivedPath::Built{ + .drvPath = makeConstantStorePathRef(*info->deriver), + // FIXME: Should just build the specific output we need. + .outputs = OutputsSpec::All{}, + }, + bmRepair)); worker.run(goals); } else throw Error(worker.failingExitStatus(), "cannot repair path '%s'", printStorePath(path)); } } -} +} // namespace nix diff --git a/src/libstore/build/goal.cc b/src/libstore/build/goal.cc index 88b0c28c0..2e9ca5bf7 100644 --- a/src/libstore/build/goal.cc +++ b/src/libstore/build/goal.cc @@ -8,28 +8,35 @@ using promise_type = nix::Goal::promise_type; using handle_type = nix::Goal::handle_type; using Suspend = nix::Goal::Suspend; -Co::Co(Co&& rhs) { +Co::Co(Co && rhs) +{ this->handle = rhs.handle; rhs.handle = nullptr; } -void Co::operator=(Co&& rhs) { + +void Co::operator=(Co && rhs) +{ this->handle = rhs.handle; rhs.handle = nullptr; } -Co::~Co() { + +Co::~Co() +{ if (handle) { handle.promise().alive = false; handle.destroy(); } } -Co promise_type::get_return_object() { +Co promise_type::get_return_object() +{ auto handle = handle_type::from_promise(*this); return Co{handle}; }; -std::coroutine_handle<> promise_type::final_awaiter::await_suspend(handle_type h) noexcept { - auto& p = h.promise(); +std::coroutine_handle<> promise_type::final_awaiter::await_suspend(handle_type h) noexcept +{ + auto & p = h.promise(); auto goal = p.goal; assert(goal); goal->trace("in final_awaiter"); @@ -39,9 +46,9 @@ std::coroutine_handle<> promise_type::final_awaiter::await_suspend(handle_type h // We still have a continuation, i.e. work to do. // We assert that the goal is still busy. assert(goal->exitCode == ecBusy); - assert(goal->top_co); // Goal must have an active coroutine. + assert(goal->top_co); // Goal must have an active coroutine. assert(goal->top_co->handle == h); // The active coroutine must be us. - assert(p.alive); // We must not have been destructed. + assert(p.alive); // We must not have been destructed. // we move continuation to the top, // note: previous top_co is actually h, so by moving into it, @@ -68,7 +75,8 @@ std::coroutine_handle<> promise_type::final_awaiter::await_suspend(handle_type h } } -void promise_type::return_value(Co&& next) { +void promise_type::return_value(Co && next) +{ goal->trace("return_value(Co&&)"); // Save old continuation. auto old_continuation = std::move(continuation); @@ -82,20 +90,22 @@ void promise_type::return_value(Co&& next) { continuation->handle.promise().continuation = std::move(old_continuation); } -std::coroutine_handle<> nix::Goal::Co::await_suspend(handle_type caller) { +std::coroutine_handle<> nix::Goal::Co::await_suspend(handle_type caller) +{ assert(handle); // we must be a valid coroutine - auto& p = handle.promise(); + auto & p = handle.promise(); assert(!p.continuation); // we must have no continuation - assert(!p.goal); // we must not have a goal yet + assert(!p.goal); // we must not have a goal yet auto goal = caller.promise().goal; assert(goal); p.goal = goal; p.continuation = std::move(goal->top_co); // we set our continuation to be top_co (i.e. caller) - goal->top_co = std::move(*this); // we set top_co to ourselves, don't use this anymore after this! - return p.goal->top_co->handle; // we execute ourselves + goal->top_co = std::move(*this); // we set top_co to ourselves, don't use this anymore after this! 
+ return p.goal->top_co->handle; // we execute ourselves } -bool CompareGoalPtrs::operator() (const GoalPtr & a, const GoalPtr & b) const { +bool CompareGoalPtrs::operator()(const GoalPtr & a, const GoalPtr & b) const +{ std::string s1 = a->key(); std::string s2 = b->key(); return s1 < s2; @@ -146,9 +156,11 @@ Goal::Done Goal::amDone(ExitCode result, std::optional ex) goal->trace(fmt("waitee '%s' done; %d left", name, goal->waitees.size())); - if (result == ecFailed || result == ecNoSubstituters) ++goal->nrFailed; + if (result == ecFailed || result == ecNoSubstituters) + ++goal->nrFailed; - if (result == ecNoSubstituters) ++goal->nrNoSubstituters; + if (result == ecNoSubstituters) + ++goal->nrNoSubstituters; if (goal->waitees.empty()) { worker.wakeUp(goal); @@ -177,7 +189,6 @@ Goal::Done Goal::amDone(ExitCode result, std::optional ex) return Done{}; } - void Goal::trace(std::string_view s) { debug("%1%: %2%", name, s); @@ -194,22 +205,25 @@ void Goal::work() assert(top_co || exitCode != ecBusy); } -Goal::Co Goal::yield() { +Goal::Co Goal::yield() +{ worker.wakeUp(shared_from_this()); co_await Suspend{}; co_return Return{}; } -Goal::Co Goal::waitForAWhile() { +Goal::Co Goal::waitForAWhile() +{ worker.waitForAWhile(shared_from_this()); co_await Suspend{}; co_return Return{}; } -Goal::Co Goal::waitForBuildSlot() { +Goal::Co Goal::waitForBuildSlot() +{ worker.waitForBuildSlot(shared_from_this()); co_await Suspend{}; co_return Return{}; } -} +} // namespace nix diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index 9ffc8219d..3c9ad6374 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -8,7 +8,8 @@ namespace nix { -PathSubstitutionGoal::PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair, std::optional ca) +PathSubstitutionGoal::PathSubstitutionGoal( + const StorePath & storePath, Worker & worker, RepairFlag repair, std::optional ca) : Goal(worker, init()) , storePath(storePath) , repair(repair) @@ -19,17 +20,12 @@ PathSubstitutionGoal::PathSubstitutionGoal(const StorePath & storePath, Worker & maintainExpectedSubstitutions = std::make_unique>(worker.expectedSubstitutions); } - PathSubstitutionGoal::~PathSubstitutionGoal() { cleanup(); } - -Goal::Done PathSubstitutionGoal::done( - ExitCode result, - BuildResult::Status status, - std::optional errorMsg) +Goal::Done PathSubstitutionGoal::done(ExitCode result, BuildResult::Status status, std::optional errorMsg) { buildResult.status = status; if (errorMsg) { @@ -39,7 +35,6 @@ Goal::Done PathSubstitutionGoal::done( return amDone(result); } - Goal::Co PathSubstitutionGoal::init() { trace("init"); @@ -52,7 +47,8 @@ Goal::Co PathSubstitutionGoal::init() } if (settings.readOnlyMode) - throw Error("cannot substitute path '%s' - no write access to the Nix store", worker.store.printStorePath(storePath)); + throw Error( + "cannot substitute path '%s' - no write access to the Nix store", worker.store.printStorePath(storePath)); auto subs = settings.useSubstitutes ? 
getDefaultSubstituters() : std::list>(); @@ -72,8 +68,7 @@ Goal::Co PathSubstitutionGoal::init() if (ca) { subPath = sub->makeFixedOutputPathFromCA( - std::string { storePath.name() }, - ContentAddressWithReferences::withoutRefs(*ca)); + std::string{storePath.name()}, ContentAddressWithReferences::withoutRefs(*ca)); if (sub->storeDir == worker.store.storeDir) assert(subPath == storePath); } else if (sub->storeDir != worker.store.storeDir) { @@ -86,13 +81,16 @@ Goal::Co PathSubstitutionGoal::init() } catch (InvalidPath &) { continue; } catch (SubstituterDisabled & e) { - if (settings.tryFallback) continue; - else throw e; + if (settings.tryFallback) + continue; + else + throw e; } catch (Error & e) { if (settings.tryFallback) { logError(e.info()); continue; - } else throw e; + } else + throw e; } if (info->path != storePath) { @@ -101,8 +99,11 @@ Goal::Co PathSubstitutionGoal::init() info2->path = storePath; info = info2; } else { - printError("asked '%s' for '%s' but got '%s'", - sub->getUri(), worker.store.printStorePath(storePath), sub->printStorePath(info->path)); + printError( + "asked '%s' for '%s' but got '%s'", + sub->getUri(), + worker.store.printStorePath(storePath), + sub->printStorePath(info->path)); continue; } } @@ -114,18 +115,19 @@ Goal::Co PathSubstitutionGoal::init() maintainExpectedDownload = narInfo && narInfo->fileSize - ? std::make_unique>(worker.expectedDownloadSize, narInfo->fileSize) - : nullptr; + ? std::make_unique>(worker.expectedDownloadSize, narInfo->fileSize) + : nullptr; worker.updateProgress(); /* Bail out early if this substituter lacks a valid signature. LocalStore::addToStore() also checks for this, but only after we've downloaded the path. */ - if (!sub->config.isTrusted && worker.store.pathInfoIsUntrusted(*info)) - { - warn("ignoring substitute for '%s' from '%s', as it's not signed by any of the keys in 'trusted-public-keys'", - worker.store.printStorePath(storePath), sub->getUri()); + if (!sub->config.isTrusted && worker.store.pathInfoIsUntrusted(*info)) { + warn( + "ignoring substitute for '%s' from '%s', as it's not signed by any of the keys in 'trusted-public-keys'", + worker.store.printStorePath(storePath), + sub->getUri()); continue; } @@ -159,11 +161,12 @@ Goal::Co PathSubstitutionGoal::init() co_return done( substituterFailed ? 
ecFailed : ecNoSubstituters, BuildResult::NoSubstituters, - fmt("path '%s' is required, but there is no substituter that can build it", worker.store.printStorePath(storePath))); + fmt("path '%s' is required, but there is no substituter that can build it", + worker.store.printStorePath(storePath))); } - -Goal::Co PathSubstitutionGoal::tryToRun(StorePath subPath, nix::ref sub, std::shared_ptr info, bool & substituterFailed) +Goal::Co PathSubstitutionGoal::tryToRun( + StorePath subPath, nix::ref sub, std::shared_ptr info, bool & substituterFailed) { trace("all references realised"); @@ -175,11 +178,13 @@ Goal::Co PathSubstitutionGoal::tryToRun(StorePath subPath, nix::ref sub, } for (auto & i : info->references) - /* ignore self-references */ + /* ignore self-references */ if (i != storePath) { if (!worker.store.isValidPath(i)) { - throw Error("reference '%s' of path '%s' is not a valid path", - worker.store.printStorePath(i), worker.store.printStorePath(storePath)); + throw Error( + "reference '%s' of path '%s' is not a valid path", + worker.store.printStorePath(i), + worker.store.printStorePath(storePath)); } } @@ -215,8 +220,7 @@ Goal::Co PathSubstitutionGoal::tryToRun(StorePath subPath, nix::ref sub, Activity act(*logger, actSubstitute, Logger::Fields{worker.store.printStorePath(storePath), sub->getUri()}); PushActivity pact(act.id); - copyStorePath(*sub, worker.store, - subPath, repair, sub->config.isTrusted ? NoCheckSigs : CheckSigs); + copyStorePath(*sub, worker.store, subPath, repair, sub->config.isTrusted ? NoCheckSigs : CheckSigs); promise.set_value(); } catch (...) { @@ -224,13 +228,17 @@ Goal::Co PathSubstitutionGoal::tryToRun(StorePath subPath, nix::ref sub, } }); - worker.childStarted(shared_from_this(), { + worker.childStarted( + shared_from_this(), + { #ifndef _WIN32 - outPipe.readSide.get() + outPipe.readSide.get() #else - &outPipe + &outPipe #endif - }, true, false); + }, + true, + false); co_await Suspend{}; @@ -282,13 +290,11 @@ Goal::Co PathSubstitutionGoal::tryToRun(StorePath subPath, nix::ref sub, co_return done(ecSuccess, BuildResult::Substituted); } - void PathSubstitutionGoal::handleEOF(Descriptor fd) { worker.wakeUp(shared_from_this()); } - void PathSubstitutionGoal::cleanup() { try { @@ -304,5 +310,4 @@ void PathSubstitutionGoal::cleanup() } } - -} +} // namespace nix diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index 2b901f818..9cb36fa16 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -29,7 +29,6 @@ Worker::Worker(Store & store, Store & evalStore) checkMismatch = false; } - Worker::~Worker() { /* Explicitly get rid of all strong pointers now. After this all @@ -44,9 +43,10 @@ Worker::~Worker() } template -std::shared_ptr Worker::initGoalIfNeeded(std::weak_ptr & goal_weak, Args && ...args) +std::shared_ptr Worker::initGoalIfNeeded(std::weak_ptr & goal_weak, Args &&... 
args) { - if (auto goal = goal_weak.lock()) return goal; + if (auto goal = goal_weak.lock()) + return goal; auto goal = std::make_shared(args...); goal_weak = goal; @@ -55,64 +55,60 @@ std::shared_ptr Worker::initGoalIfNeeded(std::weak_ptr & goal_weak, Args & } std::shared_ptr Worker::makeDerivationTrampolineGoal( - ref drvReq, - const OutputsSpec & wantedOutputs, - BuildMode buildMode) + ref drvReq, const OutputsSpec & wantedOutputs, BuildMode buildMode) { return initGoalIfNeeded( - derivationTrampolineGoals.ensureSlot(*drvReq).value[wantedOutputs], - drvReq, wantedOutputs, *this, buildMode); + derivationTrampolineGoals.ensureSlot(*drvReq).value[wantedOutputs], drvReq, wantedOutputs, *this, buildMode); } - std::shared_ptr Worker::makeDerivationTrampolineGoal( - const StorePath & drvPath, - const OutputsSpec & wantedOutputs, - const Derivation & drv, - BuildMode buildMode) + const StorePath & drvPath, const OutputsSpec & wantedOutputs, const Derivation & drv, BuildMode buildMode) { return initGoalIfNeeded( derivationTrampolineGoals.ensureSlot(DerivedPath::Opaque{drvPath}).value[wantedOutputs], - drvPath, wantedOutputs, drv, *this, buildMode); + drvPath, + wantedOutputs, + drv, + *this, + buildMode); } - -std::shared_ptr Worker::makeDerivationGoal(const StorePath & drvPath, - const Derivation & drv, const OutputName & wantedOutput, BuildMode buildMode) +std::shared_ptr Worker::makeDerivationGoal( + const StorePath & drvPath, const Derivation & drv, const OutputName & wantedOutput, BuildMode buildMode) { return initGoalIfNeeded(derivationGoals[drvPath][wantedOutput], drvPath, drv, wantedOutput, *this, buildMode); } - -std::shared_ptr Worker::makeDerivationBuildingGoal(const StorePath & drvPath, - const Derivation & drv, BuildMode buildMode) +std::shared_ptr +Worker::makeDerivationBuildingGoal(const StorePath & drvPath, const Derivation & drv, BuildMode buildMode) { return initGoalIfNeeded(derivationBuildingGoals[drvPath], drvPath, drv, *this, buildMode); } - -std::shared_ptr Worker::makePathSubstitutionGoal(const StorePath & path, RepairFlag repair, std::optional ca) +std::shared_ptr +Worker::makePathSubstitutionGoal(const StorePath & path, RepairFlag repair, std::optional ca) { return initGoalIfNeeded(substitutionGoals[path], path, *this, repair, ca); } - -std::shared_ptr Worker::makeDrvOutputSubstitutionGoal(const DrvOutput& id, RepairFlag repair, std::optional ca) +std::shared_ptr +Worker::makeDrvOutputSubstitutionGoal(const DrvOutput & id, RepairFlag repair, std::optional ca) { return initGoalIfNeeded(drvOutputSubstitutionGoals[id], id, *this, repair, ca); } - GoalPtr Worker::makeGoal(const DerivedPath & req, BuildMode buildMode) { - return std::visit(overloaded { - [&](const DerivedPath::Built & bfd) -> GoalPtr { - return makeDerivationTrampolineGoal(bfd.drvPath, bfd.outputs, buildMode); + return std::visit( + overloaded{ + [&](const DerivedPath::Built & bfd) -> GoalPtr { + return makeDerivationTrampolineGoal(bfd.drvPath, bfd.outputs, buildMode); + }, + [&](const DerivedPath::Opaque & bo) -> GoalPtr { + return makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? Repair : NoRepair); + }, }, - [&](const DerivedPath::Opaque & bo) -> GoalPtr { - return makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? 
Repair : NoRepair); - }, - }, req.raw()); + req.raw()); } /** @@ -149,12 +145,12 @@ static bool removeGoal(std::shared_ptr goal, std::map & goalMap) } template -static bool removeGoal(std::shared_ptr goal, typename DerivedPathMap>>::ChildNode & node) +static bool +removeGoal(std::shared_ptr goal, typename DerivedPathMap>>::ChildNode & node) { return removeGoal(goal, node.value) || removeGoal(goal, node.childMap); } - void Worker::removeGoal(GoalPtr goal) { if (auto drvGoal = std::dynamic_pointer_cast(goal)) @@ -181,34 +177,31 @@ void Worker::removeGoal(GoalPtr goal) /* Wake up goals waiting for any goal to finish. */ for (auto & i : waitingForAnyGoal) { GoalPtr goal = i.lock(); - if (goal) wakeUp(goal); + if (goal) + wakeUp(goal); } waitingForAnyGoal.clear(); } - void Worker::wakeUp(GoalPtr goal) { goal->trace("woken up"); addToWeakGoals(awake, goal); } - size_t Worker::getNrLocalBuilds() { return nrLocalBuilds; } - size_t Worker::getNrSubstitutions() { return nrSubstitutions; } - -void Worker::childStarted(GoalPtr goal, const std::set & channels, - bool inBuildSlot, bool respectTimeouts) +void Worker::childStarted( + GoalPtr goal, const std::set & channels, bool inBuildSlot, bool respectTimeouts) { Child child; child.goal = goal; @@ -235,12 +228,11 @@ void Worker::childStarted(GoalPtr goal, const std::setinBuildSlot) { switch (goal->jobCategory()) { @@ -267,40 +259,37 @@ void Worker::childTerminated(Goal * goal, bool wakeSleepers) /* Wake up goals waiting for a build slot. */ for (auto & j : wantingToBuild) { GoalPtr goal = j.lock(); - if (goal) wakeUp(goal); + if (goal) + wakeUp(goal); } wantingToBuild.clear(); } } - void Worker::waitForBuildSlot(GoalPtr goal) { goal->trace("wait for build slot"); bool isSubstitutionGoal = goal->jobCategory() == JobCategory::Substitution; - if ((!isSubstitutionGoal && getNrLocalBuilds() < settings.maxBuildJobs) || - (isSubstitutionGoal && getNrSubstitutions() < settings.maxSubstitutionJobs)) + if ((!isSubstitutionGoal && getNrLocalBuilds() < settings.maxBuildJobs) + || (isSubstitutionGoal && getNrSubstitutions() < settings.maxSubstitutionJobs)) wakeUp(goal); /* we can do it right away */ else addToWeakGoals(wantingToBuild, goal); } - void Worker::waitForAnyGoal(GoalPtr goal) { debug("wait for any goal"); addToWeakGoals(waitingForAnyGoal, goal); } - void Worker::waitForAWhile(GoalPtr goal) { debug("wait for a while"); addToWeakGoals(waitingForAWhile, goal); } - void Worker::run(const Goals & _topGoals) { std::vector topPaths; @@ -308,10 +297,11 @@ void Worker::run(const Goals & _topGoals) for (auto & i : _topGoals) { topGoals.insert(i); if (auto goal = dynamic_cast(i.get())) { - topPaths.push_back(DerivedPath::Built { - .drvPath = goal->drvReq, - .outputs = goal->wantedOutputs, - }); + topPaths.push_back( + DerivedPath::Built{ + .drvPath = goal->drvReq, + .outputs = goal->wantedOutputs, + }); } else if (auto goal = dynamic_cast(i.get())) { topPaths.push_back(DerivedPath::Opaque{goal->storePath}); } @@ -336,33 +326,37 @@ void Worker::run(const Goals & _topGoals) Goals awake2; for (auto & i : awake) { GoalPtr goal = i.lock(); - if (goal) awake2.insert(goal); + if (goal) + awake2.insert(goal); } awake.clear(); for (auto & goal : awake2) { checkInterrupt(); goal->work(); - if (topGoals.empty()) break; // stuff may have been cancelled + if (topGoals.empty()) + break; // stuff may have been cancelled } } - if (topGoals.empty()) break; + if (topGoals.empty()) + break; /* Wait for input. 
*/ if (!children.empty() || !waitingForAWhile.empty()) waitForInput(); else if (awake.empty() && 0U == settings.maxBuildJobs) { if (getMachines().empty()) - throw Error( - "Unable to start any build; either increase '--max-jobs' or enable remote builds.\n" - "\n" - "For more information run 'man nix.conf' and search for '/machines'."); + throw Error( + "Unable to start any build; either increase '--max-jobs' or enable remote builds.\n" + "\n" + "For more information run 'man nix.conf' and search for '/machines'."); else - throw Error( - "Unable to start any build; remote machines may not have all required system features.\n" - "\n" - "For more information run 'man nix.conf' and search for '/machines'."); - } else assert(!awake.empty()); + throw Error( + "Unable to start any build; remote machines may not have all required system features.\n" + "\n" + "For more information run 'man nix.conf' and search for '/machines'."); + } else + assert(!awake.empty()); } /* If --keep-going is not set, it's possible that the main goal @@ -395,7 +389,8 @@ void Worker::waitForInput() // Periodicallty wake up to see if we need to run the garbage collector. nearest = before + std::chrono::seconds(10); for (auto & i : children) { - if (!i.respectTimeouts) continue; + if (!i.respectTimeouts) + continue; if (0 != settings.maxSilentTime) nearest = std::min(nearest, i.lastOutput + std::chrono::seconds(settings.maxSilentTime)); if (0 != settings.buildTimeout) @@ -410,11 +405,15 @@ void Worker::waitForInput() up after a few seconds at most. */ if (!waitingForAWhile.empty()) { useTimeout = true; - if (lastWokenUp == steady_time_point::min() || lastWokenUp > before) lastWokenUp = before; - timeout = std::max(1L, + if (lastWokenUp == steady_time_point::min() || lastWokenUp > before) + lastWokenUp = before; + timeout = std::max( + 1L, (long) std::chrono::duration_cast( - lastWokenUp + std::chrono::seconds(settings.pollInterval) - before).count()); - } else lastWokenUp = steady_time_point::min(); + lastWokenUp + std::chrono::seconds(settings.pollInterval) - before) + .count()); + } else + lastWokenUp = steady_time_point::min(); if (useTimeout) vomit("sleeping %d seconds", timeout); @@ -427,7 +426,7 @@ void Worker::waitForInput() includes EOF. */ for (auto & i : children) { for (auto & j : i.channels) { - state.pollStatus.push_back((struct pollfd) { .fd = j, .events = POLLIN }); + state.pollStatus.push_back((struct pollfd) {.fd = j, .events = POLLIN}); state.fdToPollStatus[j] = state.pollStatus.size() - 1; } } @@ -437,7 +436,7 @@ void Worker::waitForInput() #ifdef _WIN32 ioport.get(), #endif - useTimeout ? (std::optional { timeout * 1000 }) : std::nullopt); + useTimeout ? 
(std::optional{timeout * 1000}) : std::nullopt); auto after = steady_time_point::clock::now(); @@ -455,8 +454,7 @@ void Worker::waitForInput() state.iterate( j->channels, [&](Descriptor k, std::string_view data) { - printMsg(lvlVomit, "%1%: read %2% bytes", - goal->getName(), data.size()); + printMsg(lvlVomit, "%1%: read %2% bytes", goal->getName(), data.size()); j->lastOutput = after; goal->handleChildOutput(k, data); }, @@ -465,24 +463,16 @@ void Worker::waitForInput() goal->handleEOF(k); }); - if (goal->exitCode == Goal::ecBusy && - 0 != settings.maxSilentTime && - j->respectTimeouts && - after - j->lastOutput >= std::chrono::seconds(settings.maxSilentTime)) - { - goal->timedOut(Error( - "%1% timed out after %2% seconds of silence", - goal->getName(), settings.maxSilentTime)); + if (goal->exitCode == Goal::ecBusy && 0 != settings.maxSilentTime && j->respectTimeouts + && after - j->lastOutput >= std::chrono::seconds(settings.maxSilentTime)) { + goal->timedOut( + Error("%1% timed out after %2% seconds of silence", goal->getName(), settings.maxSilentTime)); } - else if (goal->exitCode == Goal::ecBusy && - 0 != settings.buildTimeout && - j->respectTimeouts && - after - j->timeStarted >= std::chrono::seconds(settings.buildTimeout)) - { - goal->timedOut(Error( - "%1% timed out after %2% seconds", - goal->getName(), settings.buildTimeout)); + else if ( + goal->exitCode == Goal::ecBusy && 0 != settings.buildTimeout && j->respectTimeouts + && after - j->timeStarted >= std::chrono::seconds(settings.buildTimeout)) { + goal->timedOut(Error("%1% timed out after %2% seconds", goal->getName(), settings.buildTimeout)); } } @@ -490,26 +480,26 @@ void Worker::waitForInput() lastWokenUp = after; for (auto & i : waitingForAWhile) { GoalPtr goal = i.lock(); - if (goal) wakeUp(goal); + if (goal) + wakeUp(goal); } waitingForAWhile.clear(); } } - unsigned int Worker::failingExitStatus() { // See API docs in header for explanation unsigned int mask = 0; bool buildFailure = permanentFailure || timedOut || hashMismatch; if (buildFailure) - mask |= 0x04; // 100 + mask |= 0x04; // 100 if (timedOut) - mask |= 0x01; // 101 + mask |= 0x01; // 101 if (hashMismatch) - mask |= 0x02; // 102 + mask |= 0x02; // 102 if (checkMismatch) { - mask |= 0x08; // 104 + mask |= 0x08; // 104 } if (mask) @@ -517,11 +507,11 @@ unsigned int Worker::failingExitStatus() return mask ? 
mask : 1; } - bool Worker::pathContentsGood(const StorePath & path) { auto i = pathContentsGoodCache.find(path); - if (i != pathContentsGoodCache.end()) return i->second; + if (i != pathContentsGoodCache.end()) + return i->second; printInfo("checking path '%s'...", store.printStorePath(path)); auto info = store.queryPathInfo(path); bool res; @@ -529,8 +519,10 @@ bool Worker::pathContentsGood(const StorePath & path) res = false; else { auto current = hashPath( - {store.getFSAccessor(), CanonPath(path.to_string())}, - FileIngestionMethod::NixArchive, info->narHash.algo).first; + {store.getFSAccessor(), CanonPath(path.to_string())}, + FileIngestionMethod::NixArchive, + info->narHash.algo) + .first; Hash nullHash(HashAlgorithm::SHA256); res = info->narHash == nullHash || info->narHash == current; } @@ -540,13 +532,11 @@ bool Worker::pathContentsGood(const StorePath & path) return res; } - void Worker::markContentsGood(const StorePath & path) { pathContentsGoodCache.insert_or_assign(path, true); } - GoalPtr upcast_goal(std::shared_ptr subGoal) { return subGoal; @@ -562,4 +552,4 @@ GoalPtr upcast_goal(std::shared_ptr subGoal) return subGoal; } -} +} // namespace nix diff --git a/src/libstore/builtins/buildenv.cc b/src/libstore/builtins/buildenv.cc index 0e99ca0e5..0ff0be3aa 100644 --- a/src/libstore/builtins/buildenv.cc +++ b/src/libstore/builtins/buildenv.cc @@ -58,13 +58,9 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir, * Python package brings its own * `$out/lib/pythonX.Y/site-packages/easy-install.pth'.) */ - if (hasSuffix(srcFile, "/propagated-build-inputs") || - hasSuffix(srcFile, "/nix-support") || - hasSuffix(srcFile, "/perllocal.pod") || - hasSuffix(srcFile, "/info/dir") || - hasSuffix(srcFile, "/log") || - hasSuffix(srcFile, "/manifest.nix") || - hasSuffix(srcFile, "/manifest.json")) + if (hasSuffix(srcFile, "/propagated-build-inputs") || hasSuffix(srcFile, "/nix-support") + || hasSuffix(srcFile, "/perllocal.pod") || hasSuffix(srcFile, "/info/dir") || hasSuffix(srcFile, "/log") + || hasSuffix(srcFile, "/manifest.nix") || hasSuffix(srcFile, "/manifest.json")) continue; else if (S_ISDIR(srcSt.st_mode)) { @@ -80,11 +76,14 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir, throw Error("collision between '%1%' and non-directory '%2%'", srcFile, target); if (unlink(dstFile.c_str()) == -1) throw SysError("unlinking '%1%'", dstFile); - if (mkdir(dstFile.c_str() - #ifndef _WIN32 // TODO abstract mkdir perms for Windows - , 0755 - #endif - ) == -1) + if (mkdir( + dstFile.c_str() +#ifndef _WIN32 // TODO abstract mkdir perms for Windows + , + 0755 +#endif + ) + == -1) throw SysError("creating directory '%1%'", dstFile); createLinks(state, target, dstFile, state.priorities[dstFile]); createLinks(state, srcFile, dstFile, priority); @@ -100,11 +99,7 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir, if (S_ISLNK(dstSt.st_mode)) { auto prevPriority = state.priorities[dstFile]; if (prevPriority == priority) - throw BuildEnvFileConflictError( - readLink(dstFile), - srcFile, - priority - ); + throw BuildEnvFileConflictError(readLink(dstFile), srcFile, priority); if (prevPriority < priority) continue; if (unlink(dstFile.c_str()) == -1) @@ -127,16 +122,18 @@ void buildProfile(const Path & out, Packages && pkgs) PathSet done, postponed; auto addPkg = [&](const Path & pkgDir, int priority) { - if (!done.insert(pkgDir).second) return; + if (!done.insert(pkgDir).second) + return; createLinks(state, pkgDir, out, 
priority); try { for (const auto & p : tokenizeString>( - readFile(pkgDir + "/nix-support/propagated-user-env-packages"), " \n")) + readFile(pkgDir + "/nix-support/propagated-user-env-packages"), " \n")) if (!done.count(p)) postponed.insert(p); } catch (SysError & e) { - if (e.errNo != ENOENT && e.errNo != ENOTDIR) throw; + if (e.errNo != ENOENT && e.errNo != ENOTDIR) + throw; } }; @@ -171,7 +168,8 @@ static void builtinBuildenv(const BuiltinBuilderContext & ctx) { auto getAttr = [&](const std::string & name) { auto i = ctx.drv.env.find(name); - if (i == ctx.drv.env.end()) throw Error("attribute '%s' missing", name); + if (i == ctx.drv.env.end()) + throw Error("attribute '%s' missing", name); return i->second; }; @@ -191,7 +189,7 @@ static void builtinBuildenv(const BuiltinBuilderContext & ctx) const int priority = stoi(*itemIt++); const size_t outputs = stoul(*itemIt++); - for (size_t n {0}; n < outputs; n++) { + for (size_t n{0}; n < outputs; n++) { pkgs.emplace_back(std::move(*itemIt++), active, priority); } } @@ -204,4 +202,4 @@ static void builtinBuildenv(const BuiltinBuilderContext & ctx) static RegisterBuiltinBuilder registerBuildenv("buildenv", builtinBuildenv); -} +} // namespace nix diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index 18fa75558..55add7876 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -35,14 +35,11 @@ static void builtinFetchurl(const BuiltinBuilderContext & ctx) auto fileTransfer = makeFileTransfer(); auto fetch = [&](const std::string & url) { - auto source = sinkToSource([&](Sink & sink) { - FileTransferRequest request(url); request.decompress = false; - auto decompressor = makeDecompressionSink( - unpack && hasSuffix(mainUrl, ".xz") ? "xz" : "none", sink); + auto decompressor = makeDecompressionSink(unpack && hasSuffix(mainUrl, ".xz") ? 
"xz" : "none", sink); fileTransfer->download(std::move(request), *decompressor); decompressor->finish(); }); @@ -64,8 +61,11 @@ static void builtinFetchurl(const BuiltinBuilderContext & ctx) if (dof && dof->ca.method.getFileIngestionMethod() == FileIngestionMethod::Flat) for (auto hashedMirror : settings.hashedMirrors.get()) try { - if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/'; - fetch(hashedMirror + printHashAlgo(dof->ca.hash.algo) + "/" + dof->ca.hash.to_string(HashFormat::Base16, false)); + if (!hasSuffix(hashedMirror, "/")) + hashedMirror += '/'; + fetch( + hashedMirror + printHashAlgo(dof->ca.hash.algo) + "/" + + dof->ca.hash.to_string(HashFormat::Base16, false)); return; } catch (Error & e) { debug(e.what()); @@ -77,4 +77,4 @@ static void builtinFetchurl(const BuiltinBuilderContext & ctx) static RegisterBuiltinBuilder registerFetchurl("fetchurl", builtinFetchurl); -} +} // namespace nix diff --git a/src/libstore/builtins/unpack-channel.cc b/src/libstore/builtins/unpack-channel.cc index dd6b8bb71..317cbe9ef 100644 --- a/src/libstore/builtins/unpack-channel.cc +++ b/src/libstore/builtins/unpack-channel.cc @@ -7,7 +7,8 @@ static void builtinUnpackChannel(const BuiltinBuilderContext & ctx) { auto getAttr = [&](const std::string & name) -> const std::string & { auto i = ctx.drv.env.find(name); - if (i == ctx.drv.env.end()) throw Error("attribute '%s' missing", name); + if (i == ctx.drv.env.end()) + throw Error("attribute '%s' missing", name); return i->second; }; @@ -42,4 +43,4 @@ static void builtinUnpackChannel(const BuiltinBuilderContext & ctx) static RegisterBuiltinBuilder registerUnpackChannel("unpack-channel", builtinUnpackChannel); -} +} // namespace nix diff --git a/src/libstore/common-protocol.cc b/src/libstore/common-protocol.cc index 311f4888c..d4f3efc9b 100644 --- a/src/libstore/common-protocol.cc +++ b/src/libstore/common-protocol.cc @@ -18,80 +18,80 @@ std::string CommonProto::Serialise::read(const StoreDirConfig & sto return readString(conn.from); } -void CommonProto::Serialise::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const std::string & str) +void CommonProto::Serialise::write( + const StoreDirConfig & store, CommonProto::WriteConn conn, const std::string & str) { conn.to << str; } - StorePath CommonProto::Serialise::read(const StoreDirConfig & store, CommonProto::ReadConn conn) { return store.parseStorePath(readString(conn.from)); } -void CommonProto::Serialise::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const StorePath & storePath) +void CommonProto::Serialise::write( + const StoreDirConfig & store, CommonProto::WriteConn conn, const StorePath & storePath) { conn.to << store.printStorePath(storePath); } - ContentAddress CommonProto::Serialise::read(const StoreDirConfig & store, CommonProto::ReadConn conn) { return ContentAddress::parse(readString(conn.from)); } -void CommonProto::Serialise::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const ContentAddress & ca) +void CommonProto::Serialise::write( + const StoreDirConfig & store, CommonProto::WriteConn conn, const ContentAddress & ca) { conn.to << renderContentAddress(ca); } - Realisation CommonProto::Serialise::read(const StoreDirConfig & store, CommonProto::ReadConn conn) { std::string rawInput = readString(conn.from); - return Realisation::fromJSON( - nlohmann::json::parse(rawInput), - "remote-protocol" - ); + return Realisation::fromJSON(nlohmann::json::parse(rawInput), "remote-protocol"); } -void CommonProto::Serialise::write(const 
StoreDirConfig & store, CommonProto::WriteConn conn, const Realisation & realisation) +void CommonProto::Serialise::write( + const StoreDirConfig & store, CommonProto::WriteConn conn, const Realisation & realisation) { conn.to << realisation.toJSON().dump(); } - DrvOutput CommonProto::Serialise::read(const StoreDirConfig & store, CommonProto::ReadConn conn) { return DrvOutput::parse(readString(conn.from)); } -void CommonProto::Serialise::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const DrvOutput & drvOutput) +void CommonProto::Serialise::write( + const StoreDirConfig & store, CommonProto::WriteConn conn, const DrvOutput & drvOutput) { conn.to << drvOutput.to_string(); } - -std::optional CommonProto::Serialise>::read(const StoreDirConfig & store, CommonProto::ReadConn conn) +std::optional +CommonProto::Serialise>::read(const StoreDirConfig & store, CommonProto::ReadConn conn) { auto s = readString(conn.from); - return s == "" ? std::optional {} : store.parseStorePath(s); + return s == "" ? std::optional{} : store.parseStorePath(s); } -void CommonProto::Serialise>::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const std::optional & storePathOpt) +void CommonProto::Serialise>::write( + const StoreDirConfig & store, CommonProto::WriteConn conn, const std::optional & storePathOpt) { conn.to << (storePathOpt ? store.printStorePath(*storePathOpt) : ""); } - -std::optional CommonProto::Serialise>::read(const StoreDirConfig & store, CommonProto::ReadConn conn) +std::optional +CommonProto::Serialise>::read(const StoreDirConfig & store, CommonProto::ReadConn conn) { return ContentAddress::parseOpt(readString(conn.from)); } -void CommonProto::Serialise>::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const std::optional & caOpt) +void CommonProto::Serialise>::write( + const StoreDirConfig & store, CommonProto::WriteConn conn, const std::optional & caOpt) { conn.to << (caOpt ? 
renderContentAddress(*caOpt) : ""); } -} +} // namespace nix diff --git a/src/libstore/common-ssh-store-config.cc b/src/libstore/common-ssh-store-config.cc index bcaa11a96..0e3a126ec 100644 --- a/src/libstore/common-ssh-store-config.cc +++ b/src/libstore/common-ssh-store-config.cc @@ -40,4 +40,4 @@ SSHMaster CommonSSHStoreConfig::createSSHMaster(bool useMaster, Descriptor logFD }; } -} +} // namespace nix diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc index 5d27c4136..9a57e3aa6 100644 --- a/src/libstore/content-address.cc +++ b/src/libstore/content-address.cc @@ -62,8 +62,7 @@ ContentAddressMethod ContentAddressMethod::parse(std::string_view m) if (m == "text") return ContentAddressMethod::Raw::Text; else - return fileIngestionMethodToContentAddressMethod( - parseFileIngestionMethod(m)); + return fileIngestionMethodToContentAddressMethod(parseFileIngestionMethod(m)); } std::string_view ContentAddressMethod::renderPrefix() const @@ -84,12 +83,10 @@ ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m) { if (splitPrefix(m, "r:")) { return ContentAddressMethod::Raw::NixArchive; - } - else if (splitPrefix(m, "git:")) { + } else if (splitPrefix(m, "git:")) { experimentalFeatureSettings.require(Xp::GitHashing); return ContentAddressMethod::Raw::Git; - } - else if (splitPrefix(m, "text:")) { + } else if (splitPrefix(m, "text:")) { return ContentAddressMethod::Raw::Text; } return ContentAddressMethod::Raw::Flat; @@ -145,7 +142,7 @@ std::string ContentAddress::render() const */ static std::pair parseContentAddressMethodPrefix(std::string_view & rest) { - std::string_view wholeInput { rest }; + std::string_view wholeInput{rest}; std::string_view prefix; { @@ -155,7 +152,7 @@ static std::pair parseContentAddressMethodP prefix = *optPrefix; } - auto parseHashAlgorithm_ = [&](){ + auto parseHashAlgorithm_ = [&]() { auto hashAlgoRaw = splitPrefixTo(rest, ':'); if (!hashAlgoRaw) throw UsageError("content address hash must be in form ':', but found: %s", wholeInput); @@ -186,7 +183,8 @@ static std::pair parseContentAddressMethodP std::move(hashAlgo), }; } else - throw UsageError("content address prefix '%s' is unrecognized. Recogonized prefixes are 'text' or 'fixed'", prefix); + throw UsageError( + "content address prefix '%s' is unrecognized. Recogonized prefixes are 'text' or 'fixed'", prefix); } ContentAddress ContentAddress::parse(std::string_view rawCa) @@ -195,7 +193,7 @@ ContentAddress ContentAddress::parse(std::string_view rawCa) auto [caMethod, hashAlgo] = parseContentAddressMethodPrefix(rest); - return ContentAddress { + return ContentAddress{ .method = std::move(caMethod), .hash = Hash::parseNonSRIUnprefixed(rest, hashAlgo), }; @@ -211,9 +209,7 @@ std::pair ContentAddressMethod::parseWithAl std::optional ContentAddress::parseOpt(std::string_view rawCaOpt) { - return rawCaOpt == "" - ? std::nullopt - : std::optional { ContentAddress::parse(rawCaOpt) }; + return rawCaOpt == "" ? 
std::nullopt : std::optional{ContentAddress::parse(rawCaOpt)}; }; std::string renderContentAddress(std::optional ca) @@ -223,8 +219,7 @@ std::string renderContentAddress(std::optional ca) std::string ContentAddress::printMethodAlgo() const { - return std::string { method.renderPrefix() } - + printHashAlgo(hash.algo); + return std::string{method.renderPrefix()} + printHashAlgo(hash.algo); } bool StoreReferences::empty() const @@ -241,14 +236,14 @@ ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const Con { switch (ca.method.raw) { case ContentAddressMethod::Raw::Text: - return TextInfo { + return TextInfo{ .hash = ca.hash, .references = {}, }; case ContentAddressMethod::Raw::Flat: case ContentAddressMethod::Raw::NixArchive: case ContentAddressMethod::Raw::Git: - return FixedOutputInfo { + return FixedOutputInfo{ .method = ca.method.getFileIngestionMethod(), .hash = ca.hash, .references = {}, @@ -258,21 +253,21 @@ ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const Con } } -ContentAddressWithReferences ContentAddressWithReferences::fromParts( - ContentAddressMethod method, Hash hash, StoreReferences refs) +ContentAddressWithReferences +ContentAddressWithReferences::fromParts(ContentAddressMethod method, Hash hash, StoreReferences refs) { switch (method.raw) { case ContentAddressMethod::Raw::Text: if (refs.self) throw Error("self-reference not allowed with text hashing"); - return TextInfo { + return TextInfo{ .hash = std::move(hash), .references = std::move(refs.others), }; case ContentAddressMethod::Raw::Flat: case ContentAddressMethod::Raw::NixArchive: case ContentAddressMethod::Raw::Git: - return FixedOutputInfo { + return FixedOutputInfo{ .method = method.getFileIngestionMethod(), .hash = std::move(hash), .references = std::move(refs), @@ -284,27 +279,24 @@ ContentAddressWithReferences ContentAddressWithReferences::fromParts( ContentAddressMethod ContentAddressWithReferences::getMethod() const { - return std::visit(overloaded { - [](const TextInfo & th) -> ContentAddressMethod { - return ContentAddressMethod::Raw::Text; + return std::visit( + overloaded{ + [](const TextInfo & th) -> ContentAddressMethod { return ContentAddressMethod::Raw::Text; }, + [](const FixedOutputInfo & fsh) -> ContentAddressMethod { + return fileIngestionMethodToContentAddressMethod(fsh.method); + }, }, - [](const FixedOutputInfo & fsh) -> ContentAddressMethod { - return fileIngestionMethodToContentAddressMethod( - fsh.method); - }, - }, raw); + raw); } Hash ContentAddressWithReferences::getHash() const { - return std::visit(overloaded { - [](const TextInfo & th) { - return th.hash; + return std::visit( + overloaded{ + [](const TextInfo & th) { return th.hash; }, + [](const FixedOutputInfo & fsh) { return fsh.hash; }, }, - [](const FixedOutputInfo & fsh) { - return fsh.hash; - }, - }, raw); + raw); } -} +} // namespace nix diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index bf4a9d959..6211850cb 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -18,14 +18,14 @@ #include "nix/util/logging.hh" #ifndef _WIN32 // TODO need graceful async exit support on Windows? 
-# include "nix/util/monitor-fd.hh" +# include "nix/util/monitor-fd.hh" #endif #include namespace nix::daemon { -Sink & operator << (Sink & sink, const Logger::Fields & fields) +Sink & operator<<(Sink & sink, const Logger::Fields & fields) { sink << fields.size(); for (auto & f : fields) { @@ -34,7 +34,8 @@ Sink & operator << (Sink & sink, const Logger::Fields & fields) sink << f.i; else if (f.type == Logger::Field::tString) sink << f.s; - else unreachable(); + else + unreachable(); } return sink; } @@ -57,7 +58,10 @@ struct TunnelLogger : public Logger WorkerProto::Version clientVersion; TunnelLogger(FdSink & to, WorkerProto::Version clientVersion) - : to(to), clientVersion(clientVersion) { } + : to(to) + , clientVersion(clientVersion) + { + } void enqueueMsg(const std::string & s) { @@ -80,7 +84,8 @@ struct TunnelLogger : public Logger void log(Verbosity lvl, std::string_view s) override { - if (lvl > verbosity) return; + if (lvl > verbosity) + return; StringSink buf; buf << STDERR_NEXT << (s + "\n"); @@ -89,7 +94,8 @@ struct TunnelLogger : public Logger void logEI(const ErrorInfo & ei) override { - if (ei.level > verbosity) return; + if (ei.level > verbosity) + return; std::ostringstream oss; showErrorInfo(oss, ei, false); @@ -133,8 +139,13 @@ struct TunnelLogger : public Logger } } - void startActivity(ActivityId act, Verbosity lvl, ActivityType type, - const std::string & s, const Fields & fields, ActivityId parent) override + void startActivity( + ActivityId act, + Verbosity lvl, + ActivityType type, + const std::string & s, + const Fields & fields, + ActivityId parent) override { if (GET_PROTOCOL_MINOR(clientVersion) < 20) { if (!s.empty()) @@ -149,7 +160,8 @@ struct TunnelLogger : public Logger void stopActivity(ActivityId act) override { - if (GET_PROTOCOL_MINOR(clientVersion) < 20) return; + if (GET_PROTOCOL_MINOR(clientVersion) < 20) + return; StringSink buf; buf << STDERR_STOP_ACTIVITY << act; enqueueMsg(buf.s); @@ -157,7 +169,8 @@ struct TunnelLogger : public Logger void result(ActivityId act, ResultType type, const Fields & fields) override { - if (GET_PROTOCOL_MINOR(clientVersion) < 20) return; + if (GET_PROTOCOL_MINOR(clientVersion) < 20) + return; StringSink buf; buf << STDERR_RESULT << act << type << fields; enqueueMsg(buf.s); @@ -167,8 +180,13 @@ struct TunnelLogger : public Logger struct TunnelSink : Sink { Sink & to; - TunnelSink(Sink & to) : to(to) { } - void operator () (std::string_view data) override + + TunnelSink(Sink & to) + : to(to) + { + } + + void operator()(std::string_view data) override { to << STDERR_WRITE; writeString(data, to); @@ -179,13 +197,20 @@ struct TunnelSource : BufferedSource { Source & from; BufferedSink & to; - TunnelSource(Source & from, BufferedSink & to) : from(from), to(to) { } + + TunnelSource(Source & from, BufferedSink & to) + : from(from) + , to(to) + { + } + size_t readUnbuffered(char * data, size_t len) override { to << STDERR_READ << len; to.flush(); size_t n = readString(data, len, from); - if (n == 0) throw EndOfFile("unexpected end-of-file"); + if (n == 0) + throw EndOfFile("unexpected end-of-file"); return n; } }; @@ -233,8 +258,10 @@ struct ClientSettings else if (!hasSuffix(s, "/") && trusted.count(s + "/")) subs.push_back(s + "/"); else - warn("ignoring untrusted substituter '%s', you are not a trusted user.\n" - "Run `man nix.conf` for more information on the `substituters` configuration option.", s); + warn( + "ignoring untrusted substituter '%s', you are not a trusted user.\n" + "Run `man nix.conf` for more information 
on the `substituters` configuration option.", + s); res = subs; return true; }; @@ -245,23 +272,24 @@ struct ClientSettings else if (name == experimentalFeatureSettings.experimentalFeatures.name) { // We don’t want to forward the experimental features to // the daemon, as that could cause some pretty weird stuff - if (parseFeatures(tokenizeString(value)) != experimentalFeatureSettings.experimentalFeatures.get()) + if (parseFeatures(tokenizeString(value)) + != experimentalFeatureSettings.experimentalFeatures.get()) debug("Ignoring the client-specified experimental features"); } else if (name == "plugin-files") { - warn("Ignoring the client-specified plugin-files.\n" - "The client specifying plugins to the daemon never made sense, and was removed in Nix >=2.14."); - } - else if (trusted - || name == settings.buildTimeout.name - || name == settings.maxSilentTime.name - || name == settings.pollInterval.name - || name == "connect-timeout" + warn( + "Ignoring the client-specified plugin-files.\n" + "The client specifying plugins to the daemon never made sense, and was removed in Nix >=2.14."); + } else if ( + trusted || name == settings.buildTimeout.name || name == settings.maxSilentTime.name + || name == settings.pollInterval.name || name == "connect-timeout" || (name == "builders" && value == "")) settings.set(name, value); else if (setSubstituters(settings.substituters)) ; else - warn("ignoring the client-specified setting '%s', because it is a restricted setting and you are not a trusted user", name); + warn( + "ignoring the client-specified setting '%s', because it is a restricted setting and you are not a trusted user", + name); } catch (UsageError & e) { warn(e.what()); } @@ -269,8 +297,11 @@ struct ClientSettings } }; -static void performOp(TunnelLogger * logger, ref store, - TrustedFlag trusted, RecursiveFlag recursive, +static void performOp( + TunnelLogger * logger, + ref store, + TrustedFlag trusted, + RecursiveFlag recursive, WorkerProto::BasicServerConnection & conn, WorkerProto::Op op) { @@ -349,7 +380,8 @@ static void performOp(TunnelLogger * logger, ref store, store->queryReferrers(path, paths); else if (op == WorkerProto::Op::QueryValidDerivers) paths = store->queryValidDerivers(path); - else paths = store->queryDerivationOutputs(path); + else + paths = store->queryDerivationOutputs(path); logger->stopWork(); WorkerProto::write(*store, wconn, paths); break; @@ -424,7 +456,8 @@ static void performOp(TunnelLogger * logger, ref store, assert(false); } // TODO these two steps are essentially RemoteStore::addCAToStore. Move it up to Store. - auto path = store->addToStoreFromDump(source, name, dumpMethod, contentAddressMethod, hashAlgo, refs, repair); + auto path = + store->addToStoreFromDump(source, name, dumpMethod, contentAddressMethod, hashAlgo, refs, repair); return store->queryPathInfo(path); }(); logger->stopWork(); @@ -440,10 +473,10 @@ static void performOp(TunnelLogger * logger, ref store, std::string hashAlgoRaw; conn.from >> baseName >> fixed /* obsolete */ >> recursive >> hashAlgoRaw; if (recursive > true) - throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive); - method = recursive - ? ContentAddressMethod::Raw::NixArchive - : ContentAddressMethod::Raw::Flat; + throw Error( + "unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", + recursive); + method = recursive ? ContentAddressMethod::Raw::NixArchive : ContentAddressMethod::Raw::Flat; /* Compatibility hack. 
*/ if (!fixed) { hashAlgoRaw = "sha256"; @@ -467,8 +500,8 @@ static void performOp(TunnelLogger * logger, ref store, parseDump(sink, savedNARSource); }); logger->startWork(); - auto path = store->addToStoreFromDump( - *dumpSource, baseName, FileSerialisationMethod::NixArchive, method, hashAlgo); + auto path = + store->addToStoreFromDump(*dumpSource, baseName, FileSerialisationMethod::NixArchive, method, hashAlgo); logger->stopWork(); conn.to << store->printStorePath(path); @@ -485,9 +518,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); { FramedSource source(conn.from); - store->addMultipleToStore(source, - RepairFlag{repair}, - dontCheckSigs ? NoCheckSigs : CheckSigs); + store->addMultipleToStore(source, RepairFlag{repair}, dontCheckSigs ? NoCheckSigs : CheckSigs); } logger->stopWork(); break; @@ -499,8 +530,15 @@ static void performOp(TunnelLogger * logger, ref store, auto refs = WorkerProto::Serialise::read(*store, rconn); logger->startWork(); auto path = ({ - StringSource source { s }; - store->addToStoreFromDump(source, suffix, FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256, refs, NoRepair); + StringSource source{s}; + store->addToStoreFromDump( + source, + suffix, + FileSerialisationMethod::Flat, + ContentAddressMethod::Raw::Text, + HashAlgorithm::SHA256, + refs, + NoRepair); }); logger->stopWork(); conn.to << store->printStorePath(path); @@ -521,11 +559,11 @@ static void performOp(TunnelLogger * logger, ref store, case WorkerProto::Op::ImportPaths: { logger->startWork(); TunnelSource source(conn.from, conn.to); - auto paths = store->importPaths(source, - trusted ? NoCheckSigs : CheckSigs); + auto paths = store->importPaths(source, trusted ? NoCheckSigs : CheckSigs); logger->stopWork(); Strings paths2; - for (auto & i : paths) paths2.push_back(store->printStorePath(i)); + for (auto & i : paths) + paths2.push_back(store->printStorePath(i)); conn.to << paths2; break; } @@ -644,7 +682,7 @@ static void performOp(TunnelLogger * logger, ref store, Derivation drv2; static_cast(drv2) = drv; - drvPath = writeDerivation(*store, Derivation { drv2 }); + drvPath = writeDerivation(*store, Derivation{drv2}); } auto res = store->buildDerivation(drvPath, drv, buildMode); @@ -796,11 +834,9 @@ static void performOp(TunnelLogger * logger, ref store, if (i == infos.end()) conn.to << 0; else { - conn.to << 1 - << (i->second.deriver ? store->printStorePath(*i->second.deriver) : ""); + conn.to << 1 << (i->second.deriver ? 
store->printStorePath(*i->second.deriver) : ""); WorkerProto::write(*store, wconn, i->second.references); - conn.to << i->second.downloadSize - << i->second.narSize; + conn.to << i->second.downloadSize << i->second.narSize; } break; } @@ -842,7 +878,8 @@ static void performOp(TunnelLogger * logger, ref store, try { info = store->queryPathInfo(path); } catch (InvalidPath &) { - if (GET_PROTOCOL_MINOR(conn.protoVersion) < 17) throw; + if (GET_PROTOCOL_MINOR(conn.protoVersion) < 17) + throw; } logger->stopWork(); if (info) { @@ -898,7 +935,7 @@ static void performOp(TunnelLogger * logger, ref store, auto path = store->parseStorePath(readString(conn.from)); auto deriver = readString(conn.from); auto narHash = Hash::parseAny(readString(conn.from), HashAlgorithm::SHA256); - ValidPathInfo info { path, narHash }; + ValidPathInfo info{path, narHash}; if (deriver != "") info.deriver = store->parseStorePath(deriver); info.references = WorkerProto::Serialise::read(*store, rconn); @@ -915,8 +952,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); { FramedSource source(conn.from); - store->addToStore(info, source, (RepairFlag) repair, - dontCheckSigs ? NoCheckSigs : CheckSigs); + store->addToStore(info, source, (RepairFlag) repair, dontCheckSigs ? NoCheckSigs : CheckSigs); } logger->stopWork(); } @@ -927,7 +963,7 @@ static void performOp(TunnelLogger * logger, ref store, if (GET_PROTOCOL_MINOR(conn.protoVersion) >= 21) source = std::make_unique(conn.from, conn.to); else { - TeeSource tee { conn.from, saved }; + TeeSource tee{conn.from, saved}; NullFileSystemObjectSink ether; parseDump(ether, tee); source = std::make_unique(saved.s); @@ -936,8 +972,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); // FIXME: race if addToStore doesn't read source? - store->addToStore(info, *source, (RepairFlag) repair, - dontCheckSigs ? NoCheckSigs : CheckSigs); + store->addToStore(info, *source, (RepairFlag) repair, dontCheckSigs ? NoCheckSigs : CheckSigs); logger->stopWork(); } @@ -962,8 +997,7 @@ static void performOp(TunnelLogger * logger, ref store, if (GET_PROTOCOL_MINOR(conn.protoVersion) < 31) { auto outputId = DrvOutput::parse(readString(conn.from)); auto outputPath = StorePath(readString(conn.from)); - store->registerDrvOutput(Realisation{ - .id = outputId, .outPath = outputPath}); + store->registerDrvOutput(Realisation{.id = outputId, .outPath = outputPath}); } else { auto realisation = WorkerProto::Serialise::read(*store, rconn); store->registerDrvOutput(realisation); @@ -979,11 +1013,13 @@ static void performOp(TunnelLogger * logger, ref store, logger->stopWork(); if (GET_PROTOCOL_MINOR(conn.protoVersion) < 31) { std::set outPaths; - if (info) outPaths.insert(info->outPath); + if (info) + outPaths.insert(info->outPath); WorkerProto::write(*store, wconn, outPaths); } else { std::set realisations; - if (info) realisations.insert(*info); + if (info) + realisations.insert(*info); WorkerProto::write(*store, wconn, realisations); } break; @@ -1015,12 +1051,7 @@ static void performOp(TunnelLogger * logger, ref store, } } -void processConnection( - ref store, - FdSource && from, - FdSink && to, - TrustedFlag trusted, - RecursiveFlag recursive) +void processConnection(ref store, FdSource && from, FdSink && to, TrustedFlag trusted, RecursiveFlag recursive) { #ifndef _WIN32 // TODO need graceful async exit support on Windows? auto monitor = !recursive ? 
std::make_unique(from.fd) : nullptr; @@ -1029,8 +1060,7 @@ void processConnection( /* Exchange the greeting. */ auto [protoVersion, features] = - WorkerProto::BasicServerConnection::handshake( - to, from, PROTOCOL_VERSION, WorkerProto::allFeatures); + WorkerProto::BasicServerConnection::handshake(to, from, PROTOCOL_VERSION, WorkerProto::allFeatures); if (protoVersion < 0x10a) throw Error("the Nix client version is too old"); @@ -1059,14 +1089,14 @@ void processConnection( printMsgUsing(prevLogger, lvlDebug, "%d operations", opCount); }); - conn.postHandshake(*store, { - .daemonNixVersion = nixVersion, - // We and the underlying store both need to trust the client for - // it to be trusted. - .remoteTrustsUs = trusted - ? store->isTrustedClient() - : std::optional { NotTrusted }, - }); + conn.postHandshake( + *store, + { + .daemonNixVersion = nixVersion, + // We and the underlying store both need to trust the client for + // it to be trusted. + .remoteTrustsUs = trusted ? store->isTrustedClient() : std::optional{NotTrusted}, + }); /* Send startup error messages to the client. */ tunnelLogger->startWork(); @@ -1103,7 +1133,8 @@ void processConnection( happens, just send the error message and exit. */ bool errorAllowed = tunnelLogger->state_.lock()->canSendStderr; tunnelLogger->stopWork(&e); - if (!errorAllowed) throw; + if (!errorAllowed) + throw; } catch (std::bad_alloc & e) { auto ex = Error("Nix daemon out of memory"); tunnelLogger->stopWork(&ex); @@ -1127,4 +1158,4 @@ void processConnection( } } -} +} // namespace nix::daemon diff --git a/src/libstore/derivation-options.cc b/src/libstore/derivation-options.cc index f6bac2868..07212289e 100644 --- a/src/libstore/derivation-options.cc +++ b/src/libstore/derivation-options.cc @@ -291,7 +291,7 @@ bool DerivationOptions::useUidRange(const BasicDerivation & drv) const return getRequiredSystemFeatures(drv).count("uid-range"); } -} +} // namespace nix namespace nlohmann { @@ -381,4 +381,4 @@ void adl_serializer::to_json(json & json, Deriv json["disallowedRequisites"] = c.disallowedRequisites; } -} +} // namespace nlohmann diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 0657a7499..279713c71 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -15,128 +15,94 @@ namespace nix { -std::optional DerivationOutput::path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const +std::optional +DerivationOutput::path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const { - return std::visit(overloaded { - [](const DerivationOutput::InputAddressed & doi) -> std::optional { - return { doi.path }; + return std::visit( + overloaded{ + [](const DerivationOutput::InputAddressed & doi) -> std::optional { return {doi.path}; }, + [&](const DerivationOutput::CAFixed & dof) -> std::optional { + return {dof.path(store, drvName, outputName)}; + }, + [](const DerivationOutput::CAFloating & dof) -> std::optional { return std::nullopt; }, + [](const DerivationOutput::Deferred &) -> std::optional { return std::nullopt; }, + [](const DerivationOutput::Impure &) -> std::optional { return std::nullopt; }, }, - [&](const DerivationOutput::CAFixed & dof) -> std::optional { - return { - dof.path(store, drvName, outputName) - }; - }, - [](const DerivationOutput::CAFloating & dof) -> std::optional { - return std::nullopt; - }, - [](const DerivationOutput::Deferred &) -> std::optional { - return std::nullopt; - }, - [](const DerivationOutput::Impure &) -> 
std::optional { - return std::nullopt; - }, - }, raw); + raw); } - -StorePath DerivationOutput::CAFixed::path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const +StorePath +DerivationOutput::CAFixed::path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const { return store.makeFixedOutputPathFromCA( - outputPathName(drvName, outputName), - ContentAddressWithReferences::withoutRefs(ca)); + outputPathName(drvName, outputName), ContentAddressWithReferences::withoutRefs(ca)); } - bool DerivationType::isCA() const { /* Normally we do the full `std::visit` to make sure we have exhaustively handled all variants, but so long as there is a variant called `ContentAddressed`, it must be the only one for which `isCA` is true for this to make sense!. */ - return std::visit(overloaded { - [](const InputAddressed & ia) { - return false; + return std::visit( + overloaded{ + [](const InputAddressed & ia) { return false; }, + [](const ContentAddressed & ca) { return true; }, + [](const Impure &) { return true; }, }, - [](const ContentAddressed & ca) { - return true; - }, - [](const Impure &) { - return true; - }, - }, raw); + raw); } bool DerivationType::isFixed() const { - return std::visit(overloaded { - [](const InputAddressed & ia) { - return false; + return std::visit( + overloaded{ + [](const InputAddressed & ia) { return false; }, + [](const ContentAddressed & ca) { return ca.fixed; }, + [](const Impure &) { return false; }, }, - [](const ContentAddressed & ca) { - return ca.fixed; - }, - [](const Impure &) { - return false; - }, - }, raw); + raw); } bool DerivationType::hasKnownOutputPaths() const { - return std::visit(overloaded { - [](const InputAddressed & ia) { - return !ia.deferred; + return std::visit( + overloaded{ + [](const InputAddressed & ia) { return !ia.deferred; }, + [](const ContentAddressed & ca) { return ca.fixed; }, + [](const Impure &) { return false; }, }, - [](const ContentAddressed & ca) { - return ca.fixed; - }, - [](const Impure &) { - return false; - }, - }, raw); + raw); } - bool DerivationType::isSandboxed() const { - return std::visit(overloaded { - [](const InputAddressed & ia) { - return true; + return std::visit( + overloaded{ + [](const InputAddressed & ia) { return true; }, + [](const ContentAddressed & ca) { return ca.sandboxed; }, + [](const Impure &) { return false; }, }, - [](const ContentAddressed & ca) { - return ca.sandboxed; - }, - [](const Impure &) { - return false; - }, - }, raw); + raw); } - bool DerivationType::isImpure() const { - return std::visit(overloaded { - [](const InputAddressed & ia) { - return false; + return std::visit( + overloaded{ + [](const InputAddressed & ia) { return false; }, + [](const ContentAddressed & ca) { return false; }, + [](const Impure &) { return true; }, }, - [](const ContentAddressed & ca) { - return false; - }, - [](const Impure &) { - return true; - }, - }, raw); + raw); } - bool BasicDerivation::isBuiltin() const { return builder.substr(0, 8) == "builtin:"; } - -StorePath writeDerivation(Store & store, - const Derivation & drv, RepairFlag repair, bool readOnly) +StorePath writeDerivation(Store & store, const Derivation & drv, RepairFlag repair, bool readOnly) { auto references = drv.inputSrcs; for (auto & i : drv.inputDrvs.map) @@ -146,50 +112,68 @@ StorePath writeDerivation(Store & store, held during a garbage collection). 
*/ auto suffix = std::string(drv.name) + drvExtension; auto contents = drv.unparse(store, false); - return readOnly || settings.readOnlyMode - ? store.makeFixedOutputPathFromCA(suffix, TextInfo { - .hash = hashString(HashAlgorithm::SHA256, contents), - .references = std::move(references), - }) - : ({ - StringSource s { contents }; - store.addToStoreFromDump(s, suffix, FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256, references, repair); - }); + return readOnly || settings.readOnlyMode ? store.makeFixedOutputPathFromCA( + suffix, + TextInfo{ + .hash = hashString(HashAlgorithm::SHA256, contents), + .references = std::move(references), + }) + : ({ + StringSource s{contents}; + store.addToStoreFromDump( + s, + suffix, + FileSerialisationMethod::Flat, + ContentAddressMethod::Raw::Text, + HashAlgorithm::SHA256, + references, + repair); + }); } - namespace { /** * This mimics std::istream to some extent. We use this much smaller implementation * instead of plain istreams because the sentry object overhead is too high. */ -struct StringViewStream { +struct StringViewStream +{ std::string_view remaining; - int peek() const { + int peek() const + { return remaining.empty() ? EOF : remaining[0]; } - int get() { - if (remaining.empty()) return EOF; + int get() + { + if (remaining.empty()) + return EOF; char c = remaining[0]; remaining.remove_prefix(1); return c; } }; -constexpr struct Escapes { +constexpr struct Escapes +{ char map[256]; - constexpr Escapes() { - for (int i = 0; i < 256; i++) map[i] = (char) (unsigned char) i; + + constexpr Escapes() + { + for (int i = 0; i < 256; i++) + map[i] = (char) (unsigned char) i; map[(int) (unsigned char) 'n'] = '\n'; map[(int) (unsigned char) 'r'] = '\r'; map[(int) (unsigned char) 't'] = '\t'; } - char operator[](char c) const { return map[(unsigned char) c]; } -} escapes; -} + char operator[](char c) const + { + return map[(unsigned char) c]; + } +} escapes; +} // namespace /* Read string `s' from stream `str'. */ static void expect(StringViewStream & str, std::string_view s) @@ -199,7 +183,6 @@ static void expect(StringViewStream & str, std::string_view s) str.remaining.remove_prefix(s.size()); } - /* Read a C-style string from stream `str'. 
*/ static BackedStringView parseString(StringViewStream & str) { @@ -228,12 +211,13 @@ static BackedStringView parseString(StringViewStream & str) if (*c == '\\') { c++; res += escapes[*c]; - } - else res += *c; + } else + res += *c; return res; } -static void validatePath(std::string_view s) { +static void validatePath(std::string_view s) +{ if (s.size() == 0 || s[0] != '/') throw FormatError("bad path '%1%' in derivation", s); } @@ -245,7 +229,6 @@ static BackedStringView parsePath(StringViewStream & str) return s; } - static bool endOfList(StringViewStream & str) { if (str.peek() == ',') { @@ -259,7 +242,6 @@ static bool endOfList(StringViewStream & str) return false; } - static StringSet parseStrings(StringViewStream & str, bool arePaths) { StringSet res; @@ -269,10 +251,11 @@ static StringSet parseStrings(StringViewStream & str, bool arePaths) return res; } - static DerivationOutput parseDerivationOutput( const StoreDirConfig & store, - std::string_view pathS, std::string_view hashAlgoStr, std::string_view hashS, + std::string_view pathS, + std::string_view hashAlgoStr, + std::string_view hashS, const ExperimentalFeatureSettings & xpSettings) { if (hashAlgoStr != "") { @@ -284,46 +267,51 @@ static DerivationOutput parseDerivationOutput( xpSettings.require(Xp::ImpureDerivations); if (pathS != "") throw FormatError("impure derivation output should not specify output path"); - return DerivationOutput::Impure { + return DerivationOutput::Impure{ .method = std::move(method), .hashAlgo = std::move(hashAlgo), }; } else if (hashS != "") { validatePath(pathS); auto hash = Hash::parseNonSRIUnprefixed(hashS, hashAlgo); - return DerivationOutput::CAFixed { - .ca = ContentAddress { - .method = std::move(method), - .hash = std::move(hash), - }, + return DerivationOutput::CAFixed{ + .ca = + ContentAddress{ + .method = std::move(method), + .hash = std::move(hash), + }, }; } else { xpSettings.require(Xp::CaDerivations); if (pathS != "") throw FormatError("content-addressing derivation output should not specify output path"); - return DerivationOutput::CAFloating { + return DerivationOutput::CAFloating{ .method = std::move(method), .hashAlgo = std::move(hashAlgo), }; } } else { if (pathS == "") { - return DerivationOutput::Deferred { }; + return DerivationOutput::Deferred{}; } validatePath(pathS); - return DerivationOutput::InputAddressed { + return DerivationOutput::InputAddressed{ .path = store.parseStorePath(pathS), }; } } static DerivationOutput parseDerivationOutput( - const StoreDirConfig & store, StringViewStream & str, + const StoreDirConfig & store, + StringViewStream & str, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings) { - expect(str, ","); const auto pathS = parseString(str); - expect(str, ","); const auto hashAlgo = parseString(str); - expect(str, ","); const auto hash = parseString(str); + expect(str, ","); + const auto pathS = parseString(str); + expect(str, ","); + const auto hashAlgo = parseString(str); + expect(str, ","); + const auto hash = parseString(str); expect(str, ")"); return parseDerivationOutput(store, *pathS, *hashAlgo, *hash, xpSettings); @@ -346,16 +334,12 @@ enum struct DerivationATermVersion { DynamicDerivations, }; -static DerivedPathMap::ChildNode parseDerivedPathMapNode( - const StoreDirConfig & store, - StringViewStream & str, - DerivationATermVersion version) +static DerivedPathMap::ChildNode +parseDerivedPathMapNode(const StoreDirConfig & store, StringViewStream & str, DerivationATermVersion version) { DerivedPathMap::ChildNode node; 
- auto parseNonDynamic = [&]() { - node.value = parseStrings(str, false); - }; + auto parseNonDynamic = [&]() { node.value = parseStrings(str, false); }; // Older derivation should never use new form, but newer // derivaiton can use old form. @@ -392,9 +376,10 @@ static DerivedPathMap::ChildNode parseDerivedPathMapNode( return node; } - Derivation parseDerivation( - const StoreDirConfig & store, std::string && s, std::string_view name, + const StoreDirConfig & store, + std::string && s, + std::string_view name, const ExperimentalFeatureSettings & xpSettings) { Derivation drv; @@ -428,7 +413,8 @@ Derivation parseDerivation( /* Parse the list of outputs. */ expect(str, "["); while (!endOfList(str)) { - expect(str, "("); std::string id = parseString(str).toOwned(); + expect(str, "("); + std::string id = parseString(str).toOwned(); auto output = parseDerivationOutput(store, str, xpSettings); drv.outputs.emplace(std::move(id), std::move(output)); } @@ -439,13 +425,17 @@ Derivation parseDerivation( expect(str, "("); auto drvPath = parsePath(str); expect(str, ","); - drv.inputDrvs.map.insert_or_assign(store.parseStorePath(*drvPath), parseDerivedPathMapNode(store, str, version)); + drv.inputDrvs.map.insert_or_assign( + store.parseStorePath(*drvPath), parseDerivedPathMapNode(store, str, version)); expect(str, ")"); } - expect(str, ","); drv.inputSrcs = store.parseStorePathSet(parseStrings(str, true)); - expect(str, ","); drv.platform = parseString(str).toOwned(); - expect(str, ","); drv.builder = parseString(str).toOwned(); + expect(str, ","); + drv.inputSrcs = store.parseStorePathSet(parseStrings(str, true)); + expect(str, ","); + drv.platform = parseString(str).toOwned(); + expect(str, ","); + drv.builder = parseString(str).toOwned(); /* Parse the builder arguments. */ expect(str, ",["); @@ -455,8 +445,10 @@ Derivation parseDerivation( /* Parse the environment variables. */ expect(str, ",["); while (!endOfList(str)) { - expect(str, "("); auto name = parseString(str).toOwned(); - expect(str, ","); auto value = parseString(str).toOwned(); + expect(str, "("); + auto name = parseString(str).toOwned(); + expect(str, ","); + auto value = parseString(str).toOwned(); expect(str, ")"); drv.env.insert_or_assign(std::move(name), std::move(value)); } @@ -465,7 +457,6 @@ Derivation parseDerivation( return drv; } - /** * Print a derivation string literal to an `std::string`. 
* @@ -483,16 +474,24 @@ static void printString(std::string & res, std::string_view s) char * p = buf; *p++ = '"'; for (auto c : s) - if (c == '\"' || c == '\\') { *p++ = '\\'; *p++ = c; } - else if (c == '\n') { *p++ = '\\'; *p++ = 'n'; } - else if (c == '\r') { *p++ = '\\'; *p++ = 'r'; } - else if (c == '\t') { *p++ = '\\'; *p++ = 't'; } - else *p++ = c; + if (c == '\"' || c == '\\') { + *p++ = '\\'; + *p++ = c; + } else if (c == '\n') { + *p++ = '\\'; + *p++ = 'n'; + } else if (c == '\r') { + *p++ = '\\'; + *p++ = 'r'; + } else if (c == '\t') { + *p++ = '\\'; + *p++ = 't'; + } else + *p++ = c; *p++ = '"'; res.append(buf, p - buf); } - static void printUnquotedString(std::string & res, std::string_view s) { res += '"'; @@ -500,34 +499,38 @@ static void printUnquotedString(std::string & res, std::string_view s) res += '"'; } - template static void printStrings(std::string & res, ForwardIterator i, ForwardIterator j) { res += '['; bool first = true; - for ( ; i != j; ++i) { - if (first) first = false; else res += ','; + for (; i != j; ++i) { + if (first) + first = false; + else + res += ','; printString(res, *i); } res += ']'; } - template static void printUnquotedStrings(std::string & res, ForwardIterator i, ForwardIterator j) { res += '['; bool first = true; - for ( ; i != j; ++i) { - if (first) first = false; else res += ','; + for (; i != j; ++i) { + if (first) + first = false; + else + res += ','; printUnquotedString(res, *i); } res += ']'; } - -static void unparseDerivedPathMapNode(const StoreDirConfig & store, std::string & s, const DerivedPathMap::ChildNode & node) +static void unparseDerivedPathMapNode( + const StoreDirConfig & store, std::string & s, const DerivedPathMap::ChildNode & node) { s += ','; if (node.childMap.empty()) { @@ -538,8 +541,12 @@ static void unparseDerivedPathMapNode(const StoreDirConfig & store, std::string s += ",["; bool first = true; for (auto & [outputName, childNode] : node.childMap) { - if (first) first = false; else s += ','; - s += '('; printUnquotedString(s, outputName); + if (first) + first = false; + else + s += ','; + s += '('; + printUnquotedString(s, outputName); unparseDerivedPathMapNode(store, s, childNode); s += ')'; } @@ -547,7 +554,6 @@ static void unparseDerivedPathMapNode(const StoreDirConfig & store, std::string } } - /** * Does the derivation have a dependency on the output of a dynamic * derivation? 
@@ -559,17 +565,15 @@ static void unparseDerivedPathMapNode(const StoreDirConfig & store, std::string */ static bool hasDynamicDrvDep(const Derivation & drv) { - return - std::find_if( - drv.inputDrvs.map.begin(), - drv.inputDrvs.map.end(), - [](auto & kv) { return !kv.second.childMap.empty(); }) - != drv.inputDrvs.map.end(); + return std::find_if( + drv.inputDrvs.map.begin(), + drv.inputDrvs.map.end(), + [](auto & kv) { return !kv.second.childMap.empty(); }) + != drv.inputDrvs.map.end(); } - -std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs, - DerivedPathMap::ChildNode::Map * actualInputs) const +std::string Derivation::unparse( + const StoreDirConfig & store, bool maskOutputs, DerivedPathMap::ChildNode::Map * actualInputs) const { std::string s; s.reserve(65536); @@ -589,36 +593,56 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs, bool first = true; s += "["; for (auto & i : outputs) { - if (first) first = false; else s += ','; - s += '('; printUnquotedString(s, i.first); - std::visit(overloaded { - [&](const DerivationOutput::InputAddressed & doi) { - s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(doi.path)); - s += ','; printUnquotedString(s, ""); - s += ','; printUnquotedString(s, ""); - }, - [&](const DerivationOutput::CAFixed & dof) { - s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(dof.path(store, name, i.first))); - s += ','; printUnquotedString(s, dof.ca.printMethodAlgo()); - s += ','; printUnquotedString(s, dof.ca.hash.to_string(HashFormat::Base16, false)); - }, - [&](const DerivationOutput::CAFloating & dof) { - s += ','; printUnquotedString(s, ""); - s += ','; printUnquotedString(s, std::string { dof.method.renderPrefix() } + printHashAlgo(dof.hashAlgo)); - s += ','; printUnquotedString(s, ""); - }, - [&](const DerivationOutput::Deferred &) { - s += ','; printUnquotedString(s, ""); - s += ','; printUnquotedString(s, ""); - s += ','; printUnquotedString(s, ""); - }, - [&](const DerivationOutput::Impure & doi) { - // FIXME - s += ','; printUnquotedString(s, ""); - s += ','; printUnquotedString(s, std::string { doi.method.renderPrefix() } + printHashAlgo(doi.hashAlgo)); - s += ','; printUnquotedString(s, "impure"); - } - }, i.second.raw); + if (first) + first = false; + else + s += ','; + s += '('; + printUnquotedString(s, i.first); + std::visit( + overloaded{ + [&](const DerivationOutput::InputAddressed & doi) { + s += ','; + printUnquotedString(s, maskOutputs ? "" : store.printStorePath(doi.path)); + s += ','; + printUnquotedString(s, ""); + s += ','; + printUnquotedString(s, ""); + }, + [&](const DerivationOutput::CAFixed & dof) { + s += ','; + printUnquotedString(s, maskOutputs ? 
"" : store.printStorePath(dof.path(store, name, i.first))); + s += ','; + printUnquotedString(s, dof.ca.printMethodAlgo()); + s += ','; + printUnquotedString(s, dof.ca.hash.to_string(HashFormat::Base16, false)); + }, + [&](const DerivationOutput::CAFloating & dof) { + s += ','; + printUnquotedString(s, ""); + s += ','; + printUnquotedString(s, std::string{dof.method.renderPrefix()} + printHashAlgo(dof.hashAlgo)); + s += ','; + printUnquotedString(s, ""); + }, + [&](const DerivationOutput::Deferred &) { + s += ','; + printUnquotedString(s, ""); + s += ','; + printUnquotedString(s, ""); + s += ','; + printUnquotedString(s, ""); + }, + [&](const DerivationOutput::Impure & doi) { + // FIXME + s += ','; + printUnquotedString(s, ""); + s += ','; + printUnquotedString(s, std::string{doi.method.renderPrefix()} + printHashAlgo(doi.hashAlgo)); + s += ','; + printUnquotedString(s, "impure"); + }}, + i.second.raw); s += ')'; } @@ -626,15 +650,23 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs, first = true; if (actualInputs) { for (auto & [drvHashModulo, childMap] : *actualInputs) { - if (first) first = false; else s += ','; - s += '('; printUnquotedString(s, drvHashModulo); + if (first) + first = false; + else + s += ','; + s += '('; + printUnquotedString(s, drvHashModulo); unparseDerivedPathMapNode(store, s, childMap); s += ')'; } } else { for (auto & [drvPath, childMap] : inputDrvs.map) { - if (first) first = false; else s += ','; - s += '('; printUnquotedString(s, store.printStorePath(drvPath)); + if (first) + first = false; + else + s += ','; + s += '('; + printUnquotedString(s, store.printStorePath(drvPath)); unparseDerivedPathMapNode(store, s, childMap); s += ')'; } @@ -644,16 +676,24 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs, auto paths = store.printStorePathSet(inputSrcs); // FIXME: slow printUnquotedStrings(s, paths.begin(), paths.end()); - s += ','; printUnquotedString(s, platform); - s += ','; printString(s, builder); - s += ','; printStrings(s, args.begin(), args.end()); + s += ','; + printUnquotedString(s, platform); + s += ','; + printString(s, builder); + s += ','; + printStrings(s, args.begin(), args.end()); s += ",["; first = true; for (auto & i : env) { - if (first) first = false; else s += ','; - s += '('; printString(s, i.first); - s += ','; printString(s, maskOutputs && outputs.count(i.first) ? "" : i.second); + if (first) + first = false; + else + s += ','; + s += '('; + printString(s, i.first); + s += ','; + printString(s, maskOutputs && outputs.count(i.first) ? 
"" : i.second); s += ')'; } @@ -662,16 +702,15 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs, return s; } - // FIXME: remove bool isDerivation(std::string_view fileName) { return hasSuffix(fileName, drvExtension); } - -std::string outputPathName(std::string_view drvName, OutputNameView outputName) { - std::string res { drvName }; +std::string outputPathName(std::string_view drvName, OutputNameView outputName) +{ + std::string res{drvName}; if (outputName != "out") { res += "-"; res += outputName; @@ -679,106 +718,75 @@ std::string outputPathName(std::string_view drvName, OutputNameView outputName) return res; } - DerivationType BasicDerivation::type() const { - std::set - inputAddressedOutputs, - fixedCAOutputs, - floatingCAOutputs, - deferredIAOutputs, + std::set inputAddressedOutputs, fixedCAOutputs, floatingCAOutputs, deferredIAOutputs, impureOutputs; std::optional floatingHashAlgo; for (auto & i : outputs) { - std::visit(overloaded { - [&](const DerivationOutput::InputAddressed &) { - inputAddressedOutputs.insert(i.first); + std::visit( + overloaded{ + [&](const DerivationOutput::InputAddressed &) { inputAddressedOutputs.insert(i.first); }, + [&](const DerivationOutput::CAFixed &) { fixedCAOutputs.insert(i.first); }, + [&](const DerivationOutput::CAFloating & dof) { + floatingCAOutputs.insert(i.first); + if (!floatingHashAlgo) { + floatingHashAlgo = dof.hashAlgo; + } else { + if (*floatingHashAlgo != dof.hashAlgo) + throw Error("all floating outputs must use the same hash algorithm"); + } + }, + [&](const DerivationOutput::Deferred &) { deferredIAOutputs.insert(i.first); }, + [&](const DerivationOutput::Impure &) { impureOutputs.insert(i.first); }, }, - [&](const DerivationOutput::CAFixed &) { - fixedCAOutputs.insert(i.first); - }, - [&](const DerivationOutput::CAFloating & dof) { - floatingCAOutputs.insert(i.first); - if (!floatingHashAlgo) { - floatingHashAlgo = dof.hashAlgo; - } else { - if (*floatingHashAlgo != dof.hashAlgo) - throw Error("all floating outputs must use the same hash algorithm"); - } - }, - [&](const DerivationOutput::Deferred &) { - deferredIAOutputs.insert(i.first); - }, - [&](const DerivationOutput::Impure &) { - impureOutputs.insert(i.first); - }, - }, i.second.raw); + i.second.raw); } - if (inputAddressedOutputs.empty() - && fixedCAOutputs.empty() - && floatingCAOutputs.empty() - && deferredIAOutputs.empty() - && impureOutputs.empty()) + if (inputAddressedOutputs.empty() && fixedCAOutputs.empty() && floatingCAOutputs.empty() + && deferredIAOutputs.empty() && impureOutputs.empty()) throw Error("must have at least one output"); - if (!inputAddressedOutputs.empty() - && fixedCAOutputs.empty() - && floatingCAOutputs.empty() - && deferredIAOutputs.empty() - && impureOutputs.empty()) - return DerivationType::InputAddressed { + if (!inputAddressedOutputs.empty() && fixedCAOutputs.empty() && floatingCAOutputs.empty() + && deferredIAOutputs.empty() && impureOutputs.empty()) + return DerivationType::InputAddressed{ .deferred = false, }; - if (inputAddressedOutputs.empty() - && !fixedCAOutputs.empty() - && floatingCAOutputs.empty() - && deferredIAOutputs.empty() - && impureOutputs.empty()) - { + if (inputAddressedOutputs.empty() && !fixedCAOutputs.empty() && floatingCAOutputs.empty() + && deferredIAOutputs.empty() && impureOutputs.empty()) { if (fixedCAOutputs.size() > 1) // FIXME: Experimental feature? 
throw Error("only one fixed output is allowed for now"); if (*fixedCAOutputs.begin() != "out") throw Error("single fixed output must be named \"out\""); - return DerivationType::ContentAddressed { + return DerivationType::ContentAddressed{ .sandboxed = false, .fixed = true, }; } - if (inputAddressedOutputs.empty() - && fixedCAOutputs.empty() - && !floatingCAOutputs.empty() - && deferredIAOutputs.empty() - && impureOutputs.empty()) - return DerivationType::ContentAddressed { + if (inputAddressedOutputs.empty() && fixedCAOutputs.empty() && !floatingCAOutputs.empty() + && deferredIAOutputs.empty() && impureOutputs.empty()) + return DerivationType::ContentAddressed{ .sandboxed = true, .fixed = false, }; - if (inputAddressedOutputs.empty() - && fixedCAOutputs.empty() - && floatingCAOutputs.empty() - && !deferredIAOutputs.empty() - && impureOutputs.empty()) - return DerivationType::InputAddressed { + if (inputAddressedOutputs.empty() && fixedCAOutputs.empty() && floatingCAOutputs.empty() + && !deferredIAOutputs.empty() && impureOutputs.empty()) + return DerivationType::InputAddressed{ .deferred = true, }; - if (inputAddressedOutputs.empty() - && fixedCAOutputs.empty() - && floatingCAOutputs.empty() - && deferredIAOutputs.empty() - && !impureOutputs.empty()) - return DerivationType::Impure { }; + if (inputAddressedOutputs.empty() && fixedCAOutputs.empty() && floatingCAOutputs.empty() + && deferredIAOutputs.empty() && !impureOutputs.empty()) + return DerivationType::Impure{}; throw Error("can't mix derivation output types"); } - Sync drvHashes; /* pathDerivationModulo and hashDerivationModulo are mutually recursive @@ -796,10 +804,7 @@ static const DrvHash pathDerivationModulo(Store & store, const StorePath & drvPa return h->second; } } - auto h = hashDerivationModulo( - store, - store.readInvalidDerivation(drvPath), - false); + auto h = hashDerivationModulo(store, store.readInvalidDerivation(drvPath), false); // Cache it drvHashes.lock()->insert_or_assign(drvPath, h); return h; @@ -831,33 +836,30 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut std::map outputHashes; for (const auto & i : drv.outputs) { auto & dof = std::get(i.second.raw); - auto hash = hashString(HashAlgorithm::SHA256, "fixed:out:" - + dof.ca.printMethodAlgo() + ":" - + dof.ca.hash.to_string(HashFormat::Base16, false) + ":" - + store.printStorePath(dof.path(store, drv.name, i.first))); + auto hash = hashString( + HashAlgorithm::SHA256, + "fixed:out:" + dof.ca.printMethodAlgo() + ":" + dof.ca.hash.to_string(HashFormat::Base16, false) + ":" + + store.printStorePath(dof.path(store, drv.name, i.first))); outputHashes.insert_or_assign(i.first, std::move(hash)); } - return DrvHash { + return DrvHash{ .hashes = outputHashes, .kind = DrvHash::Kind::Regular, }; } - auto kind = std::visit(overloaded { - [](const DerivationType::InputAddressed & ia) { - /* This might be a "pesimistically" deferred output, so we don't - "taint" the kind yet. */ - return DrvHash::Kind::Regular; - }, - [](const DerivationType::ContentAddressed & ca) { - return ca.fixed - ? DrvHash::Kind::Regular - : DrvHash::Kind::Deferred; - }, - [](const DerivationType::Impure &) -> DrvHash::Kind { - return DrvHash::Kind::Deferred; - } - }, drv.type().raw); + auto kind = std::visit( + overloaded{ + [](const DerivationType::InputAddressed & ia) { + /* This might be a "pesimistically" deferred output, so we don't + "taint" the kind yet. 
*/ + return DrvHash::Kind::Regular; + }, + [](const DerivationType::ContentAddressed & ca) { + return ca.fixed ? DrvHash::Kind::Regular : DrvHash::Kind::Deferred; + }, + [](const DerivationType::Impure &) -> DrvHash::Kind { return DrvHash::Kind::Deferred; }}, + drv.type().raw); DerivedPathMap::ChildNode::Map inputs2; for (auto & [drvPath, node] : drv.inputDrvs.map) { @@ -879,19 +881,17 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut outputHashes.insert_or_assign(outputName, hash); } - return DrvHash { + return DrvHash{ .hashes = outputHashes, .kind = kind, }; } - std::map staticOutputHashes(Store & store, const Derivation & drv) { return hashDerivationModulo(store, drv, true).hashes; } - static DerivationOutput readDerivationOutput(Source & in, const StoreDirConfig & store) { const auto pathS = readString(in); @@ -913,11 +913,8 @@ DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const StoreDirC { DerivationOutputsAndOptPaths outsAndOptPaths; for (auto & [outputName, output] : outputs) - outsAndOptPaths.insert(std::make_pair( - outputName, - std::make_pair(output, output.path(store, name, outputName)) - ) - ); + outsAndOptPaths.insert( + std::make_pair(outputName, std::make_pair(output, output.path(store, name, outputName)))); return outsAndOptPaths; } @@ -929,7 +926,6 @@ std::string_view BasicDerivation::nameFromPath(const StorePath & drvPath) return nameWithSuffix; } - Source & readDerivation(Source & in, const StoreDirConfig & store, BasicDerivation & drv, std::string_view name) { drv.name = name; @@ -942,8 +938,7 @@ Source & readDerivation(Source & in, const StoreDirConfig & store, BasicDerivati drv.outputs.emplace(std::move(name), std::move(output)); } - drv.inputSrcs = CommonProto::Serialise::read(store, - CommonProto::ReadConn { .from = in }); + drv.inputSrcs = CommonProto::Serialise::read(store, CommonProto::ReadConn{.from = in}); in >> drv.platform >> drv.builder; drv.args = readStrings(in); @@ -957,59 +952,54 @@ Source & readDerivation(Source & in, const StoreDirConfig & store, BasicDerivati return in; } - void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDerivation & drv) { out << drv.outputs.size(); for (auto & i : drv.outputs) { out << i.first; - std::visit(overloaded { - [&](const DerivationOutput::InputAddressed & doi) { - out << store.printStorePath(doi.path) - << "" - << ""; + std::visit( + overloaded{ + [&](const DerivationOutput::InputAddressed & doi) { + out << store.printStorePath(doi.path) << "" + << ""; + }, + [&](const DerivationOutput::CAFixed & dof) { + out << store.printStorePath(dof.path(store, drv.name, i.first)) << dof.ca.printMethodAlgo() + << dof.ca.hash.to_string(HashFormat::Base16, false); + }, + [&](const DerivationOutput::CAFloating & dof) { + out << "" << (std::string{dof.method.renderPrefix()} + printHashAlgo(dof.hashAlgo)) << ""; + }, + [&](const DerivationOutput::Deferred &) { + out << "" + << "" + << ""; + }, + [&](const DerivationOutput::Impure & doi) { + out << "" << (std::string{doi.method.renderPrefix()} + printHashAlgo(doi.hashAlgo)) << "impure"; + }, }, - [&](const DerivationOutput::CAFixed & dof) { - out << store.printStorePath(dof.path(store, drv.name, i.first)) - << dof.ca.printMethodAlgo() - << dof.ca.hash.to_string(HashFormat::Base16, false); - }, - [&](const DerivationOutput::CAFloating & dof) { - out << "" - << (std::string { dof.method.renderPrefix() } + printHashAlgo(dof.hashAlgo)) - << ""; - }, - [&](const DerivationOutput::Deferred &) { - out << "" - << "" - 
<< ""; - }, - [&](const DerivationOutput::Impure & doi) { - out << "" - << (std::string { doi.method.renderPrefix() } + printHashAlgo(doi.hashAlgo)) - << "impure"; - }, - }, i.second.raw); + i.second.raw); } - CommonProto::write(store, - CommonProto::WriteConn { .to = out }, - drv.inputSrcs); + CommonProto::write(store, CommonProto::WriteConn{.to = out}, drv.inputSrcs); out << drv.platform << drv.builder << drv.args; out << drv.env.size(); for (auto & i : drv.env) out << i.first << i.second; } - std::string hashPlaceholder(const OutputNameView outputName) { // FIXME: memoize? - return "/" + hashString(HashAlgorithm::SHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Nix32, false); + return "/" + + hashString(HashAlgorithm::SHA256, concatStrings("nix-output:", outputName)) + .to_string(HashFormat::Nix32, false); } void BasicDerivation::applyRewrites(const StringMap & rewrites) { - if (rewrites.empty()) return; + if (rewrites.empty()) + return; debug("rewriting the derivation"); @@ -1038,23 +1028,21 @@ static void rewriteDerivation(Store & store, BasicDerivation & drv, const String if (std::holds_alternative(output.raw)) { auto h = get(hashModulo.hashes, outputName); if (!h) - throw Error("derivation '%s' output '%s' has no hash (derivations.cc/rewriteDerivation)", - drv.name, outputName); + throw Error( + "derivation '%s' output '%s' has no hash (derivations.cc/rewriteDerivation)", drv.name, outputName); auto outPath = store.makeOutputPath(outputName, *h, drv.name); drv.env[outputName] = store.printStorePath(outPath); - output = DerivationOutput::InputAddressed { + output = DerivationOutput::InputAddressed{ .path = std::move(outPath), }; } } - } std::optional Derivation::tryResolve(Store & store, Store * evalStore) const { return tryResolve( - store, - [&](ref drvPath, const std::string & outputName) -> std::optional { + store, [&](ref drvPath, const std::string & outputName) -> std::optional { try { return resolveDerivedPath(store, SingleDerivedPath::Built{drvPath, outputName}, evalStore); } catch (Error &) { @@ -1064,41 +1052,45 @@ std::optional Derivation::tryResolve(Store & store, Store * eva } static bool tryResolveInput( - Store & store, StorePathSet & inputSrcs, StringMap & inputRewrites, + Store & store, + StorePathSet & inputSrcs, + StringMap & inputRewrites, const DownstreamPlaceholder * placeholderOpt, - ref drvPath, const DerivedPathMap::ChildNode & inputNode, - std::function(ref drvPath, const std::string & outputName)> queryResolutionChain) + ref drvPath, + const DerivedPathMap::ChildNode & inputNode, + std::function(ref drvPath, const std::string & outputName)> + queryResolutionChain) { auto getPlaceholder = [&](const std::string & outputName) { - return placeholderOpt - ? DownstreamPlaceholder::unknownDerivation(*placeholderOpt, outputName) - : [&]{ - auto * p = std::get_if(&drvPath->raw()); - // otherwise we should have had a placeholder to build-upon already - assert(p); - return DownstreamPlaceholder::unknownCaOutput(p->path, outputName); - }(); + return placeholderOpt ? 
DownstreamPlaceholder::unknownDerivation(*placeholderOpt, outputName) : [&] { + auto * p = std::get_if(&drvPath->raw()); + // otherwise we should have had a placeholder to build-upon already + assert(p); + return DownstreamPlaceholder::unknownCaOutput(p->path, outputName); + }(); }; for (auto & outputName : inputNode.value) { auto actualPathOpt = queryResolutionChain(drvPath, outputName); - if (!actualPathOpt) return false; + if (!actualPathOpt) + return false; auto actualPath = *actualPathOpt; if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { - inputRewrites.emplace( - getPlaceholder(outputName).render(), - store.printStorePath(actualPath)); + inputRewrites.emplace(getPlaceholder(outputName).render(), store.printStorePath(actualPath)); } inputSrcs.insert(std::move(actualPath)); } for (auto & [outputName, childNode] : inputNode.childMap) { auto nextPlaceholder = getPlaceholder(outputName); - if (!tryResolveInput(store, inputSrcs, inputRewrites, - &nextPlaceholder, - make_ref(SingleDerivedPath::Built{drvPath, outputName}), - childNode, - queryResolutionChain)) + if (!tryResolveInput( + store, + inputSrcs, + inputRewrites, + &nextPlaceholder, + make_ref(SingleDerivedPath::Built{drvPath, outputName}), + childNode, + queryResolutionChain)) return false; } return true; @@ -1106,16 +1098,23 @@ static bool tryResolveInput( std::optional Derivation::tryResolve( Store & store, - std::function(ref drvPath, const std::string & outputName)> queryResolutionChain) const + std::function(ref drvPath, const std::string & outputName)> + queryResolutionChain) const { - BasicDerivation resolved { *this }; + BasicDerivation resolved{*this}; // Input paths that we'll want to rewrite in the derivation StringMap inputRewrites; for (auto & [inputDrv, inputNode] : inputDrvs.map) - if (!tryResolveInput(store, resolved.inputSrcs, inputRewrites, - nullptr, make_ref(SingleDerivedPath::Opaque{inputDrv}), inputNode, queryResolutionChain)) + if (!tryResolveInput( + store, + resolved.inputSrcs, + inputRewrites, + nullptr, + make_ref(SingleDerivedPath::Opaque{inputDrv}), + inputNode, + queryResolutionChain)) return std::nullopt; rewriteDerivation(store, resolved, inputRewrites); @@ -1123,7 +1122,6 @@ std::optional Derivation::tryResolve( return resolved; } - void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const { assert(drvPath.isDerivation()); @@ -1134,15 +1132,16 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const throw Error("Derivation '%s' has name '%s' which does not match its path", store.printStorePath(drvPath), name); } - auto envHasRightPath = [&](const StorePath & actual, const std::string & varName) - { + auto envHasRightPath = [&](const StorePath & actual, const std::string & varName) { auto j = env.find(varName); if (j == env.end() || store.parseStorePath(j->second) != actual) - throw Error("derivation '%s' has incorrect environment variable '%s', should be '%s'", - store.printStorePath(drvPath), varName, store.printStorePath(actual)); + throw Error( + "derivation '%s' has incorrect environment variable '%s', should be '%s'", + store.printStorePath(drvPath), + varName, + store.printStorePath(actual)); }; - // Don't need the answer, but do this anyways to assert is proper // combination. The code below is more general and naturally allows // combinations that are currently prohibited. 
@@ -1150,74 +1149,82 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const std::optional hashesModulo; for (auto & i : outputs) { - std::visit(overloaded { - [&](const DerivationOutput::InputAddressed & doia) { - if (!hashesModulo) { - // somewhat expensive so we do lazily - hashesModulo = hashDerivationModulo(store, *this, true); - } - auto currentOutputHash = get(hashesModulo->hashes, i.first); - if (!currentOutputHash) - throw Error("derivation '%s' has unexpected output '%s' (local-store / hashesModulo) named '%s'", - store.printStorePath(drvPath), store.printStorePath(doia.path), i.first); - StorePath recomputed = store.makeOutputPath(i.first, *currentOutputHash, drvName); - if (doia.path != recomputed) - throw Error("derivation '%s' has incorrect output '%s', should be '%s'", - store.printStorePath(drvPath), store.printStorePath(doia.path), store.printStorePath(recomputed)); - envHasRightPath(doia.path, i.first); + std::visit( + overloaded{ + [&](const DerivationOutput::InputAddressed & doia) { + if (!hashesModulo) { + // somewhat expensive so we do lazily + hashesModulo = hashDerivationModulo(store, *this, true); + } + auto currentOutputHash = get(hashesModulo->hashes, i.first); + if (!currentOutputHash) + throw Error( + "derivation '%s' has unexpected output '%s' (local-store / hashesModulo) named '%s'", + store.printStorePath(drvPath), + store.printStorePath(doia.path), + i.first); + StorePath recomputed = store.makeOutputPath(i.first, *currentOutputHash, drvName); + if (doia.path != recomputed) + throw Error( + "derivation '%s' has incorrect output '%s', should be '%s'", + store.printStorePath(drvPath), + store.printStorePath(doia.path), + store.printStorePath(recomputed)); + envHasRightPath(doia.path, i.first); + }, + [&](const DerivationOutput::CAFixed & dof) { + auto path = dof.path(store, drvName, i.first); + envHasRightPath(path, i.first); + }, + [&](const DerivationOutput::CAFloating &) { + /* Nothing to check */ + }, + [&](const DerivationOutput::Deferred &) { + /* Nothing to check */ + }, + [&](const DerivationOutput::Impure &) { + /* Nothing to check */ + }, }, - [&](const DerivationOutput::CAFixed & dof) { - auto path = dof.path(store, drvName, i.first); - envHasRightPath(path, i.first); - }, - [&](const DerivationOutput::CAFloating &) { - /* Nothing to check */ - }, - [&](const DerivationOutput::Deferred &) { - /* Nothing to check */ - }, - [&](const DerivationOutput::Impure &) { - /* Nothing to check */ - }, - }, i.second.raw); + i.second.raw); } } - const Hash impureOutputHash = hashString(HashAlgorithm::SHA256, "impure"); -nlohmann::json DerivationOutput::toJSON( - const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const +nlohmann::json +DerivationOutput::toJSON(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const { nlohmann::json res = nlohmann::json::object(); - std::visit(overloaded { - [&](const DerivationOutput::InputAddressed & doi) { - res["path"] = store.printStorePath(doi.path); + std::visit( + overloaded{ + [&](const DerivationOutput::InputAddressed & doi) { res["path"] = store.printStorePath(doi.path); }, + [&](const DerivationOutput::CAFixed & dof) { + res["path"] = store.printStorePath(dof.path(store, drvName, outputName)); + res["method"] = std::string{dof.ca.method.render()}; + res["hashAlgo"] = printHashAlgo(dof.ca.hash.algo); + res["hash"] = dof.ca.hash.to_string(HashFormat::Base16, false); + // FIXME print refs? 
+ }, + [&](const DerivationOutput::CAFloating & dof) { + res["method"] = std::string{dof.method.render()}; + res["hashAlgo"] = printHashAlgo(dof.hashAlgo); + }, + [&](const DerivationOutput::Deferred &) {}, + [&](const DerivationOutput::Impure & doi) { + res["method"] = std::string{doi.method.render()}; + res["hashAlgo"] = printHashAlgo(doi.hashAlgo); + res["impure"] = true; + }, }, - [&](const DerivationOutput::CAFixed & dof) { - res["path"] = store.printStorePath(dof.path(store, drvName, outputName)); - res["method"] = std::string { dof.ca.method.render() }; - res["hashAlgo"] = printHashAlgo(dof.ca.hash.algo); - res["hash"] = dof.ca.hash.to_string(HashFormat::Base16, false); - // FIXME print refs? - }, - [&](const DerivationOutput::CAFloating & dof) { - res["method"] = std::string { dof.method.render() }; - res["hashAlgo"] = printHashAlgo(dof.hashAlgo); - }, - [&](const DerivationOutput::Deferred &) {}, - [&](const DerivationOutput::Impure & doi) { - res["method"] = std::string { doi.method.render() }; - res["hashAlgo"] = printHashAlgo(doi.hashAlgo); - res["impure"] = true; - }, - }, raw); + raw); return res; } - DerivationOutput DerivationOutput::fromJSON( - const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName, + const StoreDirConfig & store, + std::string_view drvName, + OutputNameView outputName, const nlohmann::json & _json, const ExperimentalFeatureSettings & xpSettings) { @@ -1228,52 +1235,51 @@ DerivationOutput DerivationOutput::fromJSON( keys.insert(key); auto methodAlgo = [&]() -> std::pair { - ContentAddressMethod method = ContentAddressMethod::parse( - getString(valueAt(json, "method"))); + ContentAddressMethod method = ContentAddressMethod::parse(getString(valueAt(json, "method"))); if (method == ContentAddressMethod::Raw::Text) xpSettings.require(Xp::DynamicDerivations); - auto hashAlgo = parseHashAlgo( - getString(valueAt(json, "hashAlgo"))); - return { std::move(method), std::move(hashAlgo) }; + auto hashAlgo = parseHashAlgo(getString(valueAt(json, "hashAlgo"))); + return {std::move(method), std::move(hashAlgo)}; }; - if (keys == (std::set { "path" })) { - return DerivationOutput::InputAddressed { + if (keys == (std::set{"path"})) { + return DerivationOutput::InputAddressed{ .path = store.parseStorePath(getString(valueAt(json, "path"))), }; } - else if (keys == (std::set { "path", "method", "hashAlgo", "hash" })) { + else if (keys == (std::set{"path", "method", "hashAlgo", "hash"})) { auto [method, hashAlgo] = methodAlgo(); - auto dof = DerivationOutput::CAFixed { - .ca = ContentAddress { - .method = std::move(method), - .hash = Hash::parseNonSRIUnprefixed(getString(valueAt(json, "hash")), hashAlgo), - }, + auto dof = DerivationOutput::CAFixed{ + .ca = + ContentAddress{ + .method = std::move(method), + .hash = Hash::parseNonSRIUnprefixed(getString(valueAt(json, "hash")), hashAlgo), + }, }; if (dof.path(store, drvName, outputName) != store.parseStorePath(getString(valueAt(json, "path")))) throw Error("Path doesn't match derivation output"); return dof; } - else if (keys == (std::set { "method", "hashAlgo" })) { + else if (keys == (std::set{"method", "hashAlgo"})) { xpSettings.require(Xp::CaDerivations); auto [method, hashAlgo] = methodAlgo(); - return DerivationOutput::CAFloating { + return DerivationOutput::CAFloating{ .method = std::move(method), .hashAlgo = std::move(hashAlgo), }; } - else if (keys == (std::set { })) { - return DerivationOutput::Deferred {}; + else if (keys == (std::set{})) { + return DerivationOutput::Deferred{}; } - else if 
(keys == (std::set { "method", "hashAlgo", "impure" })) { + else if (keys == (std::set{"method", "hashAlgo", "impure"})) { xpSettings.require(Xp::ImpureDerivations); auto [method, hashAlgo] = methodAlgo(); - return DerivationOutput::Impure { + return DerivationOutput::Impure{ .method = std::move(method), .hashAlgo = hashAlgo, }; @@ -1284,7 +1290,6 @@ DerivationOutput DerivationOutput::fromJSON( } } - nlohmann::json Derivation::toJSON(const StoreDirConfig & store) const { nlohmann::json res = nlohmann::json::object(); @@ -1300,7 +1305,7 @@ nlohmann::json Derivation::toJSON(const StoreDirConfig & store) const } { - auto& inputsList = res["inputSrcs"]; + auto & inputsList = res["inputSrcs"]; inputsList = nlohmann::json ::array(); for (auto & input : inputSrcs) inputsList.emplace_back(store.printStorePath(input)); @@ -1320,7 +1325,7 @@ nlohmann::json Derivation::toJSON(const StoreDirConfig & store) const return value; }; { - auto& inputDrvsObj = res["inputDrvs"]; + auto & inputDrvsObj = res["inputDrvs"]; inputDrvsObj = nlohmann::json::object(); for (auto & [inputDrv, inputNode] : inputDrvs.map) { inputDrvsObj[store.printStorePath(inputDrv)] = doInput(inputNode); @@ -1341,11 +1346,8 @@ nlohmann::json Derivation::toJSON(const StoreDirConfig & store) const return res; } - Derivation Derivation::fromJSON( - const StoreDirConfig & store, - const nlohmann::json & _json, - const ExperimentalFeatureSettings & xpSettings) + const StoreDirConfig & store, const nlohmann::json & _json, const ExperimentalFeatureSettings & xpSettings) { using nlohmann::detail::value_t; @@ -1359,8 +1361,7 @@ Derivation Derivation::fromJSON( auto outputs = getObject(valueAt(json, "outputs")); for (auto & [outputName, output] : outputs) { res.outputs.insert_or_assign( - outputName, - DerivationOutput::fromJSON(store, res.name, outputName, output, xpSettings)); + outputName, DerivationOutput::fromJSON(store, res.name, outputName, output, xpSettings)); } } catch (Error & e) { e.addTrace({}, "while reading key 'outputs'"); @@ -1391,8 +1392,7 @@ Derivation Derivation::fromJSON( }; auto drvs = getObject(valueAt(json, "inputDrvs")); for (auto & [inputDrvPath, inputOutputs] : drvs) - res.inputDrvs.map[store.parseStorePath(inputDrvPath)] = - doInput(inputOutputs); + res.inputDrvs.map[store.parseStorePath(inputDrvPath)] = doInput(inputOutputs); } catch (Error & e) { e.addTrace({}, "while reading key 'inputDrvs'"); throw; @@ -1416,4 +1416,4 @@ Derivation Derivation::fromJSON( return res; } -} +} // namespace nix diff --git a/src/libstore/derived-path-map.cc b/src/libstore/derived-path-map.cc index e34deb744..bcbdc85bd 100644 --- a/src/libstore/derived-path-map.cc +++ b/src/libstore/derived-path-map.cc @@ -6,18 +6,20 @@ namespace nix { template typename DerivedPathMap::ChildNode & DerivedPathMap::ensureSlot(const SingleDerivedPath & k) { - std::function initIter; + std::function initIter; initIter = [&](const auto & k) -> auto & { - return std::visit(overloaded { - [&](const SingleDerivedPath::Opaque & bo) -> auto & { - // will not overwrite if already there - return map[bo.path]; + return std::visit( + overloaded{ + [&](const SingleDerivedPath::Opaque & bo) -> auto & { + // will not overwrite if already there + return map[bo.path]; + }, + [&](const SingleDerivedPath::Built & bfd) -> auto & { + auto & n = initIter(*bfd.drvPath); + return n.childMap[bfd.output]; + }, }, - [&](const SingleDerivedPath::Built & bfd) -> auto & { - auto & n = initIter(*bfd.drvPath); - return n.childMap[bfd.output]; - }, - }, k.raw()); + k.raw()); }; return 
initIter(k); } @@ -25,39 +27,39 @@ typename DerivedPathMap::ChildNode & DerivedPathMap::ensureSlot(const Sing template typename DerivedPathMap::ChildNode * DerivedPathMap::findSlot(const SingleDerivedPath & k) { - std::function initIter; + std::function initIter; initIter = [&](const auto & k) { - return std::visit(overloaded { - [&](const SingleDerivedPath::Opaque & bo) { - auto it = map.find(bo.path); - return it != map.end() - ? &it->second - : nullptr; - }, - [&](const SingleDerivedPath::Built & bfd) { - auto * n = initIter(*bfd.drvPath); - if (!n) return (ChildNode *)nullptr; + return std::visit( + overloaded{ + [&](const SingleDerivedPath::Opaque & bo) { + auto it = map.find(bo.path); + return it != map.end() ? &it->second : nullptr; + }, + [&](const SingleDerivedPath::Built & bfd) { + auto * n = initIter(*bfd.drvPath); + if (!n) + return (ChildNode *) nullptr; - auto it = n->childMap.find(bfd.output); - return it != n->childMap.end() - ? &it->second - : nullptr; + auto it = n->childMap.find(bfd.output); + return it != n->childMap.end() ? &it->second : nullptr; + }, }, - }, k.raw()); + k.raw()); }; return initIter(k); } -} +} // namespace nix // instantiations #include "nix/store/build/derivation-trampoline-goal.hh" + namespace nix { template<> -bool DerivedPathMap::ChildNode::operator == ( - const DerivedPathMap::ChildNode &) const noexcept = default; +bool DerivedPathMap::ChildNode::operator==(const DerivedPathMap::ChildNode &) const noexcept = + default; // TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet. #if 0 @@ -71,5 +73,4 @@ template struct DerivedPathMap; template struct DerivedPathMap>>; - -}; +}; // namespace nix diff --git a/src/libstore/derived-path.cc b/src/libstore/derived-path.cc index 6186f0582..1fee1ae75 100644 --- a/src/libstore/derived-path.cc +++ b/src/libstore/derived-path.cc @@ -10,38 +10,22 @@ namespace nix { // Custom implementation to avoid `ref` ptr equality -GENERATE_CMP_EXT( - , - std::strong_ordering, - SingleDerivedPathBuilt, - *me->drvPath, - me->output); +GENERATE_CMP_EXT(, std::strong_ordering, SingleDerivedPathBuilt, *me->drvPath, me->output); // Custom implementation to avoid `ref` ptr equality // TODO no `GENERATE_CMP_EXT` because no `std::set::operator<=>` on // Darwin, per header. 
-GENERATE_EQUAL( - , - DerivedPathBuilt ::, - DerivedPathBuilt, - *me->drvPath, - me->outputs); -GENERATE_ONE_CMP( - , - bool, - DerivedPathBuilt ::, - <, - DerivedPathBuilt, - *me->drvPath, - me->outputs); +GENERATE_EQUAL(, DerivedPathBuilt ::, DerivedPathBuilt, *me->drvPath, me->outputs); +GENERATE_ONE_CMP(, bool, DerivedPathBuilt ::, <, DerivedPathBuilt, *me->drvPath, me->outputs); nlohmann::json DerivedPath::Opaque::toJSON(const StoreDirConfig & store) const { return store.printStorePath(path); } -nlohmann::json SingleDerivedPath::Built::toJSON(Store & store) const { +nlohmann::json SingleDerivedPath::Built::toJSON(Store & store) const +{ nlohmann::json res; res["drvPath"] = drvPath->toJSON(store); // Fallback for the input-addressed derivation case: We expect to always be @@ -59,7 +43,8 @@ nlohmann::json SingleDerivedPath::Built::toJSON(Store & store) const { return res; } -nlohmann::json DerivedPath::Built::toJSON(Store & store) const { +nlohmann::json DerivedPath::Built::toJSON(Store & store) const +{ nlohmann::json res; res["drvPath"] = drvPath->toJSON(store); // Fallback for the input-addressed derivation case: We expect to always be @@ -67,7 +52,8 @@ nlohmann::json DerivedPath::Built::toJSON(Store & store) const { // FIXME try-resolve on drvPath const auto outputMap = store.queryPartialDerivationOutputMap(resolveDerivedPath(store, *drvPath)); for (const auto & [output, outputPathOpt] : outputMap) { - if (!outputs.contains(output)) continue; + if (!outputs.contains(output)) + continue; if (outputPathOpt) res["outputs"][output] = store.printStorePath(*outputPathOpt); else @@ -78,16 +64,12 @@ nlohmann::json DerivedPath::Built::toJSON(Store & store) const { nlohmann::json SingleDerivedPath::toJSON(Store & store) const { - return std::visit([&](const auto & buildable) { - return buildable.toJSON(store); - }, raw()); + return std::visit([&](const auto & buildable) { return buildable.toJSON(store); }, raw()); } nlohmann::json DerivedPath::toJSON(Store & store) const { - return std::visit([&](const auto & buildable) { - return buildable.toJSON(store); - }, raw()); + return std::visit([&](const auto & buildable) { return buildable.toJSON(store); }, raw()); } std::string DerivedPath::Opaque::to_string(const StoreDirConfig & store) const @@ -107,82 +89,77 @@ std::string SingleDerivedPath::Built::to_string_legacy(const StoreDirConfig & st std::string DerivedPath::Built::to_string(const StoreDirConfig & store) const { - return drvPath->to_string(store) - + '^' - + outputs.to_string(); + return drvPath->to_string(store) + '^' + outputs.to_string(); } std::string DerivedPath::Built::to_string_legacy(const StoreDirConfig & store) const { - return drvPath->to_string_legacy(store) - + "!" - + outputs.to_string(); + return drvPath->to_string_legacy(store) + "!" 
+ outputs.to_string(); } std::string SingleDerivedPath::to_string(const StoreDirConfig & store) const { - return std::visit( - [&](const auto & req) { return req.to_string(store); }, - raw()); + return std::visit([&](const auto & req) { return req.to_string(store); }, raw()); } std::string DerivedPath::to_string(const StoreDirConfig & store) const { - return std::visit( - [&](const auto & req) { return req.to_string(store); }, - raw()); + return std::visit([&](const auto & req) { return req.to_string(store); }, raw()); } std::string SingleDerivedPath::to_string_legacy(const StoreDirConfig & store) const { - return std::visit(overloaded { - [&](const SingleDerivedPath::Built & req) { return req.to_string_legacy(store); }, - [&](const SingleDerivedPath::Opaque & req) { return req.to_string(store); }, - }, this->raw()); + return std::visit( + overloaded{ + [&](const SingleDerivedPath::Built & req) { return req.to_string_legacy(store); }, + [&](const SingleDerivedPath::Opaque & req) { return req.to_string(store); }, + }, + this->raw()); } std::string DerivedPath::to_string_legacy(const StoreDirConfig & store) const { - return std::visit(overloaded { - [&](const DerivedPath::Built & req) { return req.to_string_legacy(store); }, - [&](const DerivedPath::Opaque & req) { return req.to_string(store); }, - }, this->raw()); + return std::visit( + overloaded{ + [&](const DerivedPath::Built & req) { return req.to_string_legacy(store); }, + [&](const DerivedPath::Opaque & req) { return req.to_string(store); }, + }, + this->raw()); } - DerivedPath::Opaque DerivedPath::Opaque::parse(const StoreDirConfig & store, std::string_view s) { return {store.parseStorePath(s)}; } -void drvRequireExperiment( - const SingleDerivedPath & drv, - const ExperimentalFeatureSettings & xpSettings) +void drvRequireExperiment(const SingleDerivedPath & drv, const ExperimentalFeatureSettings & xpSettings) { - std::visit(overloaded { - [&](const SingleDerivedPath::Opaque &) { - // plain drv path; no experimental features required. + std::visit( + overloaded{ + [&](const SingleDerivedPath::Opaque &) { + // plain drv path; no experimental features required. + }, + [&](const SingleDerivedPath::Built &) { xpSettings.require(Xp::DynamicDerivations); }, }, - [&](const SingleDerivedPath::Built &) { - xpSettings.require(Xp::DynamicDerivations); - }, - }, drv.raw()); + drv.raw()); } SingleDerivedPath::Built SingleDerivedPath::Built::parse( - const StoreDirConfig & store, ref drv, + const StoreDirConfig & store, + ref drv, OutputNameView output, const ExperimentalFeatureSettings & xpSettings) { drvRequireExperiment(*drv, xpSettings); return { .drvPath = drv, - .output = std::string { output }, + .output = std::string{output}, }; } DerivedPath::Built DerivedPath::Built::parse( - const StoreDirConfig & store, ref drv, + const StoreDirConfig & store, + ref drv, OutputNameView outputsS, const ExperimentalFeatureSettings & xpSettings) { @@ -194,117 +171,105 @@ DerivedPath::Built DerivedPath::Built::parse( } static SingleDerivedPath parseWithSingle( - const StoreDirConfig & store, std::string_view s, std::string_view separator, + const StoreDirConfig & store, + std::string_view s, + std::string_view separator, const ExperimentalFeatureSettings & xpSettings) { size_t n = s.rfind(separator); return n == s.npos - ? 
(SingleDerivedPath) SingleDerivedPath::Opaque::parse(store, s) - : (SingleDerivedPath) SingleDerivedPath::Built::parse(store, - make_ref(parseWithSingle( - store, - s.substr(0, n), - separator, - xpSettings)), - s.substr(n + 1), - xpSettings); + ? (SingleDerivedPath) SingleDerivedPath::Opaque::parse(store, s) + : (SingleDerivedPath) SingleDerivedPath::Built::parse( + store, + make_ref(parseWithSingle(store, s.substr(0, n), separator, xpSettings)), + s.substr(n + 1), + xpSettings); } SingleDerivedPath SingleDerivedPath::parse( - const StoreDirConfig & store, - std::string_view s, - const ExperimentalFeatureSettings & xpSettings) + const StoreDirConfig & store, std::string_view s, const ExperimentalFeatureSettings & xpSettings) { return parseWithSingle(store, s, "^", xpSettings); } SingleDerivedPath SingleDerivedPath::parseLegacy( - const StoreDirConfig & store, - std::string_view s, - const ExperimentalFeatureSettings & xpSettings) + const StoreDirConfig & store, std::string_view s, const ExperimentalFeatureSettings & xpSettings) { return parseWithSingle(store, s, "!", xpSettings); } static DerivedPath parseWith( - const StoreDirConfig & store, std::string_view s, std::string_view separator, + const StoreDirConfig & store, + std::string_view s, + std::string_view separator, const ExperimentalFeatureSettings & xpSettings) { size_t n = s.rfind(separator); return n == s.npos - ? (DerivedPath) DerivedPath::Opaque::parse(store, s) - : (DerivedPath) DerivedPath::Built::parse(store, - make_ref(parseWithSingle( - store, - s.substr(0, n), - separator, - xpSettings)), - s.substr(n + 1), - xpSettings); + ? (DerivedPath) DerivedPath::Opaque::parse(store, s) + : (DerivedPath) DerivedPath::Built::parse( + store, + make_ref(parseWithSingle(store, s.substr(0, n), separator, xpSettings)), + s.substr(n + 1), + xpSettings); } -DerivedPath DerivedPath::parse( - const StoreDirConfig & store, - std::string_view s, - const ExperimentalFeatureSettings & xpSettings) +DerivedPath +DerivedPath::parse(const StoreDirConfig & store, std::string_view s, const ExperimentalFeatureSettings & xpSettings) { return parseWith(store, s, "^", xpSettings); } DerivedPath DerivedPath::parseLegacy( - const StoreDirConfig & store, - std::string_view s, - const ExperimentalFeatureSettings & xpSettings) + const StoreDirConfig & store, std::string_view s, const ExperimentalFeatureSettings & xpSettings) { return parseWith(store, s, "!", xpSettings); } DerivedPath DerivedPath::fromSingle(const SingleDerivedPath & req) { - return std::visit(overloaded { - [&](const SingleDerivedPath::Opaque & o) -> DerivedPath { - return o; + return std::visit( + overloaded{ + [&](const SingleDerivedPath::Opaque & o) -> DerivedPath { return o; }, + [&](const SingleDerivedPath::Built & b) -> DerivedPath { + return DerivedPath::Built{ + .drvPath = b.drvPath, + .outputs = OutputsSpec::Names{b.output}, + }; + }, }, - [&](const SingleDerivedPath::Built & b) -> DerivedPath { - return DerivedPath::Built { - .drvPath = b.drvPath, - .outputs = OutputsSpec::Names { b.output }, - }; - }, - }, req.raw()); + req.raw()); } const StorePath & SingleDerivedPath::Built::getBaseStorePath() const { - return drvPath->getBaseStorePath(); + return drvPath->getBaseStorePath(); } const StorePath & DerivedPath::Built::getBaseStorePath() const { - return drvPath->getBaseStorePath(); + return drvPath->getBaseStorePath(); } template static inline const StorePath & getBaseStorePath_(const DP & derivedPath) { - return std::visit(overloaded { - [&](const typename DP::Built & bfd) -> auto 
& { - return bfd.drvPath->getBaseStorePath(); + return std::visit( + overloaded{ + [&](const typename DP::Built & bfd) -> auto & { return bfd.drvPath->getBaseStorePath(); }, + [&](const typename DP::Opaque & bo) -> auto & { return bo.path; }, }, - [&](const typename DP::Opaque & bo) -> auto & { - return bo.path; - }, - }, derivedPath.raw()); + derivedPath.raw()); } const StorePath & SingleDerivedPath::getBaseStorePath() const { - return getBaseStorePath_(*this); + return getBaseStorePath_(*this); } const StorePath & DerivedPath::getBaseStorePath() const { - return getBaseStorePath_(*this); + return getBaseStorePath_(*this); } -} +} // namespace nix diff --git a/src/libstore/downstream-placeholder.cc b/src/libstore/downstream-placeholder.cc index 24ce2ad99..b3ac1c8c4 100644 --- a/src/libstore/downstream-placeholder.cc +++ b/src/libstore/downstream-placeholder.cc @@ -8,19 +8,15 @@ std::string DownstreamPlaceholder::render() const return "/" + hash.to_string(HashFormat::Nix32, false); } - DownstreamPlaceholder DownstreamPlaceholder::unknownCaOutput( - const StorePath & drvPath, - OutputNameView outputName, - const ExperimentalFeatureSettings & xpSettings) + const StorePath & drvPath, OutputNameView outputName, const ExperimentalFeatureSettings & xpSettings) { xpSettings.require(Xp::CaDerivations); auto drvNameWithExtension = drvPath.name(); auto drvName = drvNameWithExtension.substr(0, drvNameWithExtension.size() - 4); - auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName); - return DownstreamPlaceholder { - hashString(HashAlgorithm::SHA256, clearText) - }; + auto clearText = + "nix-upstream-output:" + std::string{drvPath.hashPart()} + ":" + outputPathName(drvName, outputName); + return DownstreamPlaceholder{hashString(HashAlgorithm::SHA256, clearText)}; } DownstreamPlaceholder DownstreamPlaceholder::unknownDerivation( @@ -30,29 +26,25 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownDerivation( { xpSettings.require(Xp::DynamicDerivations); auto compressed = compressHash(placeholder.hash, 20); - auto clearText = "nix-computed-output:" - + compressed.to_string(HashFormat::Nix32, false) - + ":" + std::string { outputName }; - return DownstreamPlaceholder { - hashString(HashAlgorithm::SHA256, clearText) - }; + auto clearText = + "nix-computed-output:" + compressed.to_string(HashFormat::Nix32, false) + ":" + std::string{outputName}; + return DownstreamPlaceholder{hashString(HashAlgorithm::SHA256, clearText)}; } DownstreamPlaceholder DownstreamPlaceholder::fromSingleDerivedPathBuilt( - const SingleDerivedPath::Built & b, - const ExperimentalFeatureSettings & xpSettings) + const SingleDerivedPath::Built & b, const ExperimentalFeatureSettings & xpSettings) { - return std::visit(overloaded { - [&](const SingleDerivedPath::Opaque & o) { - return DownstreamPlaceholder::unknownCaOutput(o.path, b.output, xpSettings); + return std::visit( + overloaded{ + [&](const SingleDerivedPath::Opaque & o) { + return DownstreamPlaceholder::unknownCaOutput(o.path, b.output, xpSettings); + }, + [&](const SingleDerivedPath::Built & b2) { + return DownstreamPlaceholder::unknownDerivation( + DownstreamPlaceholder::fromSingleDerivedPathBuilt(b2, xpSettings), b.output, xpSettings); + }, }, - [&](const SingleDerivedPath::Built & b2) { - return DownstreamPlaceholder::unknownDerivation( - DownstreamPlaceholder::fromSingleDerivedPathBuilt(b2, xpSettings), - b.output, - xpSettings); - }, - }, b.drvPath->raw()); + b.drvPath->raw()); } -} +} // namespace nix 
diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc index 819c47bab..74119a529 100644 --- a/src/libstore/dummy-store.cc +++ b/src/libstore/dummy-store.cc @@ -3,7 +3,8 @@ namespace nix { -struct DummyStoreConfig : public std::enable_shared_from_this, virtual StoreConfig { +struct DummyStoreConfig : public std::enable_shared_from_this, virtual StoreConfig +{ using StoreConfig::StoreConfig; DummyStoreConfig(std::string_view scheme, std::string_view authority, const Params & params) @@ -13,16 +14,20 @@ struct DummyStoreConfig : public std::enable_shared_from_this, throw UsageError("`%s` store URIs must not contain an authority part %s", scheme, authority); } - static const std::string name() { return "Dummy Store"; } + static const std::string name() + { + return "Dummy Store"; + } static std::string doc() { return - #include "dummy-store.md" - ; +#include "dummy-store.md" + ; } - static StringSet uriSchemes() { + static StringSet uriSchemes() + { return {"dummy"}; } @@ -38,15 +43,16 @@ struct DummyStore : virtual Store DummyStore(ref config) : Store{*config} , config(config) - { } + { + } std::string getUri() override { return *Config::uriSchemes().begin(); } - void queryPathInfoUncached(const StorePath & path, - Callback> callback) noexcept override + void queryPathInfoUncached( + const StorePath & path, Callback> callback) noexcept override { callback(nullptr); } @@ -60,11 +66,14 @@ struct DummyStore : virtual Store } std::optional queryPathFromHashPart(const std::string & hashPart) override - { unsupported("queryPathFromHashPart"); } + { + unsupported("queryPathFromHashPart"); + } - void addToStore(const ValidPathInfo & info, Source & source, - RepairFlag repair, CheckSigsFlag checkSigs) override - { unsupported("addToStore"); } + void addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs) override + { + unsupported("addToStore"); + } virtual StorePath addToStoreFromDump( Source & dump, @@ -74,14 +83,20 @@ struct DummyStore : virtual Store HashAlgorithm hashAlgo = HashAlgorithm::SHA256, const StorePathSet & references = StorePathSet(), RepairFlag repair = NoRepair) override - { unsupported("addToStore"); } + { + unsupported("addToStore"); + } void narFromPath(const StorePath & path, Sink & sink) override - { unsupported("narFromPath"); } + { + unsupported("narFromPath"); + } - void queryRealisationUncached(const DrvOutput &, - Callback> callback) noexcept override - { callback(nullptr); } + void + queryRealisationUncached(const DrvOutput &, Callback> callback) noexcept override + { + callback(nullptr); + } virtual ref getFSAccessor(bool requireValidPath) override { @@ -96,4 +111,4 @@ ref DummyStore::Config::openStore() const static RegisterStoreImplementation regDummyStore; -} +} // namespace nix diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index 5bbdd1e5c..a199d9680 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -35,18 +35,15 @@ void Store::exportPath(const StorePath & path, Sink & sink) Don't complain if the stored hash is zero (unknown). 
*/ Hash hash = hashSink.currentHash().first; if (hash != info->narHash && info->narHash != Hash(info->narHash.algo)) - throw Error("hash of path '%s' has changed from '%s' to '%s'!", - printStorePath(path), info->narHash.to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true)); + throw Error( + "hash of path '%s' has changed from '%s' to '%s'!", + printStorePath(path), + info->narHash.to_string(HashFormat::Nix32, true), + hash.to_string(HashFormat::Nix32, true)); - teeSink - << exportMagic - << printStorePath(path); - CommonProto::write(*this, - CommonProto::WriteConn { .to = teeSink }, - info->references); - teeSink - << (info->deriver ? printStorePath(*info->deriver) : "") - << 0; + teeSink << exportMagic << printStorePath(path); + CommonProto::write(*this, CommonProto::WriteConn{.to = teeSink}, info->references); + teeSink << (info->deriver ? printStorePath(*info->deriver) : "") << 0; } StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) @@ -54,12 +51,14 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) StorePaths res; while (true) { auto n = readNum(source); - if (n == 0) break; - if (n != 1) throw Error("input doesn't look like something created by 'nix-store --export'"); + if (n == 0) + break; + if (n != 1) + throw Error("input doesn't look like something created by 'nix-store --export'"); /* Extract the NAR from the source. */ StringSink saved; - TeeSource tee { source, saved }; + TeeSource tee{source, saved}; NullFileSystemObjectSink ether; parseDump(ether, tee); @@ -69,14 +68,13 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) auto path = parseStorePath(readString(source)); - //Activity act(*logger, lvlInfo, "importing path '%s'", info.path); + // Activity act(*logger, lvlInfo, "importing path '%s'", info.path); - auto references = CommonProto::Serialise::read(*this, - CommonProto::ReadConn { .from = source }); + auto references = CommonProto::Serialise::read(*this, CommonProto::ReadConn{.from = source}); auto deriver = readString(source); auto narHash = hashString(HashAlgorithm::SHA256, saved.s); - ValidPathInfo info { path, narHash }; + ValidPathInfo info{path, narHash}; if (deriver != "") info.deriver = parseStorePath(deriver); info.references = references; @@ -96,4 +94,4 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) return res; } -} +} // namespace nix diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 7e29d00e6..c29da12e8 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -10,11 +10,11 @@ #include "store-config-private.hh" #if NIX_WITH_S3_SUPPORT -#include +# include #endif #ifdef __linux__ -# include "nix/util/linux-namespaces.hh" +# include "nix/util/linux-namespaces.hh" #endif #include @@ -77,7 +77,7 @@ struct curlFileTransfer : public FileTransfer std::chrono::steady_clock::time_point startTime = std::chrono::steady_clock::now(); - inline static const std::set successfulStatuses {200, 201, 204, 206, 304, 0 /* other protocol */}; + inline static const std::set successfulStatuses{200, 201, 204, 206, 304, 0 /* other protocol */}; /* Get the HTTP status code, or 0 for other protocols. 
*/ long getHTTPStatus() @@ -90,14 +90,18 @@ struct curlFileTransfer : public FileTransfer return httpStatus; } - TransferItem(curlFileTransfer & fileTransfer, + TransferItem( + curlFileTransfer & fileTransfer, const FileTransferRequest & request, Callback && callback) : fileTransfer(fileTransfer) , request(request) - , act(*logger, lvlTalkative, actFileTransfer, - fmt("%sing '%s'", request.verb(), request.uri), - {request.uri}, request.parentAct) + , act(*logger, + lvlTalkative, + actFileTransfer, + fmt("%sing '%s'", request.verb(), request.uri), + {request.uri}, + request.parentAct) , callback(std::move(callback)) , finalSink([this](std::string_view data) { if (errorSink) { @@ -115,7 +119,7 @@ struct curlFileTransfer : public FileTransfer } } else this->result.data.append(data); - }) + }) { result.urls.push_back(request.uri); @@ -124,7 +128,7 @@ struct curlFileTransfer : public FileTransfer requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + request.expectedETag).c_str()); if (!request.mimeType.empty()) requestHeaders = curl_slist_append(requestHeaders, ("Content-Type: " + request.mimeType).c_str()); - for (auto it = request.headers.begin(); it != request.headers.end(); ++it){ + for (auto it = request.headers.begin(); it != request.headers.end(); ++it) { requestHeaders = curl_slist_append(requestHeaders, fmt("%s: %s", it->first, it->second).c_str()); } } @@ -136,7 +140,8 @@ struct curlFileTransfer : public FileTransfer curl_multi_remove_handle(fileTransfer.curlm, req); curl_easy_cleanup(req); } - if (requestHeaders) curl_slist_free_all(requestHeaders); + if (requestHeaders) + curl_slist_free_all(requestHeaders); try { if (!done) fail(FileTransferError(Interrupted, {}, "download of '%s' was interrupted", request.uri)); @@ -172,12 +177,12 @@ struct curlFileTransfer : public FileTransfer if (!decompressionSink) { decompressionSink = makeDecompressionSink(encoding, finalSink); - if (! successfulStatuses.count(getHTTPStatus())) { + if (!successfulStatuses.count(getHTTPStatus())) { // In this case we want to construct a TeeSink, to keep // the response around (which we figure won't be big // like an actual download should be) to improve error // messages. 
- errorSink = StringSink { }; + errorSink = StringSink{}; } } @@ -247,7 +252,8 @@ struct curlFileTransfer : public FileTransfer else if (name == "link" || name == "x-amz-meta-link") { auto value = trim(line.substr(i + 1)); - static std::regex linkRegex("<([^>]*)>; rel=\"immutable\"", std::regex::extended | std::regex::icase); + static std::regex linkRegex( + "<([^>]*)>; rel=\"immutable\"", std::regex::extended | std::regex::icase); if (std::smatch match; std::regex_match(value, match, linkRegex)) result.immutableUrl = match.str(1); else @@ -273,7 +279,8 @@ struct curlFileTransfer : public FileTransfer return getInterrupted(); } - static int progressCallbackWrapper(void * userp, curl_off_t dltotal, curl_off_t dlnow, curl_off_t ultotal, curl_off_t ulnow) + static int progressCallbackWrapper( + void * userp, curl_off_t dltotal, curl_off_t dlnow, curl_off_t ultotal, curl_off_t ulnow) { auto & item = *static_cast(userp); auto isUpload = bool(item.request.data); @@ -288,7 +295,8 @@ struct curlFileTransfer : public FileTransfer } size_t readOffset = 0; - size_t readCallback(char *buffer, size_t size, size_t nitems) + + size_t readCallback(char * buffer, size_t size, size_t nitems) { if (readOffset == request.data->length()) return 0; @@ -299,18 +307,19 @@ struct curlFileTransfer : public FileTransfer return count; } - static size_t readCallbackWrapper(char *buffer, size_t size, size_t nitems, void * userp) + static size_t readCallbackWrapper(char * buffer, size_t size, size_t nitems, void * userp) { return ((TransferItem *) userp)->readCallback(buffer, size, nitems); } - #if !defined(_WIN32) && LIBCURL_VERSION_NUM >= 0x071000 - static int cloexec_callback(void *, curl_socket_t curlfd, curlsocktype purpose) { +#if !defined(_WIN32) && LIBCURL_VERSION_NUM >= 0x071000 + static int cloexec_callback(void *, curl_socket_t curlfd, curlsocktype purpose) + { unix::closeOnExec(curlfd); vomit("cloexec set for fd %i", curlfd); return CURL_SOCKOPT_OK; } - #endif +#endif size_t seekCallback(curl_off_t offset, int origin) { @@ -324,14 +333,15 @@ struct curlFileTransfer : public FileTransfer return CURL_SEEKFUNC_OK; } - static size_t seekCallbackWrapper(void *clientp, curl_off_t offset, int origin) + static size_t seekCallbackWrapper(void * clientp, curl_off_t offset, int origin) { return ((TransferItem *) clientp)->seekCallback(offset, origin); } void init() { - if (!req) req = curl_easy_init(); + if (!req) + req = curl_easy_init(); curl_easy_reset(req); @@ -344,18 +354,21 @@ struct curlFileTransfer : public FileTransfer curl_easy_setopt(req, CURLOPT_FOLLOWLOCATION, 1L); curl_easy_setopt(req, CURLOPT_MAXREDIRS, 10); curl_easy_setopt(req, CURLOPT_NOSIGNAL, 1); - curl_easy_setopt(req, CURLOPT_USERAGENT, - ("curl/" LIBCURL_VERSION " Nix/" + nixVersion + - (fileTransferSettings.userAgentSuffix != "" ? " " + fileTransferSettings.userAgentSuffix.get() : "")).c_str()); - #if LIBCURL_VERSION_NUM >= 0x072b00 + curl_easy_setopt( + req, + CURLOPT_USERAGENT, + ("curl/" LIBCURL_VERSION " Nix/" + nixVersion + + (fileTransferSettings.userAgentSuffix != "" ? 
" " + fileTransferSettings.userAgentSuffix.get() : "")) + .c_str()); +#if LIBCURL_VERSION_NUM >= 0x072b00 curl_easy_setopt(req, CURLOPT_PIPEWAIT, 1); - #endif - #if LIBCURL_VERSION_NUM >= 0x072f00 +#endif +#if LIBCURL_VERSION_NUM >= 0x072f00 if (fileTransferSettings.enableHttp2) curl_easy_setopt(req, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2TLS); else curl_easy_setopt(req, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_1_1); - #endif +#endif curl_easy_setopt(req, CURLOPT_WRITEFUNCTION, TransferItem::writeCallbackWrapper); curl_easy_setopt(req, CURLOPT_WRITEDATA, this); curl_easy_setopt(req, CURLOPT_HEADERFUNCTION, TransferItem::headerCallbackWrapper); @@ -393,9 +406,9 @@ struct curlFileTransfer : public FileTransfer curl_easy_setopt(req, CURLOPT_SSL_VERIFYHOST, 0); } - #if !defined(_WIN32) && LIBCURL_VERSION_NUM >= 0x071000 +#if !defined(_WIN32) && LIBCURL_VERSION_NUM >= 0x071000 curl_easy_setopt(req, CURLOPT_SOCKOPTFUNCTION, cloexec_callback); - #endif +#endif curl_easy_setopt(req, CURLOPT_CONNECTTIMEOUT, fileTransferSettings.connectTimeout.get()); @@ -425,10 +438,14 @@ struct curlFileTransfer : public FileTransfer auto httpStatus = getHTTPStatus(); - debug("finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes, duration = %.2f s", - request.verb(), request.uri, code, httpStatus, result.bodySize, - std::chrono::duration_cast(finishTime - startTime).count() / 1000.0f - ); + debug( + "finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes, duration = %.2f s", + request.verb(), + request.uri, + code, + httpStatus, + result.bodySize, + std::chrono::duration_cast(finishTime - startTime).count() / 1000.0f); appendCurrentUrl(); @@ -448,8 +465,7 @@ struct curlFileTransfer : public FileTransfer if (writeException) failEx(writeException); - else if (code == CURLE_OK && successfulStatuses.count(httpStatus)) - { + else if (code == CURLE_OK && successfulStatuses.count(httpStatus)) { result.cached = httpStatus == 304; // In 2021, GitHub responds to If-None-Match with 304, @@ -487,32 +503,32 @@ struct curlFileTransfer : public FileTransfer // * 511 we're behind a captive portal err = Misc; } else { - // Don't bother retrying on certain cURL errors either +// Don't bother retrying on certain cURL errors either - // Allow selecting a subset of enum values - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wswitch-enum" +// Allow selecting a subset of enum values +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wswitch-enum" switch (code) { - case CURLE_FAILED_INIT: - case CURLE_URL_MALFORMAT: - case CURLE_NOT_BUILT_IN: - case CURLE_REMOTE_ACCESS_DENIED: - case CURLE_FILE_COULDNT_READ_FILE: - case CURLE_FUNCTION_NOT_FOUND: - case CURLE_ABORTED_BY_CALLBACK: - case CURLE_BAD_FUNCTION_ARGUMENT: - case CURLE_INTERFACE_FAILED: - case CURLE_UNKNOWN_OPTION: - case CURLE_SSL_CACERT_BADFILE: - case CURLE_TOO_MANY_REDIRECTS: - case CURLE_WRITE_ERROR: - case CURLE_UNSUPPORTED_PROTOCOL: - err = Misc; - break; - default: // Shut up warnings - break; + case CURLE_FAILED_INIT: + case CURLE_URL_MALFORMAT: + case CURLE_NOT_BUILT_IN: + case CURLE_REMOTE_ACCESS_DENIED: + case CURLE_FILE_COULDNT_READ_FILE: + case CURLE_FUNCTION_NOT_FOUND: + case CURLE_ABORTED_BY_CALLBACK: + case CURLE_BAD_FUNCTION_ARGUMENT: + case CURLE_INTERFACE_FAILED: + case CURLE_UNKNOWN_OPTION: + case CURLE_SSL_CACERT_BADFILE: + case CURLE_TOO_MANY_REDIRECTS: + case CURLE_WRITE_ERROR: + case CURLE_UNSUPPORTED_PROTOCOL: + err = Misc; + break; + default: // Shut up warnings + break; } - #pragma GCC 
diagnostic pop +#pragma GCC diagnostic pop } attempt++; @@ -520,31 +536,40 @@ struct curlFileTransfer : public FileTransfer std::optional response; if (errorSink) response = std::move(errorSink->s); - auto exc = - code == CURLE_ABORTED_BY_CALLBACK && getInterrupted() - ? FileTransferError(Interrupted, std::move(response), "%s of '%s' was interrupted", request.verb(), request.uri) - : httpStatus != 0 - ? FileTransferError(err, - std::move(response), - "unable to %s '%s': HTTP error %d%s", - request.verb(), request.uri, httpStatus, - code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code))) - : FileTransferError(err, - std::move(response), - "unable to %s '%s': %s (%d) %s", - request.verb(), request.uri, curl_easy_strerror(code), code, errbuf); + auto exc = code == CURLE_ABORTED_BY_CALLBACK && getInterrupted() ? FileTransferError( + Interrupted, + std::move(response), + "%s of '%s' was interrupted", + request.verb(), + request.uri) + : httpStatus != 0 + ? FileTransferError( + err, + std::move(response), + "unable to %s '%s': HTTP error %d%s", + request.verb(), + request.uri, + httpStatus, + code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code))) + : FileTransferError( + err, + std::move(response), + "unable to %s '%s': %s (%d) %s", + request.verb(), + request.uri, + curl_easy_strerror(code), + code, + errbuf); /* If this is a transient error, then maybe retry the download after a while. If we're writing to a sink, we can only retry if the server supports ranged requests. */ - if (err == Transient - && attempt < request.tries - && (!this->request.dataCallback - || writtenToSink == 0 - || (acceptRanges && encoding.empty()))) - { - int ms = retryTimeMs * std::pow(2.0f, attempt - 1 + std::uniform_real_distribution<>(0.0, 0.5)(fileTransfer.mt19937)); + if (err == Transient && attempt < request.tries + && (!this->request.dataCallback || writtenToSink == 0 || (acceptRanges && encoding.empty()))) { + int ms = retryTimeMs + * std::pow( + 2.0f, attempt - 1 + std::uniform_real_distribution<>(0.0, 0.5)(fileTransfer.mt19937)); if (writtenToSink) warn("%s; retrying from offset %d in %d ms", exc.what(), writtenToSink, ms); else @@ -553,8 +578,7 @@ struct curlFileTransfer : public FileTransfer errorSink.reset(); embargo = std::chrono::steady_clock::now() + std::chrono::milliseconds(ms); fileTransfer.enqueueItem(shared_from_this()); - } - else + } else fail(std::move(exc)); } } @@ -562,23 +586,28 @@ struct curlFileTransfer : public FileTransfer struct State { - struct EmbargoComparator { - bool operator() (const std::shared_ptr & i1, const std::shared_ptr & i2) { + struct EmbargoComparator + { + bool operator()(const std::shared_ptr & i1, const std::shared_ptr & i2) + { return i1->embargo > i2->embargo; } }; + bool quit = false; - std::priority_queue, std::vector>, EmbargoComparator> incoming; + std:: + priority_queue, std::vector>, EmbargoComparator> + incoming; }; Sync state_; - #ifndef _WIN32 // TODO need graceful async exit support on Windows? +#ifndef _WIN32 // TODO need graceful async exit support on Windows? /* We can't use a std::condition_variable to wake up the curl thread, because it only monitors file descriptors. So use a pipe instead. 
*/ Pipe wakeupPipe; - #endif +#endif std::thread workerThread; @@ -590,18 +619,17 @@ struct curlFileTransfer : public FileTransfer curlm = curl_multi_init(); - #if LIBCURL_VERSION_NUM >= 0x072b00 // Multiplex requires >= 7.43.0 +#if LIBCURL_VERSION_NUM >= 0x072b00 // Multiplex requires >= 7.43.0 curl_multi_setopt(curlm, CURLMOPT_PIPELINING, CURLPIPE_MULTIPLEX); - #endif - #if LIBCURL_VERSION_NUM >= 0x071e00 // Max connections requires >= 7.30.0 - curl_multi_setopt(curlm, CURLMOPT_MAX_TOTAL_CONNECTIONS, - fileTransferSettings.httpConnections.get()); - #endif +#endif +#if LIBCURL_VERSION_NUM >= 0x071e00 // Max connections requires >= 7.30.0 + curl_multi_setopt(curlm, CURLMOPT_MAX_TOTAL_CONNECTIONS, fileTransferSettings.httpConnections.get()); +#endif - #ifndef _WIN32 // TODO need graceful async exit support on Windows? +#ifndef _WIN32 // TODO need graceful async exit support on Windows? wakeupPipe.create(); fcntl(wakeupPipe.readSide.get(), F_SETFL, O_NONBLOCK); - #endif +#endif workerThread = std::thread([&]() { workerThreadEntry(); }); } @@ -612,7 +640,8 @@ struct curlFileTransfer : public FileTransfer workerThread.join(); - if (curlm) curl_multi_cleanup(curlm); + if (curlm) + curl_multi_cleanup(curlm); } void stopWorkerThread() @@ -622,28 +651,26 @@ struct curlFileTransfer : public FileTransfer auto state(state_.lock()); state->quit = true; } - #ifndef _WIN32 // TODO need graceful async exit support on Windows? +#ifndef _WIN32 // TODO need graceful async exit support on Windows? writeFull(wakeupPipe.writeSide.get(), " ", false); - #endif +#endif } void workerThreadMain() { - /* Cause this thread to be notified on SIGINT. */ - #ifndef _WIN32 // TODO need graceful async exit support on Windows? - auto callback = createInterruptCallback([&]() { - stopWorkerThread(); - }); - #endif +/* Cause this thread to be notified on SIGINT. */ +#ifndef _WIN32 // TODO need graceful async exit support on Windows? + auto callback = createInterruptCallback([&]() { stopWorkerThread(); }); +#endif - #ifdef __linux__ +#ifdef __linux__ try { tryUnshareFilesystem(); } catch (nix::Error & e) { e.addTrace({}, "in download thread"); throw; } - #endif +#endif std::map> items; @@ -677,16 +704,19 @@ struct curlFileTransfer : public FileTransfer /* Wait for activity, including wakeup events. */ int numfds = 0; struct curl_waitfd extraFDs[1]; - #ifndef _WIN32 // TODO need graceful async exit support on Windows? +#ifndef _WIN32 // TODO need graceful async exit support on Windows? extraFDs[0].fd = wakeupPipe.readSide.get(); extraFDs[0].events = CURL_WAIT_POLLIN; extraFDs[0].revents = 0; - #endif +#endif long maxSleepTimeMs = items.empty() ? 10000 : 100; - auto sleepTimeMs = - nextWakeup != std::chrono::steady_clock::time_point() - ? std::max(0, (int) std::chrono::duration_cast(nextWakeup - std::chrono::steady_clock::now()).count()) - : maxSleepTimeMs; + auto sleepTimeMs = nextWakeup != std::chrono::steady_clock::time_point() + ? 
std::max( + 0, + (int) std::chrono::duration_cast( + nextWakeup - std::chrono::steady_clock::now()) + .count()) + : maxSleepTimeMs; vomit("download thread waiting for %d ms", sleepTimeMs); mc = curl_multi_wait(curlm, extraFDs, 1, sleepTimeMs, &numfds); if (mc != CURLM_OK) @@ -715,8 +745,7 @@ struct curlFileTransfer : public FileTransfer incoming.push_back(item); state->incoming.pop(); } else { - if (nextWakeup == std::chrono::steady_clock::time_point() - || item->embargo < nextWakeup) + if (nextWakeup == std::chrono::steady_clock::time_point() || item->embargo < nextWakeup) nextWakeup = item->embargo; break; } @@ -747,16 +776,15 @@ struct curlFileTransfer : public FileTransfer { auto state(state_.lock()); - while (!state->incoming.empty()) state->incoming.pop(); + while (!state->incoming.empty()) + state->incoming.pop(); state->quit = true; } } void enqueueItem(std::shared_ptr item) { - if (item->request.data - && !hasPrefix(item->request.uri, "http://") - && !hasPrefix(item->request.uri, "https://")) + if (item->request.data && !hasPrefix(item->request.uri, "http://") && !hasPrefix(item->request.uri, "https://")) throw nix::Error("uploading to '%s' is not supported", item->request.uri); { @@ -765,9 +793,9 @@ struct curlFileTransfer : public FileTransfer throw nix::Error("cannot enqueue download request because the download thread is shutting down"); state->incoming.push(item); } - #ifndef _WIN32 // TODO need graceful async exit support on Windows? +#ifndef _WIN32 // TODO need graceful async exit support on Windows? writeFull(wakeupPipe.writeSide.get(), " "); - #endif +#endif } #if NIX_WITH_S3_SUPPORT @@ -776,8 +804,8 @@ struct curlFileTransfer : public FileTransfer auto [path, params] = splitUriAndParams(uri); auto slash = path.find('/', 5); // 5 is the length of "s3://" prefix - if (slash == std::string::npos) - throw nix::Error("bad S3 URI '%s'", path); + if (slash == std::string::npos) + throw nix::Error("bad S3 URI '%s'", path); std::string bucketName(path, 5, slash - 5); std::string key(path, slash + 1); @@ -786,8 +814,7 @@ struct curlFileTransfer : public FileTransfer } #endif - void enqueueFileTransfer(const FileTransferRequest & request, - Callback callback) override + void enqueueFileTransfer(const FileTransferRequest & request, Callback callback) override { /* Ugly hack to support s3:// URIs. */ if (hasPrefix(request.uri, "s3://")) { @@ -814,7 +841,9 @@ struct curlFileTransfer : public FileTransfer #else throw nix::Error("cannot download '%s' because Nix is not built with S3 support", request.uri); #endif - } catch (...) { callback.rethrow(); } + } catch (...) { + callback.rethrow(); + } return; } @@ -845,14 +874,13 @@ ref makeFileTransfer() std::future FileTransfer::enqueueFileTransfer(const FileTransferRequest & request) { auto promise = std::make_shared>(); - enqueueFileTransfer(request, - {[promise](std::future fut) { - try { - promise->set_value(fut.get()); - } catch (...) { - promise->set_exception(std::current_exception()); - } - }}); + enqueueFileTransfer(request, {[promise](std::future fut) { + try { + promise->set_value(fut.get()); + } catch (...) 
{ + promise->set_exception(std::current_exception()); + } + }}); return promise->get_future(); } @@ -868,9 +896,7 @@ FileTransferResult FileTransfer::upload(const FileTransferRequest & request) } void FileTransfer::download( - FileTransferRequest && request, - Sink & sink, - std::function resultCallback) + FileTransferRequest && request, Sink & sink, std::function resultCallback) { /* Note: we can't call 'sink' via request.dataCallback, because that would cause the sink to execute on the fileTransfer @@ -880,7 +906,8 @@ void FileTransfer::download( Therefore we use a buffer to communicate data between the download thread and the calling thread. */ - struct State { + struct State + { bool quit = false; std::exception_ptr exc; std::string data; @@ -898,10 +925,10 @@ void FileTransfer::download( }); request.dataCallback = [_state](std::string_view data) { - auto state(_state->lock()); - if (state->quit) return; + if (state->quit) + return; /* If the buffer is full, then go to sleep until the calling thread wakes us up (i.e. when it has removed data from the @@ -921,8 +948,8 @@ void FileTransfer::download( state->avail.notify_one(); }; - enqueueFileTransfer(request, - {[_state, resultCallback{std::move(resultCallback)}](std::future fut) { + enqueueFileTransfer( + request, {[_state, resultCallback{std::move(resultCallback)}](std::future fut) { auto state(_state->lock()); state->quit = true; try { @@ -949,13 +976,15 @@ void FileTransfer::download( if (state->data.empty()) { if (state->quit) { - if (state->exc) std::rethrow_exception(state->exc); + if (state->exc) + std::rethrow_exception(state->exc); return; } state.wait(state->avail); - if (state->data.empty()) continue; + if (state->data.empty()) + continue; } chunk = std::move(state->data); @@ -974,8 +1003,11 @@ void FileTransfer::download( } template -FileTransferError::FileTransferError(FileTransfer::Error error, std::optional response, const Args & ... args) - : Error(args...), error(error), response(response) +FileTransferError::FileTransferError( + FileTransfer::Error error, std::optional response, const Args &... args) + : Error(args...) + , error(error) + , response(response) { const auto hf = HintFmt(args...); // FIXME: Due to https://github.com/NixOS/nix/issues/3841 we don't know how @@ -987,4 +1019,4 @@ FileTransferError::FileTransferError(FileTransfer::Error error, std::optionalget()), &st) == -1) throw SysError("statting '%1%'", fnTempRoots); - if (st.st_size == 0) break; + if (st.st_size == 0) + break; /* The garbage collector deleted this file before we could get a lock. (It won't delete the file after we get a lock.) @@ -80,12 +80,12 @@ void LocalStore::createTempRootsFile() } } - void LocalStore::addTempRoot(const StorePath & path) { if (config->readOnly) { - debug("Read-only store doesn't support creating lock files for temp roots, but nothing can be deleted anyways."); - return; + debug( + "Read-only store doesn't support creating lock files for temp roots, but nothing can be deleted anyways."); + return; } createTempRootsFile(); @@ -97,7 +97,7 @@ void LocalStore::addTempRoot(const StorePath & path) *fdGCLock = openGCLock(); } - restart: +restart: /* Try to acquire a shared global GC lock (non-blocking). This only succeeds if the garbage collector is not currently running. 
*/ @@ -157,10 +157,8 @@ void LocalStore::addTempRoot(const StorePath & path) writeFull(_fdTempRoots.lock()->get(), s); } - static std::string censored = "{censored}"; - void LocalStore::findTempRoots(Roots & tempRoots, bool censor) { /* Read the `temproots' directory for per-process temporary root @@ -178,14 +176,17 @@ void LocalStore::findTempRoots(Roots & tempRoots, bool censor) pid_t pid = std::stoi(name); debug("reading temporary root file '%1%'", path); - AutoCloseFD fd(toDescriptor(open(path.c_str(), + AutoCloseFD fd(toDescriptor(open( + path.c_str(), #ifndef _WIN32 O_CLOEXEC | #endif - O_RDWR, 0666))); + O_RDWR, + 0666))); if (!fd) { /* It's okay if the file has disappeared. */ - if (errno == ENOENT) continue; + if (errno == ENOENT) + continue; throw SysError("opening temporary roots file '%1%'", path); } @@ -214,7 +215,6 @@ void LocalStore::findTempRoots(Roots & tempRoots, bool censor) } } - void LocalStore::findRoots(const Path & path, std::filesystem::file_type type, Roots & roots) { auto foundRoot = [&](const Path & path, const Path & target) { @@ -224,7 +224,8 @@ void LocalStore::findRoots(const Path & path, std::filesystem::file_type type, R roots[std::move(storePath)].emplace(path); else printInfo("skipping invalid root from '%1%' to '%2%'", path, target); - } catch (BadStorePath &) { } + } catch (BadStorePath &) { + } }; try { @@ -253,9 +254,11 @@ void LocalStore::findRoots(const Path & path, std::filesystem::file_type type, R unlink(path.c_str()); } } else { - if (!std::filesystem::is_symlink(target)) return; + if (!std::filesystem::is_symlink(target)) + return; Path target2 = readLink(target); - if (isInStore(target2)) foundRoot(target, target2); + if (isInStore(target2)) + foundRoot(target, target2); } } } @@ -270,7 +273,8 @@ void LocalStore::findRoots(const Path & path, std::filesystem::file_type type, R catch (std::filesystem::filesystem_error & e) { /* We only ignore permanent failures. */ - if (e.code() == std::errc::permission_denied || e.code() == std::errc::no_such_file_or_directory || e.code() == std::errc::not_a_directory) + if (e.code() == std::errc::permission_denied || e.code() == std::errc::no_such_file_or_directory + || e.code() == std::errc::not_a_directory) printInfo("cannot read potential root '%1%'", path); else throw; @@ -285,7 +289,6 @@ void LocalStore::findRoots(const Path & path, std::filesystem::file_type type, R } } - void LocalStore::findRootsNoTemp(Roots & roots, bool censor) { /* Process direct roots in {gcroots,profiles}. 
*/ @@ -298,7 +301,6 @@ void LocalStore::findRootsNoTemp(Roots & roots, bool censor) findRuntimeRoots(roots, censor); } - Roots LocalStore::findRoots(bool censor) { Roots roots; @@ -320,9 +322,8 @@ static void readProcLink(const std::filesystem::path & file, UncheckedRoots & ro try { buf = std::filesystem::read_symlink(file); } catch (std::filesystem::filesystem_error & e) { - if (e.code() == std::errc::no_such_file_or_directory - || e.code() == std::errc::permission_denied - || e.code() == std::errc::no_such_process) + if (e.code() == std::errc::no_such_file_or_directory || e.code() == std::errc::permission_denied + || e.code() == std::errc::no_such_process) return; throw; } @@ -362,7 +363,7 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor) checkInterrupt(); if (boost::regex_match(ent->d_name, digitsRegex)) { try { - readProcLink(fmt("/proc/%s/exe" ,ent->d_name), unchecked); + readProcLink(fmt("/proc/%s/exe", ent->d_name), unchecked); readProcLink(fmt("/proc/%s/cwd", ent->d_name), unchecked); auto fdStr = fmt("/proc/%s/fd", ent->d_name); @@ -395,7 +396,9 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor) auto envFile = fmt("/proc/%s/environ", ent->d_name); auto envString = readFile(envFile); auto env_end = boost::sregex_iterator{}; - for (auto i = boost::sregex_iterator{envString.begin(), envString.end(), storePathRegex}; i != env_end; ++i) + for (auto i = boost::sregex_iterator{envString.begin(), envString.end(), storePathRegex}; + i != env_end; + ++i) unchecked[i->str()].emplace(envFile); } catch (SystemError & e) { if (errno == ENOENT || errno == EACCES || errno == ESRCH) @@ -416,7 +419,7 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor) try { boost::regex lsofRegex(R"(^n(/.*)$)"); auto lsofLines = - tokenizeString>(runProgram(LSOF, true, { "-n", "-w", "-F", "n" }), "\n"); + tokenizeString>(runProgram(LSOF, true, {"-n", "-w", "-F", "n"}), "\n"); for (const auto & line : lsofLines) { boost::smatch match; if (boost::regex_match(line, match, lsofRegex)) @@ -435,22 +438,24 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor) #endif for (auto & [target, links] : unchecked) { - if (!isInStore(target)) continue; + if (!isInStore(target)) + continue; try { auto path = toStorePath(target).first; - if (!isValidPath(path)) continue; + if (!isValidPath(path)) + continue; debug("got additional root '%1%'", printStorePath(path)); if (censor) roots[path].insert(censored); else roots[path].insert(links.begin(), links.end()); - } catch (BadStorePath &) { } + } catch (BadStorePath &) { + } } } - -struct GCLimitReached { }; - +struct GCLimitReached +{}; void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) { @@ -521,7 +526,8 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) fdServer.close(); while (true) { auto item = remove_begin(*connections.lock()); - if (!item) break; + if (!item) + break; auto & [fd, thread] = *item; shutdown(fd, SHUT_RDWR); thread.join(); @@ -543,7 +549,8 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) /* Accept a new connection. 
*/ assert(fds[1].revents & POLLIN); AutoCloseFD fdClient = accept(fdServer.get(), nullptr, nullptr); - if (!fdClient) continue; + if (!fdClient) + continue; debug("GC roots server accepted new client"); @@ -604,7 +611,8 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) Finally stopServer([&]() { writeFull(shutdownPipe.writeSide.get(), "x", false); wakeup.notify_all(); - if (serverThread.joinable()) serverThread.join(); + if (serverThread.joinable()) + serverThread.join(); }); #endif @@ -616,7 +624,8 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) if (!options.ignoreLiveness) findRootsNoTemp(rootMap, true); - for (auto & i : rootMap) roots.insert(i.first); + for (auto & i : rootMap) + roots.insert(i.first); /* Read the temporary roots created before we acquired the global GC root. Any new roots will be sent to our socket. */ @@ -633,8 +642,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) /* Helper function that deletes a path from the store and throws GCLimitReached if we've deleted enough garbage. */ - auto deleteFromStore = [&](std::string_view baseName) - { + auto deleteFromStore = [&](std::string_view baseName) { Path path = storeDir + "/" + std::string(baseName); Path realPath = config->realStoreDir + "/" + std::string(baseName); @@ -701,19 +709,24 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) /* If we've previously deleted this path, we don't have to handle it again. */ - if (dead.count(*path)) continue; + if (dead.count(*path)) + continue; - auto markAlive = [&]() - { + auto markAlive = [&]() { alive.insert(*path); alive.insert(start); try { StorePathSet closure; - computeFSClosure(*path, closure, - /* flipDirection */ false, gcKeepOutputs, gcKeepDerivations); + computeFSClosure( + *path, + closure, + /* flipDirection */ false, + gcKeepOutputs, + gcKeepDerivations); for (auto & p : closure) alive.insert(p); - } catch (InvalidPath &) { } + } catch (InvalidPath &) { + } }; /* If this is a root, bail out. */ @@ -722,8 +735,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) return markAlive(); } - if (options.action == GCOptions::gcDeleteSpecific - && !options.pathsToDelete.count(*path)) + if (options.action == GCOptions::gcDeleteSpecific && !options.pathsToDelete.count(*path)) return; { @@ -753,9 +765,8 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) derivation, then visit the derivation outputs. 
*/ if (gcKeepDerivations && path->isDerivation()) { for (auto & [name, maybeOutPath] : queryPartialDerivationOutputMap(*path)) - if (maybeOutPath && - isValidPath(*maybeOutPath) && - queryPathInfo(*maybeOutPath)->deriver == *path) + if (maybeOutPath && isValidPath(*maybeOutPath) + && queryPathInfo(*maybeOutPath)->deriver == *path) enqueue(*maybeOutPath); } @@ -768,13 +779,14 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) } } for (auto & path : topoSortPaths(visited)) { - if (!dead.insert(path).second) continue; + if (!dead.insert(path).second) + continue; if (shouldDelete) { try { invalidatePathChecked(path); deleteFromStore(path.to_string()); referrersCache.erase(path); - } catch (PathInUse &e) { + } catch (PathInUse & e) { // If we end up here, it's likely a new occurrence // of https://github.com/NixOS/nix/issues/11923 printError("BUG: %s", e.what()); @@ -806,7 +818,8 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) try { AutoCloseDir dir(opendir(config->realStoreDir.get().c_str())); - if (!dir) throw SysError("opening directory '%1%'", config->realStoreDir); + if (!dir) + throw SysError("opening directory '%1%'", config->realStoreDir); /* Read the store and delete all paths that are invalid or unreachable. We don't use readDirectory() here so that @@ -817,13 +830,13 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) while (errno = 0, dirent = readdir(dir.get())) { checkInterrupt(); std::string name = dirent->d_name; - if (name == "." || name == ".." || name == linksName) continue; + if (name == "." || name == ".." || name == linksName) + continue; if (auto storePath = maybeParseStorePath(storeDir + "/" + name)) deleteReferrersClosure(*storePath); else deleteFromStore(name); - } } catch (GCLimitReached & e) { } @@ -850,7 +863,8 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) printInfo("deleting unused links..."); AutoCloseDir dir(opendir(linksDir.c_str())); - if (!dir) throw SysError("opening directory '%1%'", linksDir); + if (!dir) + throw SysError("opening directory '%1%'", linksDir); int64_t actualSize = 0, unsharedSize = 0; @@ -858,7 +872,8 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) while (errno = 0, dirent = readdir(dir.get())) { checkInterrupt(); std::string name = dirent->d_name; - if (name == "." || name == "..") continue; + if (name == "." || name == "..") + continue; Path path = linksDir + "/" + name; auto st = lstat(path); @@ -889,15 +904,15 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) #endif ; - printInfo("note: currently hard linking saves %.2f MiB", + printInfo( + "note: currently hard linking saves %.2f MiB", ((unsharedSize - actualSize - overhead) / (1024.0 * 1024.0))); } /* While we're at it, vacuum the database. 
*/ - //if (options.action == GCOptions::gcDeleteDead) vacuumDB(); + // if (options.action == GCOptions::gcDeleteDead) vacuumDB(); } - void LocalStore::autoGC(bool sync) { #if HAVE_STATVFS @@ -927,15 +942,18 @@ void LocalStore::autoGC(bool sync) auto now = std::chrono::steady_clock::now(); - if (now < state->lastGCCheck + std::chrono::seconds(settings.minFreeCheckInterval)) return; + if (now < state->lastGCCheck + std::chrono::seconds(settings.minFreeCheckInterval)) + return; auto avail = getAvail(); state->lastGCCheck = now; - if (avail >= settings.minFree || avail >= settings.maxFree) return; + if (avail >= settings.minFree || avail >= settings.maxFree) + return; - if (avail > state->availAfterGC * 0.97) return; + if (avail > state->availAfterGC * 0.97) + return; state->gcRunning = true; @@ -943,7 +961,6 @@ void LocalStore::autoGC(bool sync) future = state->gcFuture = promise.get_future().share(); std::thread([promise{std::move(promise)}, this, avail, getAvail]() mutable { - try { /* Wake up any threads waiting for the auto-GC to finish. */ @@ -970,15 +987,14 @@ void LocalStore::autoGC(bool sync) // future, but we don't really care. (what??) ignoreExceptionInDestructor(); } - }).detach(); } - sync: +sync: // Wait for the future outside of the state lock. - if (sync) future.get(); + if (sync) + future.get(); #endif } - -} +} // namespace nix diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 1f80cb379..966d37090 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -16,30 +16,29 @@ #include #ifndef _WIN32 -# include +# include #endif #ifdef __GLIBC__ -# include -# include -# include +# include +# include +# include #endif #ifdef __APPLE__ -# include "nix/util/processes.hh" +# include "nix/util/processes.hh" #endif #include "nix/util/config-impl.hh" #ifdef __APPLE__ -#include +# include #endif #include "store-config-private.hh" namespace nix { - /* The default location of the daemon socket, relative to nixStateDir. The socket is in a directory to allow you to control access to the Nix daemon by setting the mode/ownership of the directory @@ -55,17 +54,18 @@ Settings::Settings() : nixPrefix(NIX_PREFIX) , nixStore( #ifndef _WIN32 - // On Windows `/nix/store` is not a canonical path, but we dont' - // want to deal with that yet. - canonPath + // On Windows `/nix/store` is not a canonical path, but we dont' + // want to deal with that yet. + canonPath #endif - (getEnvNonEmpty("NIX_STORE_DIR").value_or(getEnvNonEmpty("NIX_STORE").value_or(NIX_STORE_DIR)))) + (getEnvNonEmpty("NIX_STORE_DIR").value_or(getEnvNonEmpty("NIX_STORE").value_or(NIX_STORE_DIR)))) , nixDataDir(canonPath(getEnvNonEmpty("NIX_DATA_DIR").value_or(NIX_DATA_DIR))) , nixLogDir(canonPath(getEnvNonEmpty("NIX_LOG_DIR").value_or(NIX_LOG_DIR))) , nixStateDir(canonPath(getEnvNonEmpty("NIX_STATE_DIR").value_or(NIX_STATE_DIR))) , nixConfDir(canonPath(getEnvNonEmpty("NIX_CONF_DIR").value_or(NIX_CONF_DIR))) , nixUserConfFiles(getUserConfigFiles()) - , nixDaemonSocketFile(canonPath(getEnvNonEmpty("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH))) + , nixDaemonSocketFile( + canonPath(getEnvNonEmpty("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH))) { #ifndef _WIN32 buildUsersGroup = isRootUser() ? 
"nixbld" : ""; @@ -91,7 +91,8 @@ Settings::Settings() /* chroot-like behavior from Apple's sandbox */ #ifdef __APPLE__ - sandboxPaths = tokenizeString("/System/Library/Frameworks /System/Library/PrivateFrameworks /bin/sh /bin/bash /private/tmp /private/var/tmp /usr/lib"); + sandboxPaths = tokenizeString( + "/System/Library/Frameworks /System/Library/PrivateFrameworks /bin/sh /bin/bash /private/tmp /private/var/tmp /usr/lib"); allowedImpureHostPrefixes = tokenizeString("/System/Library /usr/lib /dev /bin/sh"); #endif } @@ -102,7 +103,8 @@ void loadConfFile(AbstractConfig & config) try { std::string contents = readFile(path); config.applyConfig(contents, path); - } catch (SystemError &) { } + } catch (SystemError &) { + } }; applyConfigFile(settings.nixConfDir + "/nix.conf"); @@ -120,7 +122,6 @@ void loadConfFile(AbstractConfig & config) if (nixConfEnv.has_value()) { config.applyConfig(nixConfEnv.value(), "NIX_CONFIG"); } - } std::vector getUserConfigFiles() @@ -146,13 +147,14 @@ unsigned int Settings::getDefaultCores() const const unsigned int maxCPU = getMaxCPU(); if (maxCPU > 0) - return maxCPU; + return maxCPU; else - return concurrency; + return concurrency; } #ifdef __APPLE__ -static bool hasVirt() { +static bool hasVirt() +{ int hasVMM; int hvSupport; @@ -181,19 +183,19 @@ StringSet Settings::getDefaultSystemFeatures() actually require anything special on the machines. */ StringSet features{"nixos-test", "benchmark", "big-parallel"}; - #ifdef __linux__ +#ifdef __linux__ features.insert("uid-range"); - #endif +#endif - #ifdef __linux__ +#ifdef __linux__ if (access("/dev/kvm", R_OK | W_OK) == 0) features.insert("kvm"); - #endif +#endif - #ifdef __APPLE__ +#ifdef __APPLE__ if (hasVirt()) features.insert("apple-virt"); - #endif +#endif return features; } @@ -214,8 +216,11 @@ StringSet Settings::getDefaultExtraPlatforms() // machines. Note that we can’t force processes from executing // x86_64 in aarch64 environments or vice versa since they can // always exec with their own binary preferences. 
- if (std::string{NIX_LOCAL_SYSTEM} == "aarch64-darwin" && - runProgram(RunOptions {.program = "arch", .args = {"-arch", "x86_64", "/usr/bin/true"}, .mergeStderrToStdout = true}).first == 0) + if (std::string{NIX_LOCAL_SYSTEM} == "aarch64-darwin" + && runProgram( + RunOptions{.program = "arch", .args = {"-arch", "x86_64", "/usr/bin/true"}, .mergeStderrToStdout = true}) + .first + == 0) extraPlatforms.insert("x86_64-darwin"); #endif @@ -237,41 +242,57 @@ bool Settings::isWSL1() Path Settings::getDefaultSSLCertFile() { - for (auto & fn : {"/etc/ssl/certs/ca-certificates.crt", "/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"}) - if (pathAccessible(fn)) return fn; + for (auto & fn : + {"/etc/ssl/certs/ca-certificates.crt", "/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"}) + if (pathAccessible(fn)) + return fn; return ""; } std::string nixVersion = PACKAGE_VERSION; -NLOHMANN_JSON_SERIALIZE_ENUM(SandboxMode, { - {SandboxMode::smEnabled, true}, - {SandboxMode::smRelaxed, "relaxed"}, - {SandboxMode::smDisabled, false}, -}); +NLOHMANN_JSON_SERIALIZE_ENUM( + SandboxMode, + { + {SandboxMode::smEnabled, true}, + {SandboxMode::smRelaxed, "relaxed"}, + {SandboxMode::smDisabled, false}, + }); -template<> SandboxMode BaseSetting::parse(const std::string & str) const +template<> +SandboxMode BaseSetting::parse(const std::string & str) const { - if (str == "true") return smEnabled; - else if (str == "relaxed") return smRelaxed; - else if (str == "false") return smDisabled; - else throw UsageError("option '%s' has invalid value '%s'", name, str); + if (str == "true") + return smEnabled; + else if (str == "relaxed") + return smRelaxed; + else if (str == "false") + return smDisabled; + else + throw UsageError("option '%s' has invalid value '%s'", name, str); } -template<> struct BaseSetting::trait +template<> +struct BaseSetting::trait { static constexpr bool appendable = false; }; -template<> std::string BaseSetting::to_string() const +template<> +std::string BaseSetting::to_string() const { - if (value == smEnabled) return "true"; - else if (value == smRelaxed) return "relaxed"; - else if (value == smDisabled) return "false"; - else unreachable(); + if (value == smEnabled) + return "true"; + else if (value == smRelaxed) + return "relaxed"; + else if (value == smDisabled) + return "false"; + else + unreachable(); } -template<> void BaseSetting::convertToArg(Args & args, const std::string & category) +template<> +void BaseSetting::convertToArg(Args & args, const std::string & category) { args.addFlag({ .longName = name, @@ -298,7 +319,8 @@ template<> void BaseSetting::convertToArg(Args & args, const std::s unsigned int MaxBuildJobsSetting::parse(const std::string & str) const { - if (str == "auto") return std::max(1U, std::thread::hardware_concurrency()); + if (str == "auto") + return std::max(1U, std::thread::hardware_concurrency()); else { if (auto n = string2Int(str)) return *n; @@ -307,7 +329,6 @@ unsigned int MaxBuildJobsSetting::parse(const std::string & str) const } } - static void preloadNSS() { /* builtin:fetchurl can trigger a DNS lookup, which with glibc can trigger a dynamic library load of @@ -346,15 +367,18 @@ static void preloadNSS() static bool initLibStoreDone = false; -void assertLibStoreInitialized() { +void assertLibStoreInitialized() +{ if (!initLibStoreDone) { printError("The program must call nix::initNix() before calling any libstore library functions."); abort(); }; } -void initLibStore(bool loadConfig) { - if (initLibStoreDone) return; +void initLibStore(bool 
loadConfig) +{ + if (initLibStoreDone) + return; initLibUtil(); @@ -371,7 +395,8 @@ void initLibStore(bool loadConfig) { by calling curl_global_init here, which should mean curl will already have been initialized by the time we try to do so in a forked process. - [1] https://github.com/apple-oss-distributions/objc4/blob/01edf1705fbc3ff78a423cd21e03dfc21eb4d780/runtime/objc-initialize.mm#L614-L636 + [1] + https://github.com/apple-oss-distributions/objc4/blob/01edf1705fbc3ff78a423cd21e03dfc21eb4d780/runtime/objc-initialize.mm#L614-L636 */ curl_global_init(CURL_GLOBAL_ALL); #ifdef __APPLE__ @@ -385,5 +410,4 @@ void initLibStore(bool loadConfig) { initLibStoreDone = true; } - -} +} // namespace nix diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index e44d146b9..21a31c3f5 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -9,7 +9,6 @@ namespace nix { MakeError(UploadToHTTP, Error); - StringSet HttpBinaryCacheStoreConfig::uriSchemes() { static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; @@ -20,33 +19,26 @@ StringSet HttpBinaryCacheStoreConfig::uriSchemes() } HttpBinaryCacheStoreConfig::HttpBinaryCacheStoreConfig( - std::string_view scheme, - std::string_view _cacheUri, - const Params & params) + std::string_view scheme, std::string_view _cacheUri, const Params & params) : StoreConfig(params) , BinaryCacheStoreConfig(params) , cacheUri( - std::string { scheme } - + "://" - + (!_cacheUri.empty() - ? _cacheUri - : throw UsageError("`%s` Store requires a non-empty authority in Store URL", scheme))) + std::string{scheme} + "://" + + (!_cacheUri.empty() ? _cacheUri + : throw UsageError("`%s` Store requires a non-empty authority in Store URL", scheme))) { while (!cacheUri.empty() && cacheUri.back() == '/') cacheUri.pop_back(); } - std::string HttpBinaryCacheStoreConfig::doc() { return - #include "http-binary-cache-store.md" - ; +#include "http-binary-cache-store.md" + ; } - -class HttpBinaryCacheStore : - public virtual BinaryCacheStore +class HttpBinaryCacheStore : public virtual BinaryCacheStore { struct State { @@ -63,8 +55,7 @@ public: ref config; HttpBinaryCacheStore(ref config) - : Store{*config} - // TODO it will actually mutate the configuration + : Store{*config} // TODO it will actually mutate the configuration , BinaryCacheStore{*config} , config{config} { @@ -108,7 +99,8 @@ protected: void checkEnabled() { auto state(_state.lock()); - if (state->enabled) return; + if (state->enabled) + return; if (std::chrono::steady_clock::now() > state->disabledUntil) { state->enabled = true; debug("re-enabling binary cache '%s'", getUri()); @@ -136,7 +128,8 @@ protected: } } - void upsertFile(const std::string & path, + void upsertFile( + const std::string & path, std::shared_ptr> istream, const std::string & mimeType) override { @@ -154,9 +147,8 @@ protected: { return FileTransferRequest( hasPrefix(path, "https://") || hasPrefix(path, "http://") || hasPrefix(path, "file://") - ? path - : config->cacheUri + "/" + path); - + ? 
path + : config->cacheUri + "/" + path); } void getFile(const std::string & path, Sink & sink) override @@ -173,8 +165,7 @@ protected: } } - void getFile(const std::string & path, - Callback> callback) noexcept override + void getFile(const std::string & path, Callback> callback) noexcept override { auto callbackPtr = std::make_shared(std::move(callback)); @@ -183,8 +174,8 @@ protected: auto request(makeRequest(path)); - getFileTransfer()->enqueueFileTransfer(request, - {[callbackPtr, this](std::future result) { + getFileTransfer()->enqueueFileTransfer( + request, {[callbackPtr, this](std::future result) { try { (*callbackPtr)(std::move(result.get().data)); } catch (FileTransferError & e) { @@ -195,7 +186,7 @@ protected: } catch (...) { callbackPtr->rethrow(); } - }}); + }}); } catch (...) { callbackPtr->rethrow(); @@ -232,12 +223,11 @@ protected: ref HttpBinaryCacheStore::Config::openStore() const { - return make_ref(ref{ - // FIXME we shouldn't actually need a mutable config - std::const_pointer_cast(shared_from_this()) - }); + return make_ref( + ref{// FIXME we shouldn't actually need a mutable config + std::const_pointer_cast(shared_from_this())}); } static RegisterStoreImplementation regHttpBinaryCacheStore; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/binary-cache-store.hh b/src/libstore/include/nix/store/binary-cache-store.hh index 43f2cf690..908500b42 100644 --- a/src/libstore/include/nix/store/binary-cache-store.hh +++ b/src/libstore/include/nix/store/binary-cache-store.hh @@ -17,31 +17,42 @@ struct BinaryCacheStoreConfig : virtual StoreConfig { using StoreConfig::StoreConfig; - const Setting compression{this, "xz", "compression", - "NAR compression method (`xz`, `bzip2`, `gzip`, `zstd`, or `none`)."}; + const Setting compression{ + this, "xz", "compression", "NAR compression method (`xz`, `bzip2`, `gzip`, `zstd`, or `none`)."}; - const Setting writeNARListing{this, false, "write-nar-listing", - "Whether to write a JSON file that lists the files in each NAR."}; + const Setting writeNARListing{ + this, false, "write-nar-listing", "Whether to write a JSON file that lists the files in each NAR."}; - const Setting writeDebugInfo{this, false, "index-debug-info", + const Setting writeDebugInfo{ + this, + false, + "index-debug-info", R"( Whether to index DWARF debug info files by build ID. This allows [`dwarffs`](https://github.com/edolstra/dwarffs) to fetch debug info on demand )"}; - const Setting secretKeyFile{this, "", "secret-key", - "Path to the secret key used to sign the binary cache."}; + const Setting secretKeyFile{this, "", "secret-key", "Path to the secret key used to sign the binary cache."}; - const Setting secretKeyFiles{this, "", "secret-keys", - "List of comma-separated paths to the secret keys used to sign the binary cache."}; + const Setting secretKeyFiles{ + this, "", "secret-keys", "List of comma-separated paths to the secret keys used to sign the binary cache."}; - const Setting localNarCache{this, "", "local-nar-cache", + const Setting localNarCache{ + this, + "", + "local-nar-cache", "Path to a local cache of NARs fetched from this binary cache, used by commands such as `nix store cat`."}; - const Setting parallelCompression{this, false, "parallel-compression", + const Setting parallelCompression{ + this, + false, + "parallel-compression", "Enable multi-threaded compression of NARs. 
This is currently only available for `xz` and `zstd`."}; - const Setting compressionLevel{this, -1, "compression-level", + const Setting compressionLevel{ + this, + -1, + "compression-level", R"( The *preset level* to be used when compressing NARs. The meaning and accepted values depend on the compression method selected. @@ -49,14 +60,11 @@ struct BinaryCacheStoreConfig : virtual StoreConfig )"}; }; - /** * @note subclasses must implement at least one of the two * virtual getFile() methods. */ -struct BinaryCacheStore : - virtual Store, - virtual LogStore +struct BinaryCacheStore : virtual Store, virtual LogStore { using Config = BinaryCacheStoreConfig; @@ -82,11 +90,11 @@ public: virtual bool fileExists(const std::string & path) = 0; - virtual void upsertFile(const std::string & path, - std::shared_ptr> istream, - const std::string & mimeType) = 0; + virtual void upsertFile( + const std::string & path, std::shared_ptr> istream, const std::string & mimeType) = 0; - void upsertFile(const std::string & path, + void upsertFile( + const std::string & path, // FIXME: use std::string_view std::string && data, const std::string & mimeType); @@ -106,9 +114,7 @@ public: * Fetch the specified file and call the specified callback with * the result. A subclass may implement this asynchronously. */ - virtual void getFile( - const std::string & path, - Callback> callback) noexcept; + virtual void getFile(const std::string & path, Callback> callback) noexcept; std::optional getFile(const std::string & path); @@ -125,20 +131,22 @@ private: void writeNarInfo(ref narInfo); ref addToStoreCommon( - Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs, + Source & narSource, + RepairFlag repair, + CheckSigsFlag checkSigs, std::function mkInfo); public: bool isValidPathUncached(const StorePath & path) override; - void queryPathInfoUncached(const StorePath & path, - Callback> callback) noexcept override; + void queryPathInfoUncached( + const StorePath & path, Callback> callback) noexcept override; std::optional queryPathFromHashPart(const std::string & hashPart) override; - void addToStore(const ValidPathInfo & info, Source & narSource, - RepairFlag repair, CheckSigsFlag checkSigs) override; + void + addToStore(const ValidPathInfo & info, Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs) override; StorePath addToStoreFromDump( Source & dump, @@ -160,8 +168,8 @@ public: void registerDrvOutput(const Realisation & info) override; - void queryRealisationUncached(const DrvOutput &, - Callback> callback) noexcept override; + void queryRealisationUncached( + const DrvOutput &, Callback> callback) noexcept override; void narFromPath(const StorePath & path, Sink & sink) override; @@ -172,9 +180,8 @@ public: std::optional getBuildLogExact(const StorePath & path) override; void addBuildLog(const StorePath & drvPath, std::string_view log) override; - }; MakeError(NoSuchBinaryCacheFile, Error); -} +} // namespace nix diff --git a/src/libstore/include/nix/store/build-result.hh b/src/libstore/include/nix/store/build-result.hh index 088b057b6..3b70b781f 100644 --- a/src/libstore/include/nix/store/build-result.hh +++ b/src/libstore/include/nix/store/build-result.hh @@ -46,25 +46,42 @@ struct BuildResult */ std::string errorMsg; - std::string toString() const { + std::string toString() const + { auto strStatus = [&]() { switch (status) { - case Built: return "Built"; - case Substituted: return "Substituted"; - case AlreadyValid: return "AlreadyValid"; - case PermanentFailure: return "PermanentFailure"; - 
case InputRejected: return "InputRejected"; - case OutputRejected: return "OutputRejected"; - case TransientFailure: return "TransientFailure"; - case CachedFailure: return "CachedFailure"; - case TimedOut: return "TimedOut"; - case MiscFailure: return "MiscFailure"; - case DependencyFailed: return "DependencyFailed"; - case LogLimitExceeded: return "LogLimitExceeded"; - case NotDeterministic: return "NotDeterministic"; - case ResolvesToAlreadyValid: return "ResolvesToAlreadyValid"; - case NoSubstituters: return "NoSubstituters"; - default: return "Unknown"; + case Built: + return "Built"; + case Substituted: + return "Substituted"; + case AlreadyValid: + return "AlreadyValid"; + case PermanentFailure: + return "PermanentFailure"; + case InputRejected: + return "InputRejected"; + case OutputRejected: + return "OutputRejected"; + case TransientFailure: + return "TransientFailure"; + case CachedFailure: + return "CachedFailure"; + case TimedOut: + return "TimedOut"; + case MiscFailure: + return "MiscFailure"; + case DependencyFailed: + return "DependencyFailed"; + case LogLimitExceeded: + return "LogLimitExceeded"; + case NotDeterministic: + return "NotDeterministic"; + case ResolvesToAlreadyValid: + return "ResolvesToAlreadyValid"; + case NoSubstituters: + return "NoSubstituters"; + default: + return "Unknown"; }; }(); return strStatus + ((errorMsg == "") ? "" : " : " + errorMsg); @@ -100,8 +117,8 @@ struct BuildResult */ std::optional cpuUser, cpuSystem; - bool operator ==(const BuildResult &) const noexcept; - std::strong_ordering operator <=>(const BuildResult &) const noexcept; + bool operator==(const BuildResult &) const noexcept; + std::strong_ordering operator<=>(const BuildResult &) const noexcept; bool success() { @@ -126,8 +143,10 @@ struct KeyedBuildResult : BuildResult // Hack to work around a gcc "may be used uninitialized" warning. KeyedBuildResult(BuildResult res, DerivedPath path) - : BuildResult(std::move(res)), path(std::move(path)) - { } + : BuildResult(std::move(res)) + , path(std::move(path)) + { + } }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/build/derivation-building-goal.hh b/src/libstore/include/nix/store/build/derivation-building-goal.hh index 569d1ddbb..66a934d49 100644 --- a/src/libstore/include/nix/store/build/derivation-building-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-building-goal.hh @@ -19,14 +19,10 @@ struct HookInstance; struct DerivationBuilder; #endif -typedef enum {rpAccept, rpDecline, rpPostpone} HookReply; +typedef enum { rpAccept, rpDecline, rpPostpone } HookReply; /** Used internally */ -void runPostBuildHook( - Store & store, - Logger & logger, - const StorePath & drvPath, - const StorePathSet & outputPaths); +void runPostBuildHook(Store & store, Logger & logger, const StorePath & drvPath, const StorePathSet & outputPaths); /** * A goal for building a derivation. 
Substitution, (or any other method of @@ -111,9 +107,8 @@ struct DerivationBuildingGoal : public Goal */ std::string machineName; - DerivationBuildingGoal(const StorePath & drvPath, const Derivation & drv, - Worker & worker, - BuildMode buildMode = bmNormal); + DerivationBuildingGoal( + const StorePath & drvPath, const Derivation & drv, Worker & worker, BuildMode buildMode = bmNormal); ~DerivationBuildingGoal(); void timedOut(Error && ex) override; @@ -179,18 +174,16 @@ struct DerivationBuildingGoal : public Goal void started(); - Done done( - BuildResult::Status status, - SingleDrvOutputs builtOutputs = {}, - std::optional ex = {}); + Done done(BuildResult::Status status, SingleDrvOutputs builtOutputs = {}, std::optional ex = {}); void appendLogTailErrorMsg(std::string & msg); StorePathSet exportReferences(const StorePathSet & storePaths); - JobCategory jobCategory() const override { + JobCategory jobCategory() const override + { return JobCategory::Build; }; }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/build/derivation-building-misc.hh b/src/libstore/include/nix/store/build/derivation-building-misc.hh index 3259c5e36..46577919b 100644 --- a/src/libstore/include/nix/store/build/derivation-building-misc.hh +++ b/src/libstore/include/nix/store/build/derivation-building-misc.hh @@ -25,6 +25,7 @@ struct InitialOutputStatus { StorePath path; PathStatus status; + /** * Valid in the store, and additionally non-corrupt if we are repairing */ @@ -32,6 +33,7 @@ struct InitialOutputStatus { return status == PathStatus::Valid; } + /** * Merely present, allowed to be corrupt */ @@ -55,4 +57,4 @@ void runPostBuildHook(Store & store, Logger & logger, const StorePath & drvPath, */ std::string showKnownOutputs(Store & store, const Derivation & drv); -} +} // namespace nix diff --git a/src/libstore/include/nix/store/build/derivation-goal.hh b/src/libstore/include/nix/store/build/derivation-goal.hh index ac9ec5346..d78073a91 100644 --- a/src/libstore/include/nix/store/build/derivation-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-goal.hh @@ -15,11 +15,7 @@ namespace nix { using std::map; /** Used internally */ -void runPostBuildHook( - Store & store, - Logger & logger, - const StorePath & drvPath, - const StorePathSet & outputPaths); +void runPostBuildHook(Store & store, Logger & logger, const StorePath & drvPath, const StorePathSet & outputPaths); /** * A goal for realising a single output of a derivation. 
Various sorts of @@ -62,12 +58,18 @@ struct DerivationGoal : public Goal std::unique_ptr> mcExpectedBuilds; - DerivationGoal(const StorePath & drvPath, const Derivation & drv, - const OutputName & wantedOutput, Worker & worker, + DerivationGoal( + const StorePath & drvPath, + const Derivation & drv, + const OutputName & wantedOutput, + Worker & worker, BuildMode buildMode = bmNormal); ~DerivationGoal() = default; - void timedOut(Error && ex) override { unreachable(); }; + void timedOut(Error && ex) override + { + unreachable(); + }; std::string key() override; @@ -100,14 +102,12 @@ struct DerivationGoal : public Goal Co repairClosure(); - Done done( - BuildResult::Status status, - SingleDrvOutputs builtOutputs = {}, - std::optional ex = {}); + Done done(BuildResult::Status status, SingleDrvOutputs builtOutputs = {}, std::optional ex = {}); - JobCategory jobCategory() const override { + JobCategory jobCategory() const override + { return JobCategory::Administration; }; }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/build/derivation-trampoline-goal.hh b/src/libstore/include/nix/store/build/derivation-trampoline-goal.hh index 648337695..79b74f4c1 100644 --- a/src/libstore/include/nix/store/build/derivation-trampoline-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-trampoline-goal.hh @@ -131,4 +131,4 @@ private: void commonInit(); }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/build/drv-output-substitution-goal.hh b/src/libstore/include/nix/store/build/drv-output-substitution-goal.hh index 0176f001a..b42336427 100644 --- a/src/libstore/include/nix/store/build/drv-output-substitution-goal.hh +++ b/src/libstore/include/nix/store/build/drv-output-substitution-goal.hh @@ -20,7 +20,8 @@ class Worker; * 2. Substitute the corresponding output path * 3. 
Register the output info */ -class DrvOutputSubstitutionGoal : public Goal { +class DrvOutputSubstitutionGoal : public Goal +{ /** * The drv output we're trying to substitute @@ -28,7 +29,11 @@ class DrvOutputSubstitutionGoal : public Goal { DrvOutput id; public: - DrvOutputSubstitutionGoal(const DrvOutput& id, Worker & worker, RepairFlag repair = NoRepair, std::optional ca = std::nullopt); + DrvOutputSubstitutionGoal( + const DrvOutput & id, + Worker & worker, + RepairFlag repair = NoRepair, + std::optional ca = std::nullopt); typedef void (DrvOutputSubstitutionGoal::*GoalState)(); GoalState state; @@ -36,15 +41,19 @@ public: Co init(); Co realisationFetched(Goals waitees, std::shared_ptr outputInfo, nix::ref sub); - void timedOut(Error && ex) override { unreachable(); }; + void timedOut(Error && ex) override + { + unreachable(); + }; std::string key() override; void handleEOF(Descriptor fd) override; - JobCategory jobCategory() const override { + JobCategory jobCategory() const override + { return JobCategory::Substitution; }; }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/build/goal.hh b/src/libstore/include/nix/store/build/goal.hh index ee69c9cc7..52700d12e 100644 --- a/src/libstore/include/nix/store/build/goal.hh +++ b/src/libstore/include/nix/store/build/goal.hh @@ -20,8 +20,9 @@ class Worker; typedef std::shared_ptr GoalPtr; typedef std::weak_ptr WeakGoalPtr; -struct CompareGoalPtrs { - bool operator() (const GoalPtr & a, const GoalPtr & b) const; +struct CompareGoalPtrs +{ + bool operator()(const GoalPtr & a, const GoalPtr & b) const; }; /** @@ -71,7 +72,7 @@ private: Goals waitees; public: - typedef enum {ecBusy, ecSuccess, ecFailed, ecNoSubstituters} ExitCode; + typedef enum { ecBusy, ecSuccess, ecFailed, ecNoSubstituters } ExitCode; /** * Backlink to the worker. @@ -114,22 +115,25 @@ public: * Suspend our goal and wait until we get `work`-ed again. * `co_await`-able by @ref Co. */ - struct Suspend {}; + struct Suspend + {}; /** * Return from the current coroutine and suspend our goal * if we're not busy anymore, or jump to the next coroutine * set to be executed/resumed. */ - struct Return {}; + struct Return + {}; /** * `co_return`-ing this will end the goal. * If you're not inside a coroutine, you can safely discard this. */ - struct [[nodiscard]] Done { - private: - Done(){} + struct [[nodiscard]] Done + { + private: + Done() {} friend Goal; }; @@ -183,18 +187,24 @@ public: * * @todo Support returning data natively */ - struct [[nodiscard]] Co { + struct [[nodiscard]] Co + { /** * The underlying handle. */ handle_type handle; - explicit Co(handle_type handle) : handle(handle) {}; - void operator=(Co&&); - Co(Co&& rhs); + explicit Co(handle_type handle) + : handle(handle) {}; + void operator=(Co &&); + Co(Co && rhs); ~Co(); - bool await_ready() { return false; }; + bool await_ready() + { + return false; + }; + /** * When we `co_await` another `Co`-returning coroutine, * we tell the caller of `caller_coroutine.resume()` to switch to our coroutine (@ref handle). @@ -215,21 +225,29 @@ public: * Used on initial suspend, does the same as `std::suspend_always`, * but asserts that everything has been set correctly. 
*/ - struct InitialSuspend { + struct InitialSuspend + { /** * Handle of coroutine that does the * initial suspend */ handle_type handle; - bool await_ready() { return false; }; - void await_suspend(handle_type handle_) { + bool await_ready() + { + return false; + }; + + void await_suspend(handle_type handle_) + { handle = handle_; } - void await_resume() { + + void await_resume() + { assert(handle); - assert(handle.promise().goal); // goal must be set - assert(handle.promise().goal->top_co); // top_co of goal must be set + assert(handle.promise().goal); // goal must be set + assert(handle.promise().goal->top_co); // top_co of goal must be set assert(handle.promise().goal->top_co->handle == handle); // top_co of goal must be us } }; @@ -238,7 +256,8 @@ public: * Promise type for coroutines defined using @ref Co. * Attached to coroutine handle. */ - struct promise_type { + struct promise_type + { /** * Either this is who called us, or it is who we will tail-call. * It is what we "jump" to once we are done. @@ -249,7 +268,7 @@ public: * The goal that we're a part of. * Set either in @ref Co::await_suspend or in constructor of @ref Goal. */ - Goal* goal = nullptr; + Goal * goal = nullptr; /** * Is set to false when destructed to ensure we don't use a @@ -260,8 +279,13 @@ public: /** * The awaiter used by @ref final_suspend. */ - struct final_awaiter { - bool await_ready() noexcept { return false; }; + struct final_awaiter + { + bool await_ready() noexcept + { + return false; + }; + /** * Here we execute our continuation, by passing it back to the caller. * C++ compiler will create code that takes that and executes it promptly. @@ -269,7 +293,11 @@ public: * thus it must be destroyed. */ std::coroutine_handle<> await_suspend(handle_type h) noexcept; - void await_resume() noexcept { assert(false); }; + + void await_resume() noexcept + { + assert(false); + }; }; /** @@ -283,13 +311,19 @@ public: * We use this opportunity to set the @ref goal field * and `top_co` field of @ref Goal. */ - InitialSuspend initial_suspend() { return {}; }; + InitialSuspend initial_suspend() + { + return {}; + }; /** * Called on `co_return`. Creates @ref final_awaiter which * either jumps to continuation or suspends goal. */ - final_awaiter final_suspend() noexcept { return {}; }; + final_awaiter final_suspend() noexcept + { + return {}; + }; /** * Does nothing, but provides an opportunity for @@ -316,24 +350,33 @@ public: * the continuation of the new continuation. Thus, the continuation * passed to @ref return_value must not have a continuation set. */ - void return_value(Co&&); + void return_value(Co &&); /** * If an exception is thrown inside a coroutine, * we re-throw it in the context of the "resumer" of the continuation. */ - void unhandled_exception() { throw; }; + void unhandled_exception() + { + throw; + }; /** * Allows awaiting a @ref Co. */ - Co&& await_transform(Co&& co) { return static_cast(co); } + Co && await_transform(Co && co) + { + return static_cast(co); + } /** * Allows awaiting a @ref Suspend. * Always suspends. 
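Editorial aside: the coroutine machinery documented in the hunks above (a promise that stores the continuation that "called" us, a final awaiter that hands control back to that continuation, and an initial suspend that records which goal owns the frame) can be illustrated outside the patch with a minimal, self-contained sketch. The `Task`, `leaf`, and `root` names below are invented for the example and are not part of the Nix API; this is only a sketch of the continuation hand-off pattern, assuming C++20 coroutines.

```cpp
// Minimal sketch (not Nix code): a coroutine type whose final awaiter
// resumes the coroutine that co_await-ed it, mirroring the "continuation"
// field described in the promise_type above.
#include <coroutine>
#include <exception>
#include <iostream>

struct Task
{
    struct promise_type
    {
        // Who resumed us; resumed again when we finish (empty for the top-level task).
        std::coroutine_handle<> continuation;

        Task get_return_object() { return Task{std::coroutine_handle<promise_type>::from_promise(*this)}; }

        std::suspend_always initial_suspend() noexcept { return {}; }

        struct FinalAwaiter
        {
            bool await_ready() noexcept { return false; }

            // On completion, jump straight to the continuation if there is one.
            std::coroutine_handle<> await_suspend(std::coroutine_handle<promise_type> h) noexcept
            {
                if (auto cont = h.promise().continuation)
                    return cont;
                return std::noop_coroutine();
            }

            void await_resume() noexcept {}
        };

        FinalAwaiter final_suspend() noexcept { return {}; }
        void return_void() {}
        void unhandled_exception() { std::terminate(); }
    };

    std::coroutine_handle<promise_type> handle;

    explicit Task(std::coroutine_handle<promise_type> h) : handle(h) {}
    Task(Task && rhs) : handle(rhs.handle) { rhs.handle = {}; }
    ~Task() { if (handle) handle.destroy(); }

    // Awaiting a Task records the awaiter as our continuation and resumes us.
    bool await_ready() { return false; }
    std::coroutine_handle<> await_suspend(std::coroutine_handle<> caller)
    {
        handle.promise().continuation = caller;
        return handle;
    }
    void await_resume() {}
};

Task leaf()
{
    std::cout << "leaf runs\n";
    co_return;
}

Task root()
{
    std::cout << "root before\n";
    co_await leaf();   // control returns here once leaf() reaches its final awaiter
    std::cout << "root after\n";
}

int main()
{
    Task t = root();
    t.handle.resume(); // drive the top-level coroutine; it runs to completion
}
```

The same shape scales up to the goal coroutines above, where the promise additionally tracks which `Goal` owns the frame.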
*/ - std::suspend_always await_transform(Suspend) { return {}; }; + std::suspend_always await_transform(Suspend) + { + return {}; + }; }; protected: @@ -354,7 +397,7 @@ protected: Done amDone(ExitCode result, std::optional ex = {}); public: - virtual void cleanup() { } + virtual void cleanup() {} /** * Hack to say that this goal should not log `ex`, but instead keep @@ -373,7 +416,8 @@ public: std::optional ex; Goal(Worker & worker, Co init) - : worker(worker), top_co(std::move(init)) + : worker(worker) + , top_co(std::move(init)) { // top_co shouldn't have a goal already, should be nullptr. assert(!top_co->handle.promise().goal); @@ -430,9 +474,10 @@ protected: void addToWeakGoals(WeakGoals & goals, GoalPtr p); -} +} // namespace nix template -struct std::coroutine_traits { +struct std::coroutine_traits +{ using promise_type = nix::Goal::promise_type; }; diff --git a/src/libstore/include/nix/store/build/substitution-goal.hh b/src/libstore/include/nix/store/build/substitution-goal.hh index b61706840..9fc6450b1 100644 --- a/src/libstore/include/nix/store/build/substitution-goal.hh +++ b/src/libstore/include/nix/store/build/substitution-goal.hh @@ -33,24 +33,28 @@ struct PathSubstitutionGoal : public Goal */ std::thread thr; - std::unique_ptr> maintainExpectedSubstitutions, - maintainRunningSubstitutions, maintainExpectedNar, maintainExpectedDownload; + std::unique_ptr> maintainExpectedSubstitutions, maintainRunningSubstitutions, + maintainExpectedNar, maintainExpectedDownload; /** * Content address for recomputing store path */ std::optional ca; - Done done( - ExitCode result, - BuildResult::Status status, - std::optional errorMsg = {}); + Done done(ExitCode result, BuildResult::Status status, std::optional errorMsg = {}); public: - PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair = NoRepair, std::optional ca = std::nullopt); + PathSubstitutionGoal( + const StorePath & storePath, + Worker & worker, + RepairFlag repair = NoRepair, + std::optional ca = std::nullopt); ~PathSubstitutionGoal(); - void timedOut(Error && ex) override { unreachable(); }; + void timedOut(Error && ex) override + { + unreachable(); + }; /** * We prepend "a$" to the key name to ensure substitution goals @@ -66,7 +70,8 @@ public: */ Co init(); Co gotInfo(); - Co tryToRun(StorePath subPath, nix::ref sub, std::shared_ptr info, bool & substituterFailed); + Co tryToRun( + StorePath subPath, nix::ref sub, std::shared_ptr info, bool & substituterFailed); Co finished(); /** @@ -78,9 +83,10 @@ public: /* Called by destructor, can't be overridden */ void cleanup() override final; - JobCategory jobCategory() const override { + JobCategory jobCategory() const override + { return JobCategory::Substitution; }; }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/build/worker.hh b/src/libstore/include/nix/store/build/worker.hh index 491b8f494..a6de780c1 100644 --- a/src/libstore/include/nix/store/build/worker.hh +++ b/src/libstore/include/nix/store/build/worker.hh @@ -205,11 +205,10 @@ public: */ private: template - std::shared_ptr initGoalIfNeeded(std::weak_ptr & goal_weak, Args && ...args); + std::shared_ptr initGoalIfNeeded(std::weak_ptr & goal_weak, Args &&... 
args); std::shared_ptr makeDerivationTrampolineGoal( - ref drvReq, - const OutputsSpec & wantedOutputs, BuildMode buildMode = bmNormal); + ref drvReq, const OutputsSpec & wantedOutputs, BuildMode buildMode = bmNormal); public: std::shared_ptr makeDerivationTrampolineGoal( @@ -219,21 +218,24 @@ public: BuildMode buildMode = bmNormal); std::shared_ptr makeDerivationGoal( - const StorePath & drvPath, const Derivation & drv, - const OutputName & wantedOutput, BuildMode buildMode = bmNormal); + const StorePath & drvPath, + const Derivation & drv, + const OutputName & wantedOutput, + BuildMode buildMode = bmNormal); /** * @ref DerivationBuildingGoal "derivation goal" */ - std::shared_ptr makeDerivationBuildingGoal( - const StorePath & drvPath, const Derivation & drv, - BuildMode buildMode = bmNormal); + std::shared_ptr + makeDerivationBuildingGoal(const StorePath & drvPath, const Derivation & drv, BuildMode buildMode = bmNormal); /** * @ref PathSubstitutionGoal "substitution goal" */ - std::shared_ptr makePathSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional ca = std::nullopt); - std::shared_ptr makeDrvOutputSubstitutionGoal(const DrvOutput & id, RepairFlag repair = NoRepair, std::optional ca = std::nullopt); + std::shared_ptr makePathSubstitutionGoal( + const StorePath & storePath, RepairFlag repair = NoRepair, std::optional ca = std::nullopt); + std::shared_ptr makeDrvOutputSubstitutionGoal( + const DrvOutput & id, RepairFlag repair = NoRepair, std::optional ca = std::nullopt); /** * Make a goal corresponding to the `DerivedPath`. @@ -268,8 +270,11 @@ public: * Registers a running child process. `inBuildSlot` means that * the process counts towards the jobs limit. */ - void childStarted(GoalPtr goal, const std::set & channels, - bool inBuildSlot, bool respectTimeouts); + void childStarted( + GoalPtr goal, + const std::set & channels, + bool inBuildSlot, + bool respectTimeouts); /** * Unregisters a running child process. 
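Editorial aside: the `initGoalIfNeeded(std::weak_ptr & goal_weak, ...)` helper and the various `make*Goal` factories above suggest a common caching idea: the worker keeps a `std::weak_ptr` per goal key and only constructs a new goal when the cached entry has expired. The stand-alone sketch below uses made-up `FakeGoal`/`FakeWorker`/`getOrCreate` names purely to illustrate that pattern; it is not the Worker implementation itself.

```cpp
// Illustrative sketch of a weak_ptr-based goal cache: look up a weak_ptr by
// key, reuse the goal if it is still alive, otherwise construct and remember
// a fresh one.
#include <iostream>
#include <map>
#include <memory>
#include <string>
#include <utility>

struct FakeGoal
{
    std::string name;

    explicit FakeGoal(std::string name) : name(std::move(name))
    {
        std::cout << "constructing goal " << this->name << "\n";
    }
};

class FakeWorker
{
    std::map<std::string, std::weak_ptr<FakeGoal>> goals;

public:
    template<typename... Args>
    std::shared_ptr<FakeGoal> getOrCreate(const std::string & key, Args &&... args)
    {
        auto & slot = goals[key];
        if (auto existing = slot.lock())
            return existing;                       // still alive: share it
        auto fresh = std::make_shared<FakeGoal>(std::forward<Args>(args)...);
        slot = fresh;                              // remember it weakly
        return fresh;
    }
};

int main()
{
    FakeWorker worker;
    auto a = worker.getOrCreate("hello.drv", "hello.drv");
    auto b = worker.getOrCreate("hello.drv", "hello.drv"); // reuses `a`, no new construction
    std::cout << (a == b ? "same goal object\n" : "different objects\n");
}
```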
`wakeSleepers` should be @@ -343,10 +348,11 @@ public: void updateProgress() { actDerivations.progress(doneBuilds, expectedBuilds + doneBuilds, runningBuilds, failedBuilds); - actSubstitutions.progress(doneSubstitutions, expectedSubstitutions + doneSubstitutions, runningSubstitutions, failedSubstitutions); + actSubstitutions.progress( + doneSubstitutions, expectedSubstitutions + doneSubstitutions, runningSubstitutions, failedSubstitutions); act.setExpected(actFileTransfer, expectedDownloadSize + doneDownloadSize); act.setExpected(actCopyPath, expectedNarSize + doneNarSize); } }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/builtins.hh b/src/libstore/include/nix/store/builtins.hh index 096c8af7b..cc164fe82 100644 --- a/src/libstore/include/nix/store/builtins.hh +++ b/src/libstore/include/nix/store/builtins.hh @@ -20,7 +20,8 @@ struct RegisterBuiltinBuilder { typedef std::map BuiltinBuilders; - static BuiltinBuilders & builtinBuilders() { + static BuiltinBuilders & builtinBuilders() + { static BuiltinBuilders builders; return builders; } @@ -31,4 +32,4 @@ struct RegisterBuiltinBuilder } }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/builtins/buildenv.hh b/src/libstore/include/nix/store/builtins/buildenv.hh index 163666c0b..c152ab00a 100644 --- a/src/libstore/include/nix/store/builtins/buildenv.hh +++ b/src/libstore/include/nix/store/builtins/buildenv.hh @@ -8,11 +8,18 @@ namespace nix { /** * Think of this as a "store level package attrset", but stripped down to no more than the needs of buildenv. */ -struct Package { +struct Package +{ Path path; bool active; int priority; - Package(const Path & path, bool active, int priority) : path{path}, active{active}, priority{priority} {} + + Package(const Path & path, bool active, int priority) + : path{path} + , active{active} + , priority{priority} + { + } }; class BuildEnvFileConflictError : public Error @@ -22,27 +29,23 @@ public: const Path fileB; int priority; - BuildEnvFileConflictError( - const Path fileA, - const Path fileB, - int priority - ) + BuildEnvFileConflictError(const Path fileA, const Path fileB, int priority) : Error( - "Unable to build profile. There is a conflict for the following files:\n" - "\n" - " %1%\n" - " %2%", - fileA, - fileB - ) + "Unable to build profile. 
There is a conflict for the following files:\n" + "\n" + " %1%\n" + " %2%", + fileA, + fileB) , fileA(fileA) , fileB(fileB) , priority(priority) - {} + { + } }; typedef std::vector Packages; void buildProfile(const Path & out, Packages && pkgs); -} +} // namespace nix diff --git a/src/libstore/include/nix/store/common-protocol-impl.hh b/src/libstore/include/nix/store/common-protocol-impl.hh index e9c726a99..cb1020a3c 100644 --- a/src/libstore/include/nix/store/common-protocol-impl.hh +++ b/src/libstore/include/nix/store/common-protocol-impl.hh @@ -15,14 +15,15 @@ namespace nix { /* protocol-agnostic templates */ -#define COMMON_USE_LENGTH_PREFIX_SERIALISER(TEMPLATE, T) \ - TEMPLATE T CommonProto::Serialise< T >::read(const StoreDirConfig & store, CommonProto::ReadConn conn) \ - { \ - return LengthPrefixedProtoHelper::read(store, conn); \ - } \ - TEMPLATE void CommonProto::Serialise< T >::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const T & t) \ - { \ - LengthPrefixedProtoHelper::write(store, conn, t); \ +#define COMMON_USE_LENGTH_PREFIX_SERIALISER(TEMPLATE, T) \ + TEMPLATE T CommonProto::Serialise::read(const StoreDirConfig & store, CommonProto::ReadConn conn) \ + { \ + return LengthPrefixedProtoHelper::read(store, conn); \ + } \ + TEMPLATE void CommonProto::Serialise::write( \ + const StoreDirConfig & store, CommonProto::WriteConn conn, const T & t) \ + { \ + LengthPrefixedProtoHelper::write(store, conn, t); \ } #define COMMA_ , @@ -30,12 +31,9 @@ COMMON_USE_LENGTH_PREFIX_SERIALISER(template, std::vector) COMMON_USE_LENGTH_PREFIX_SERIALISER(template, std::set) COMMON_USE_LENGTH_PREFIX_SERIALISER(template, std::tuple) -COMMON_USE_LENGTH_PREFIX_SERIALISER( - template, - std::map) +COMMON_USE_LENGTH_PREFIX_SERIALISER(template, std::map) #undef COMMA_ - /* protocol-specific templates */ -} +} // namespace nix diff --git a/src/libstore/include/nix/store/common-protocol.hh b/src/libstore/include/nix/store/common-protocol.hh index 1dc4aa7c5..c1d22fa6c 100644 --- a/src/libstore/include/nix/store/common-protocol.hh +++ b/src/libstore/include/nix/store/common-protocol.hh @@ -14,7 +14,6 @@ struct ContentAddress; struct DrvOutput; struct Realisation; - /** * Shared serializers between the worker protocol, serve protocol, and a * few others. @@ -28,7 +27,8 @@ struct CommonProto * A unidirectional read connection, to be used by the read half of the * canonical serializers below. */ - struct ReadConn { + struct ReadConn + { Source & from; }; @@ -36,7 +36,8 @@ struct CommonProto * A unidirectional write connection, to be used by the write half of the * canonical serializers below. 
*/ - struct WriteConn { + struct WriteConn + { Sink & to; }; @@ -54,10 +55,10 @@ struct CommonProto } }; -#define DECLARE_COMMON_SERIALISER(T) \ - struct CommonProto::Serialise< T > \ - { \ - static T read(const StoreDirConfig & store, CommonProto::ReadConn conn); \ +#define DECLARE_COMMON_SERIALISER(T) \ + struct CommonProto::Serialise \ + { \ + static T read(const StoreDirConfig & store, CommonProto::ReadConn conn); \ static void write(const StoreDirConfig & store, CommonProto::WriteConn conn, const T & str); \ } @@ -103,4 +104,4 @@ DECLARE_COMMON_SERIALISER(std::optional); template<> DECLARE_COMMON_SERIALISER(std::optional); -} +} // namespace nix diff --git a/src/libstore/include/nix/store/common-ssh-store-config.hh b/src/libstore/include/nix/store/common-ssh-store-config.hh index 82a78f075..9e6a24b74 100644 --- a/src/libstore/include/nix/store/common-ssh-store-config.hh +++ b/src/libstore/include/nix/store/common-ssh-store-config.hh @@ -13,16 +13,18 @@ struct CommonSSHStoreConfig : virtual StoreConfig CommonSSHStoreConfig(std::string_view scheme, std::string_view host, const Params & params); - const Setting sshKey{this, "", "ssh-key", - "Path to the SSH private key used to authenticate to the remote machine."}; + const Setting sshKey{ + this, "", "ssh-key", "Path to the SSH private key used to authenticate to the remote machine."}; - const Setting sshPublicHostKey{this, "", "base64-ssh-public-host-key", - "The public host key of the remote machine."}; + const Setting sshPublicHostKey{ + this, "", "base64-ssh-public-host-key", "The public host key of the remote machine."}; - const Setting compress{this, false, "compress", - "Whether to enable SSH compression."}; + const Setting compress{this, false, "compress", "Whether to enable SSH compression."}; - const Setting remoteStore{this, "", "remote-store", + const Setting remoteStore{ + this, + "", + "remote-store", R"( [Store URL](@docroot@/store/types/index.md#store-url-format) to be used on the remote machine. The default is `auto` @@ -54,9 +56,7 @@ struct CommonSSHStoreConfig : virtual StoreConfig * * See that constructor for details on the remaining two arguments. 
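Editorial aside: the `COMMON_USE_LENGTH_PREFIX_SERIALISER` macro and the `ReadConn`/`WriteConn` wrappers in the hunks above revolve around one convention, as the name `LengthPrefixedProtoHelper` suggests: containers go over the wire as an element count followed by the elements. The sketch below shows that convention in isolation, with invented `writeVector`/`readVector` helpers over a plain byte buffer; it is not the actual Nix wire format, which is templated over protocol and store types.

```cpp
// Minimal sketch of length-prefixed serialisation: a 64-bit element count
// followed by the elements themselves. Framing and names are illustrative.
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

using Buffer = std::vector<unsigned char>;

static void writeU64(Buffer & buf, uint64_t n)
{
    for (int i = 0; i < 8; ++i)
        buf.push_back(static_cast<unsigned char>(n >> (8 * i))); // little-endian
}

static uint64_t readU64(const Buffer & buf, size_t & pos)
{
    uint64_t n = 0;
    for (int i = 0; i < 8; ++i)
        n |= static_cast<uint64_t>(buf[pos++]) << (8 * i);
    return n;
}

static void writeString(Buffer & buf, const std::string & s)
{
    writeU64(buf, s.size());
    buf.insert(buf.end(), s.begin(), s.end());
}

static std::string readString(const Buffer & buf, size_t & pos)
{
    auto len = readU64(buf, pos);
    std::string s(buf.begin() + pos, buf.begin() + pos + len);
    pos += len;
    return s;
}

// The length-prefix pattern itself: count first, then each element.
static void writeVector(Buffer & buf, const std::vector<std::string> & v)
{
    writeU64(buf, v.size());
    for (auto & s : v)
        writeString(buf, s);
}

static std::vector<std::string> readVector(const Buffer & buf, size_t & pos)
{
    std::vector<std::string> v(readU64(buf, pos));
    for (auto & s : v)
        s = readString(buf, pos);
    return v;
}

int main()
{
    Buffer buf;
    writeVector(buf, {"foo", "bar", "baz"});
    size_t pos = 0;
    for (auto & s : readVector(buf, pos))
        std::cout << s << "\n";
}
```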
*/ - SSHMaster createSSHMaster( - bool useMaster, - Descriptor logFD = INVALID_DESCRIPTOR) const; + SSHMaster createSSHMaster(bool useMaster, Descriptor logFD = INVALID_DESCRIPTOR) const; }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/content-address.hh b/src/libstore/include/nix/store/content-address.hh index 8442fabb2..0a3dc79bd 100644 --- a/src/libstore/include/nix/store/content-address.hh +++ b/src/libstore/include/nix/store/content-address.hh @@ -73,8 +73,8 @@ struct ContentAddressMethod Raw raw; - bool operator ==(const ContentAddressMethod &) const = default; - auto operator <=>(const ContentAddressMethod &) const = default; + bool operator==(const ContentAddressMethod &) const = default; + auto operator<=>(const ContentAddressMethod &) const = default; MAKE_WRAPPER_CONSTRUCTOR(ContentAddressMethod); @@ -132,7 +132,6 @@ struct ContentAddressMethod FileIngestionMethod getFileIngestionMethod() const; }; - /* * Mini content address */ @@ -161,8 +160,8 @@ struct ContentAddress */ Hash hash; - bool operator ==(const ContentAddress &) const = default; - auto operator <=>(const ContentAddress &) const = default; + bool operator==(const ContentAddress &) const = default; + auto operator<=>(const ContentAddress &) const = default; /** * Compute the content-addressability assertion @@ -184,7 +183,6 @@ struct ContentAddress */ std::string renderContentAddress(std::optional ca); - /* * Full content address * @@ -221,9 +219,9 @@ struct StoreReferences */ size_t size() const; - bool operator ==(const StoreReferences &) const = default; + bool operator==(const StoreReferences &) const = default; // TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet. - //auto operator <=>(const StoreReferences &) const = default; + // auto operator <=>(const StoreReferences &) const = default; }; // This matches the additional info that we need for makeTextPath @@ -240,9 +238,9 @@ struct TextInfo */ StorePathSet references; - bool operator ==(const TextInfo &) const = default; + bool operator==(const TextInfo &) const = default; // TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet. - //auto operator <=>(const TextInfo &) const = default; + // auto operator <=>(const TextInfo &) const = default; }; struct FixedOutputInfo @@ -262,9 +260,9 @@ struct FixedOutputInfo */ StoreReferences references; - bool operator ==(const FixedOutputInfo &) const = default; + bool operator==(const FixedOutputInfo &) const = default; // TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet. - //auto operator <=>(const FixedOutputInfo &) const = default; + // auto operator <=>(const FixedOutputInfo &) const = default; }; /** @@ -274,16 +272,13 @@ struct FixedOutputInfo */ struct ContentAddressWithReferences { - typedef std::variant< - TextInfo, - FixedOutputInfo - > Raw; + typedef std::variant Raw; Raw raw; - bool operator ==(const ContentAddressWithReferences &) const = default; + bool operator==(const ContentAddressWithReferences &) const = default; // TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet. - //auto operator <=>(const ContentAddressWithReferences &) const = default; + // auto operator <=>(const ContentAddressWithReferences &) const = default; MAKE_WRAPPER_CONSTRUCTOR(ContentAddressWithReferences); @@ -306,12 +301,11 @@ struct ContentAddressWithReferences * *partial function* and exceptions will be thrown for invalid * combinations. 
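Editorial aside: the comment on `ContentAddressWithReferences::fromParts` above notes that it is a *partial function*: not every (method, hash, references) combination is representable, and invalid ones throw. A tiny stand-alone analogue of that shape is sketched below. The `Method` enum, the specific validity rule, and the exception type are illustrative assumptions, not the actual Nix semantics.

```cpp
// Sketch of a "partial" smart constructor: reject combinations that the data
// model cannot represent instead of silently accepting them. The validity
// rule here is invented purely for illustration.
#include <iostream>
#include <set>
#include <stdexcept>
#include <string>
#include <utility>
#include <variant>

enum class Method { Text, Fixed };

struct TextInfoLike
{
    std::string hash;
    std::set<std::string> references;
};

struct FixedInfoLike
{
    std::string hash;
    std::set<std::string> references;
    bool hasSelfReference = false;
};

using ContentAddressLike = std::variant<TextInfoLike, FixedInfoLike>;

ContentAddressLike fromParts(Method method, std::string hash, std::set<std::string> refs, bool selfRef)
{
    switch (method) {
    case Method::Text:
        if (selfRef)
            // The "partial function" aspect: this combination is rejected.
            throw std::invalid_argument("text-addressed data cannot reference itself");
        return TextInfoLike{std::move(hash), std::move(refs)};
    case Method::Fixed:
        return FixedInfoLike{std::move(hash), std::move(refs), selfRef};
    }
    throw std::logic_error("unreachable");
}

int main()
{
    auto ok = fromParts(Method::Fixed, "sha256-...", {"dep1"}, true);
    std::cout << "variant index: " << ok.index() << "\n";
    try {
        fromParts(Method::Text, "sha256-...", {}, true);
    } catch (const std::invalid_argument & e) {
        std::cout << "rejected: " << e.what() << "\n";
    }
}
```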
*/ - static ContentAddressWithReferences fromParts( - ContentAddressMethod method, Hash hash, StoreReferences refs); + static ContentAddressWithReferences fromParts(ContentAddressMethod method, Hash hash, StoreReferences refs); ContentAddressMethod getMethod() const; Hash getHash() const; }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/daemon.hh b/src/libstore/include/nix/store/daemon.hh index d14541df7..4d550696e 100644 --- a/src/libstore/include/nix/store/daemon.hh +++ b/src/libstore/include/nix/store/daemon.hh @@ -8,11 +8,6 @@ namespace nix::daemon { enum RecursiveFlag : bool { NotRecursive = false, Recursive = true }; -void processConnection( - ref store, - FdSource && from, - FdSink && to, - TrustedFlag trusted, - RecursiveFlag recursive); +void processConnection(ref store, FdSource && from, FdSink && to, TrustedFlag trusted, RecursiveFlag recursive); -} +} // namespace nix::daemon diff --git a/src/libstore/include/nix/store/derivation-options.hh b/src/libstore/include/nix/store/derivation-options.hh index f61a43e60..ff3693366 100644 --- a/src/libstore/include/nix/store/derivation-options.hh +++ b/src/libstore/include/nix/store/derivation-options.hh @@ -201,7 +201,7 @@ struct DerivationOptions bool useUidRange(const BasicDerivation & drv) const; }; -}; +}; // namespace nix JSON_IMPL(DerivationOptions); JSON_IMPL(DerivationOptions::OutputChecks) diff --git a/src/libstore/include/nix/store/derivations.hh b/src/libstore/include/nix/store/derivations.hh index a813137bc..41cd179f4 100644 --- a/src/libstore/include/nix/store/derivations.hh +++ b/src/libstore/include/nix/store/derivations.hh @@ -31,8 +31,8 @@ struct DerivationOutput { StorePath path; - bool operator == (const InputAddressed &) const = default; - auto operator <=> (const InputAddressed &) const = default; + bool operator==(const InputAddressed &) const = default; + auto operator<=>(const InputAddressed &) const = default; }; /** @@ -56,8 +56,8 @@ struct DerivationOutput */ StorePath path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const; - bool operator == (const CAFixed &) const = default; - auto operator <=> (const CAFixed &) const = default; + bool operator==(const CAFixed &) const = default; + auto operator<=>(const CAFixed &) const = default; }; /** @@ -77,17 +77,18 @@ struct DerivationOutput */ HashAlgorithm hashAlgo; - bool operator == (const CAFloating &) const = default; - auto operator <=> (const CAFloating &) const = default; + bool operator==(const CAFloating &) const = default; + auto operator<=>(const CAFloating &) const = default; }; /** * Input-addressed output which depends on a (CA) derivation whose hash * isn't known yet. 
*/ - struct Deferred { - bool operator == (const Deferred &) const = default; - auto operator <=> (const Deferred &) const = default; + struct Deferred + { + bool operator==(const Deferred &) const = default; + auto operator<=>(const Deferred &) const = default; }; /** @@ -106,22 +107,16 @@ struct DerivationOutput */ HashAlgorithm hashAlgo; - bool operator == (const Impure &) const = default; - auto operator <=> (const Impure &) const = default; + bool operator==(const Impure &) const = default; + auto operator<=>(const Impure &) const = default; }; - typedef std::variant< - InputAddressed, - CAFixed, - CAFloating, - Deferred, - Impure - > Raw; + typedef std::variant Raw; Raw raw; - bool operator == (const DerivationOutput &) const = default; - auto operator <=> (const DerivationOutput &) const = default; + bool operator==(const DerivationOutput &) const = default; + auto operator<=>(const DerivationOutput &) const = default; MAKE_WRAPPER_CONSTRUCTOR(DerivationOutput); @@ -136,12 +131,10 @@ struct DerivationOutput * the safer interface provided by * BasicDerivation::outputsAndOptPaths */ - std::optional path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const; + std::optional + path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const; - nlohmann::json toJSON( - const StoreDirConfig & store, - std::string_view drvName, - OutputNameView outputName) const; + nlohmann::json toJSON(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const; /** * @param xpSettings Stop-gap to avoid globals during unit tests. */ @@ -161,8 +154,7 @@ typedef std::map DerivationOutputs; * path in which it would be written. To calculate values of these * types, see the corresponding functions in BasicDerivation. */ -typedef std::map>> - DerivationOutputsAndOptPaths; +typedef std::map>> DerivationOutputsAndOptPaths; /** * For inputs that are sub-derivations, we specify exactly which @@ -170,26 +162,29 @@ typedef std::map DerivationInputs; -struct DerivationType { +struct DerivationType +{ /** * Input-addressed derivation types */ - struct InputAddressed { + struct InputAddressed + { /** * True iff the derivation type can't be determined statically, * for instance because it (transitively) depends on a content-addressed * derivation. - */ + */ bool deferred; - bool operator == (const InputAddressed &) const = default; - auto operator <=> (const InputAddressed &) const = default; + bool operator==(const InputAddressed &) const = default; + auto operator<=>(const InputAddressed &) const = default; }; /** * Content-addressing derivation types */ - struct ContentAddressed { + struct ContentAddressed + { /** * Whether the derivation should be built safely inside a sandbox. */ @@ -207,8 +202,8 @@ struct DerivationType { */ bool fixed; - bool operator == (const ContentAddressed &) const = default; - auto operator <=> (const ContentAddressed &) const = default; + bool operator==(const ContentAddressed &) const = default; + auto operator<=>(const ContentAddressed &) const = default; }; /** @@ -217,21 +212,18 @@ struct DerivationType { * This is similar at build-time to the content addressed, not standboxed, not fixed * type, but has some restrictions on its usage. 
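Editorial aside: `DerivationOutput` and `DerivationType` in the hunks above follow the same shape: several small alternative structs with defaulted comparisons, a `Raw` alias over `std::variant`, a `raw` member, a wrapper constructor macro, and defaulted comparisons on the wrapper. The sketch below shows that pattern with generic placeholder names (`Shape`, `Circle`, `Square`), purely to illustrate why the defaulted `operator==` on the wrapper works once each alternative defines its own.

```cpp
// Sketch of the "struct wrapping a variant" sum-type pattern: each
// alternative gets a defaulted operator==, and the wrapper's defaulted
// operator== then compares the variant member alternative-by-alternative.
#include <iostream>
#include <type_traits>
#include <utility>
#include <variant>

struct Circle
{
    double radius;
    bool operator==(const Circle &) const = default;
};

struct Square
{
    double side;
    bool operator==(const Square &) const = default;
};

struct Shape
{
    using Raw = std::variant<Circle, Square>;

    Raw raw;

    // Stand-in for the MAKE_WRAPPER_CONSTRUCTOR macro: forward to the variant.
    template<typename T>
    Shape(T && alt) : raw(std::forward<T>(alt)) {}

    bool operator==(const Shape &) const = default;

    double area() const
    {
        return std::visit(
            [](auto & s) -> double {
                if constexpr (std::is_same_v<std::decay_t<decltype(s)>, Circle>)
                    return 3.14159 * s.radius * s.radius;
                else
                    return s.side * s.side;
            },
            raw);
    }
};

int main()
{
    Shape a = Circle{1.0};
    Shape b = Circle{1.0};
    Shape c = Square{2.0};
    std::cout << (a == b) << " " << (a == c) << " " << c.area() << "\n";
}
```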
*/ - struct Impure { - bool operator == (const Impure &) const = default; - auto operator <=> (const Impure &) const = default; + struct Impure + { + bool operator==(const Impure &) const = default; + auto operator<=>(const Impure &) const = default; }; - typedef std::variant< - InputAddressed, - ContentAddressed, - Impure - > Raw; + typedef std::variant Raw; Raw raw; - bool operator == (const DerivationType &) const = default; - auto operator <=> (const DerivationType &) const = default; + bool operator==(const DerivationType &) const = default; + auto operator<=>(const DerivationType &) const = default; MAKE_WRAPPER_CONSTRUCTOR(DerivationType); @@ -300,9 +292,9 @@ struct BasicDerivation BasicDerivation() = default; BasicDerivation(BasicDerivation &&) = default; BasicDerivation(const BasicDerivation &) = default; - BasicDerivation& operator=(BasicDerivation &&) = default; - BasicDerivation& operator=(const BasicDerivation &) = default; - virtual ~BasicDerivation() { }; + BasicDerivation & operator=(BasicDerivation &&) = default; + BasicDerivation & operator=(const BasicDerivation &) = default; + virtual ~BasicDerivation() {}; bool isBuiltin() const; @@ -331,9 +323,9 @@ struct BasicDerivation */ void applyRewrites(const StringMap & rewrites); - bool operator == (const BasicDerivation &) const = default; + bool operator==(const BasicDerivation &) const = default; // TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet. - //auto operator <=> (const BasicDerivation &) const = default; + // auto operator <=> (const BasicDerivation &) const = default; }; class Store; @@ -348,7 +340,9 @@ struct Derivation : BasicDerivation /** * Print a derivation. */ - std::string unparse(const StoreDirConfig & store, bool maskOutputs, + std::string unparse( + const StoreDirConfig & store, + bool maskOutputs, DerivedPathMap::ChildNode::Map * actualInputs = nullptr) const; /** @@ -369,7 +363,8 @@ struct Derivation : BasicDerivation */ std::optional tryResolve( Store & store, - std::function(ref drvPath, const std::string & outputName)> queryResolutionChain) const; + std::function(ref drvPath, const std::string & outputName)> + queryResolutionChain) const; /** * Check that the derivation is valid and does not present any @@ -382,8 +377,16 @@ struct Derivation : BasicDerivation void checkInvariants(Store & store, const StorePath & drvPath) const; Derivation() = default; - Derivation(const BasicDerivation & bd) : BasicDerivation(bd) { } - Derivation(BasicDerivation && bd) : BasicDerivation(std::move(bd)) { } + + Derivation(const BasicDerivation & bd) + : BasicDerivation(bd) + { + } + + Derivation(BasicDerivation && bd) + : BasicDerivation(std::move(bd)) + { + } nlohmann::json toJSON(const StoreDirConfig & store) const; static Derivation fromJSON( @@ -391,21 +394,17 @@ struct Derivation : BasicDerivation const nlohmann::json & json, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); - bool operator == (const Derivation &) const = default; + bool operator==(const Derivation &) const = default; // TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet. - //auto operator <=> (const Derivation &) const = default; + // auto operator <=> (const Derivation &) const = default; }; - class Store; /** * Write a derivation to the Nix store, and return its path. 
*/ -StorePath writeDerivation(Store & store, - const Derivation & drv, - RepairFlag repair = NoRepair, - bool readOnly = false); +StorePath writeDerivation(Store & store, const Derivation & drv, RepairFlag repair = NoRepair, bool readOnly = false); /** * Read a derivation from a file. @@ -432,7 +431,6 @@ bool isDerivation(std::string_view fileName); */ std::string outputPathName(std::string_view drvName, OutputNameView outputName); - /** * The hashes modulo of a derivation. * @@ -440,7 +438,8 @@ std::string outputPathName(std::string_view drvName, OutputNameView outputName); * derivations (fixed-output or not) will have a different hash for each * output. */ -struct DrvHash { +struct DrvHash +{ /** * Map from output names to hashes */ @@ -466,7 +465,7 @@ struct DrvHash { Kind kind; }; -void operator |= (DrvHash::Kind & self, const DrvHash::Kind & other) noexcept; +void operator|=(DrvHash::Kind & self, const DrvHash::Kind & other) noexcept; /** * Returns hashes with the details of fixed-output subderivations @@ -526,4 +525,4 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva */ std::string hashPlaceholder(const OutputNameView outputName); -} +} // namespace nix diff --git a/src/libstore/include/nix/store/derived-path-map.hh b/src/libstore/include/nix/store/derived-path-map.hh index 6dae73fab..c10af84ca 100644 --- a/src/libstore/include/nix/store/derived-path-map.hh +++ b/src/libstore/include/nix/store/derived-path-map.hh @@ -28,11 +28,13 @@ namespace nix { * "optional" types. */ template -struct DerivedPathMap { +struct DerivedPathMap +{ /** * A child node (non-root node). */ - struct ChildNode { + struct ChildNode + { /** * Value of this child node. * @@ -50,7 +52,7 @@ struct DerivedPathMap { */ Map childMap; - bool operator == (const ChildNode &) const noexcept; + bool operator==(const ChildNode &) const noexcept; // TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet. // decltype(std::declval() <=> std::declval()) @@ -67,7 +69,7 @@ struct DerivedPathMap { */ Map map; - bool operator == (const DerivedPathMap &) const = default; + bool operator==(const DerivedPathMap &) const = default; // TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet. // auto operator <=> (const DerivedPathMap &) const noexcept; @@ -94,8 +96,7 @@ struct DerivedPathMap { }; template<> -bool DerivedPathMap::ChildNode::operator == ( - const DerivedPathMap::ChildNode &) const noexcept; +bool DerivedPathMap::ChildNode::operator==(const DerivedPathMap::ChildNode &) const noexcept; // TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet. #if 0 @@ -110,4 +111,4 @@ inline auto DerivedPathMap::operator <=> (const DerivedPathMap::ChildNode; extern template struct DerivedPathMap; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/derived-path.hh b/src/libstore/include/nix/store/derived-path.hh index 64189bd41..bc89b012e 100644 --- a/src/libstore/include/nix/store/derived-path.hh +++ b/src/libstore/include/nix/store/derived-path.hh @@ -24,15 +24,16 @@ class Store; * cannot be simplified further. Since they are opaque, they cannot be * built, but they can fetched. 
*/ -struct DerivedPathOpaque { +struct DerivedPathOpaque +{ StorePath path; std::string to_string(const StoreDirConfig & store) const; static DerivedPathOpaque parse(const StoreDirConfig & store, std::string_view); nlohmann::json toJSON(const StoreDirConfig & store) const; - bool operator == (const DerivedPathOpaque &) const = default; - auto operator <=> (const DerivedPathOpaque &) const = default; + bool operator==(const DerivedPathOpaque &) const = default; + auto operator<=>(const DerivedPathOpaque &) const = default; }; struct SingleDerivedPath; @@ -44,7 +45,8 @@ struct SingleDerivedPath; * evaluated by building the derivation, and then taking the resulting output * path of the given output name. */ -struct SingleDerivedPathBuilt { +struct SingleDerivedPathBuilt +{ ref drvPath; OutputName output; @@ -74,19 +76,17 @@ struct SingleDerivedPathBuilt { * @param xpSettings Stop-gap to avoid globals during unit tests. */ static SingleDerivedPathBuilt parse( - const StoreDirConfig & store, ref drvPath, + const StoreDirConfig & store, + ref drvPath, OutputNameView outputs, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); nlohmann::json toJSON(Store & store) const; - bool operator == (const SingleDerivedPathBuilt &) const noexcept; - std::strong_ordering operator <=> (const SingleDerivedPathBuilt &) const noexcept; + bool operator==(const SingleDerivedPathBuilt &) const noexcept; + std::strong_ordering operator<=>(const SingleDerivedPathBuilt &) const noexcept; }; -using _SingleDerivedPathRaw = std::variant< - DerivedPathOpaque, - SingleDerivedPathBuilt ->; +using _SingleDerivedPathRaw = std::variant; /** * A "derived path" is a very simple sort of expression (not a Nix @@ -99,19 +99,21 @@ using _SingleDerivedPathRaw = std::variant< * - built, in which case it is a pair of a derivation path and an * output name. */ -struct SingleDerivedPath : _SingleDerivedPathRaw { +struct SingleDerivedPath : _SingleDerivedPathRaw +{ using Raw = _SingleDerivedPathRaw; using Raw::Raw; using Opaque = DerivedPathOpaque; using Built = SingleDerivedPathBuilt; - inline const Raw & raw() const { + inline const Raw & raw() const + { return static_cast(*this); } - bool operator == (const SingleDerivedPath &) const = default; - auto operator <=> (const SingleDerivedPath &) const = default; + bool operator==(const SingleDerivedPath &) const = default; + auto operator<=>(const SingleDerivedPath &) const = default; /** * Get the store path this is ultimately derived from (by realising @@ -156,7 +158,7 @@ struct SingleDerivedPath : _SingleDerivedPathRaw { static inline ref makeConstantStorePathRef(StorePath drvPath) { - return make_ref(SingleDerivedPath::Opaque { drvPath }); + return make_ref(SingleDerivedPath::Opaque{drvPath}); } /** @@ -171,7 +173,8 @@ static inline ref makeConstantStorePathRef(StorePath drvPath) * evaluate to single values. Perhaps this should have just a single * output name. */ -struct DerivedPathBuilt { +struct DerivedPathBuilt +{ ref drvPath; OutputsSpec outputs; @@ -201,20 +204,18 @@ struct DerivedPathBuilt { * @param xpSettings Stop-gap to avoid globals during unit tests. 
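Editorial aside: taken together, `DerivedPathOpaque`, `SingleDerivedPathBuilt`, and the `_SingleDerivedPathRaw` variant above encode a tiny expression language: a derived path is either an opaque store path or "output *o* of building this derivation". A self-contained sketch of that two-case shape follows, with simplified string-based paths, an invented `render` helper, and an arbitrary separator; real derived paths also nest (the drv path may itself be a derived path) and carry store context.

```cpp
// Sketch of the two-armed "derived path" shape: either a plain store path,
// or (derivation, output name). Paths are plain strings here for brevity.
#include <iostream>
#include <string>
#include <type_traits>
#include <variant>

struct OpaquePath
{
    std::string storePath;
};

struct BuiltPath
{
    std::string drvPath;    // in Nix this can itself be a derived path (nesting elided here)
    std::string outputName;
};

using DerivedPathLike = std::variant<OpaquePath, BuiltPath>;

// Roughly what a to_string/toJSON method does: render each arm differently.
std::string render(const DerivedPathLike & p)
{
    return std::visit(
        [](auto & arm) -> std::string {
            if constexpr (std::is_same_v<std::decay_t<decltype(arm)>, OpaquePath>)
                return arm.storePath;
            else
                return arm.drvPath + "^" + arm.outputName; // separator chosen for the example
        },
        p);
}

int main()
{
    DerivedPathLike a = OpaquePath{"/nix/store/...-hello-2.12"};
    DerivedPathLike b = BuiltPath{"/nix/store/...-hello.drv", "out"};
    std::cout << render(a) << "\n" << render(b) << "\n";
}
```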
*/ static DerivedPathBuilt parse( - const StoreDirConfig & store, ref, + const StoreDirConfig & store, + ref, std::string_view, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); nlohmann::json toJSON(Store & store) const; - bool operator == (const DerivedPathBuilt &) const noexcept; + bool operator==(const DerivedPathBuilt &) const noexcept; // TODO libc++ 16 (used by darwin) missing `std::set::operator <=>`, can't do yet. - bool operator < (const DerivedPathBuilt &) const noexcept; + bool operator<(const DerivedPathBuilt &) const noexcept; }; -using _DerivedPathRaw = std::variant< - DerivedPathOpaque, - DerivedPathBuilt ->; +using _DerivedPathRaw = std::variant; /** * A "derived path" is a very simple sort of expression that evaluates @@ -226,20 +227,22 @@ using _DerivedPathRaw = std::variant< * - built, in which case it is a pair of a derivation path and some * output names. */ -struct DerivedPath : _DerivedPathRaw { +struct DerivedPath : _DerivedPathRaw +{ using Raw = _DerivedPathRaw; using Raw::Raw; using Opaque = DerivedPathOpaque; using Built = DerivedPathBuilt; - inline const Raw & raw() const { + inline const Raw & raw() const + { return static_cast(*this); } - bool operator == (const DerivedPath &) const = default; + bool operator==(const DerivedPath &) const = default; // TODO libc++ 16 (used by darwin) missing `std::set::operator <=>`, can't do yet. - //auto operator <=> (const DerivedPath &) const = default; + // auto operator <=> (const DerivedPath &) const = default; /** * Get the store path this is ultimately derived from (by realising @@ -300,6 +303,5 @@ typedef std::vector DerivedPaths; * @param xpSettings Stop-gap to avoid globals during unit tests. */ void drvRequireExperiment( - const SingleDerivedPath & drv, - const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); -} + const SingleDerivedPath & drv, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); +} // namespace nix diff --git a/src/libstore/include/nix/store/downstream-placeholder.hh b/src/libstore/include/nix/store/downstream-placeholder.hh index da03cd9a6..ee4d9e3c2 100644 --- a/src/libstore/include/nix/store/downstream-placeholder.hh +++ b/src/libstore/include/nix/store/downstream-placeholder.hh @@ -38,7 +38,10 @@ class DownstreamPlaceholder /** * Newtype constructor */ - DownstreamPlaceholder(Hash hash) : hash(hash) { } + DownstreamPlaceholder(Hash hash) + : hash(hash) + { + } public: /** @@ -88,4 +91,4 @@ public: const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/filetransfer.hh b/src/libstore/include/nix/store/filetransfer.hh index 745aeb29e..8ff0de5ef 100644 --- a/src/libstore/include/nix/store/filetransfer.hh +++ b/src/libstore/include/nix/store/filetransfer.hh @@ -14,14 +14,15 @@ namespace nix { struct FileTransferSettings : Config { - Setting enableHttp2{this, true, "http2", - "Whether to enable HTTP/2 support."}; + Setting enableHttp2{this, true, "http2", "Whether to enable HTTP/2 support."}; - Setting userAgentSuffix{this, "", "user-agent-suffix", - "String appended to the user agent in HTTP requests."}; + Setting userAgentSuffix{ + this, "", "user-agent-suffix", "String appended to the user agent in HTTP requests."}; Setting httpConnections{ - this, 25, "http-connections", + this, + 25, + "http-connections", R"( The maximum number of parallel TCP connections used to fetch files from binary caches and by other downloads. 
It defaults @@ -30,7 +31,9 @@ struct FileTransferSettings : Config {"binary-caches-parallel-connections"}}; Setting connectTimeout{ - this, 5, "connect-timeout", + this, + 5, + "connect-timeout", R"( The timeout (in seconds) for establishing connections in the binary cache substituter. It corresponds to `curl`’s @@ -38,17 +41,22 @@ struct FileTransferSettings : Config )"}; Setting stalledDownloadTimeout{ - this, 300, "stalled-download-timeout", + this, + 300, + "stalled-download-timeout", R"( The timeout (in seconds) for receiving data from servers during download. Nix cancels idle downloads after this timeout's duration. )"}; - Setting tries{this, 5, "download-attempts", - "The number of times Nix attempts to download a file before giving up."}; + Setting tries{ + this, 5, "download-attempts", "The number of times Nix attempts to download a file before giving up."}; - Setting downloadBufferSize{this, 64 * 1024 * 1024, "download-buffer-size", + Setting downloadBufferSize{ + this, + 64 * 1024 * 1024, + "download-buffer-size", R"( The size of Nix's internal download buffer in bytes during `curl` transfers. If data is not processed quickly enough to exceed the size of this buffer, downloads may stall. @@ -77,7 +85,10 @@ struct FileTransferRequest std::function dataCallback; FileTransferRequest(std::string_view uri) - : uri(uri), parentAct(getCurActivity()) { } + : uri(uri) + , parentAct(getCurActivity()) + { + } std::string verb() const { @@ -122,15 +133,14 @@ class Store; struct FileTransfer { - virtual ~FileTransfer() { } + virtual ~FileTransfer() {} /** * Enqueue a data transfer request, returning a future to the result of * the download. The future may throw a FileTransferError * exception. */ - virtual void enqueueFileTransfer(const FileTransferRequest & request, - Callback callback) = 0; + virtual void enqueueFileTransfer(const FileTransferRequest & request, Callback callback) = 0; std::future enqueueFileTransfer(const FileTransferRequest & request); @@ -148,10 +158,8 @@ struct FileTransfer * Download a file, writing its data to a sink. The sink will be * invoked on the thread of the caller. */ - void download( - FileTransferRequest && request, - Sink & sink, - std::function resultCallback = {}); + void + download(FileTransferRequest && request, Sink & sink, std::function resultCallback = {}); enum Error { NotFound, Forbidden, Misc, Transient, Interrupted }; }; @@ -179,7 +187,7 @@ public: std::optional response; template - FileTransferError(FileTransfer::Error error, std::optional response, const Args & ... args); + FileTransferError(FileTransfer::Error error, std::optional response, const Args &... args); }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/gc-store.hh b/src/libstore/include/nix/store/gc-store.hh index 8b25ec8d4..9f2255025 100644 --- a/src/libstore/include/nix/store/gc-store.hh +++ b/src/libstore/include/nix/store/gc-store.hh @@ -7,10 +7,8 @@ namespace nix { - typedef std::unordered_map> Roots; - struct GCOptions { /** @@ -55,7 +53,6 @@ struct GCOptions uint64_t maxFreed{std::numeric_limits::max()}; }; - struct GCResults { /** @@ -71,7 +68,6 @@ struct GCResults uint64_t bytesFreed = 0; }; - /** * Mix-in class for \ref Store "stores" which expose a notion of garbage * collection. 
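Editorial aside: `FileTransfer` above exposes both a callback-based `enqueueFileTransfer` and a `std::future`-returning overload. The generic pattern for deriving the second from the first (park a `std::promise` inside the callback and hand back its future) is sketched below with made-up `enqueueWork`/`enqueueWorkFuture` names; the real class additionally routes results through its `Callback` helper and `FileTransferError`.

```cpp
// Sketch of adapting a callback-style async API into a future-style one,
// the pattern implied by the two enqueueFileTransfer overloads above.
#include <functional>
#include <future>
#include <iostream>
#include <memory>
#include <string>
#include <thread>
#include <utility>

// Assume a callback-based primitive (here: "work" done on a helper thread).
void enqueueWork(std::string input, std::function<void(std::string)> callback)
{
    std::thread([input = std::move(input), callback = std::move(callback)] {
        callback("processed: " + input);   // deliver the result asynchronously
    }).detach();
}

// Derived future-based overload: stash a promise inside the callback.
std::future<std::string> enqueueWorkFuture(std::string input)
{
    auto promise = std::make_shared<std::promise<std::string>>();
    auto future = promise->get_future();
    enqueueWork(std::move(input), [promise](std::string result) {
        promise->set_value(std::move(result));
    });
    return future;
}

int main()
{
    auto fut = enqueueWorkFuture("nar.xz");
    std::cout << fut.get() << "\n";   // blocks until the callback fires
}
```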
@@ -117,4 +113,4 @@ struct GcStore : public virtual Store virtual void collectGarbage(const GCOptions & options, GCResults & results) = 0; }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/globals.hh b/src/libstore/include/nix/store/globals.hh index 3f9d8697e..310aca80d 100644 --- a/src/libstore/include/nix/store/globals.hh +++ b/src/libstore/include/nix/store/globals.hh @@ -20,7 +20,8 @@ typedef enum { smEnabled, smRelaxed, smDisabled } SandboxMode; struct MaxBuildJobsSetting : public BaseSetting { - MaxBuildJobsSetting(Config * options, + MaxBuildJobsSetting( + Config * options, unsigned int def, const std::string & name, const std::string & description, @@ -34,14 +35,15 @@ struct MaxBuildJobsSetting : public BaseSetting }; const uint32_t maxIdsPerBuild = - #ifdef __linux__ +#ifdef __linux__ 1 << 16 - #else +#else 1 - #endif +#endif ; -class Settings : public Config { +class Settings : public Config +{ StringSet getDefaultSystemFeatures(); @@ -91,7 +93,10 @@ public: */ Path nixDaemonSocketFile; - Setting storeUri{this, getEnv("NIX_REMOTE").value_or("auto"), "store", + Setting storeUri{ + this, + getEnv("NIX_REMOTE").value_or("auto"), + "store", R"( The [URL of the Nix store](@docroot@/store/types/index.md#store-url-format) to use for most operations. @@ -100,14 +105,15 @@ public: section of the manual for supported store types and settings. )"}; - Setting keepFailed{this, false, "keep-failed", - "Whether to keep temporary directories of failed builds."}; + Setting keepFailed{this, false, "keep-failed", "Whether to keep temporary directories of failed builds."}; - Setting keepGoing{this, false, "keep-going", - "Whether to keep building derivations when another build fails."}; + Setting keepGoing{ + this, false, "keep-going", "Whether to keep building derivations when another build fails."}; Setting tryFallback{ - this, false, "fallback", + this, + false, + "fallback", R"( If set to `true`, Nix falls back to building from source if a binary substitute fails. This is equivalent to the `--fallback` @@ -120,12 +126,17 @@ public: */ bool verboseBuild = true; - Setting logLines{this, 25, "log-lines", + Setting logLines{ + this, + 25, + "log-lines", "The number of lines of the tail of " "the log to show if a build fails."}; MaxBuildJobsSetting maxBuildJobs{ - this, 1, "max-jobs", + this, + 1, + "max-jobs", R"( Maximum number of jobs that Nix tries to build locally in parallel. @@ -143,7 +154,9 @@ public: {"build-max-jobs"}}; Setting maxSubstitutionJobs{ - this, 16, "max-substitution-jobs", + this, + 16, + "max-substitution-jobs", R"( This option defines the maximum number of substitution jobs that Nix tries to run in parallel. The default is `16`. The minimum value @@ -181,7 +194,9 @@ public: bool readOnlyMode = false; Setting thisSystem{ - this, NIX_LOCAL_SYSTEM, "system", + this, + NIX_LOCAL_SYSTEM, + "system", R"( The system type of the current Nix installation. Nix only builds a given [store derivation](@docroot@/glossary.md#gloss-store-derivation) locally when its `system` attribute equals any of the values specified here or in [`extra-platforms`](#conf-extra-platforms). @@ -208,7 +223,9 @@ public: )"}; Setting maxSilentTime{ - this, 0, "max-silent-time", + this, + 0, + "max-silent-time", R"( This option defines the maximum number of seconds that a builder can go without producing any data on standard output or standard error. 
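Editorial aside: `MaxBuildJobsSetting` above subclasses `BaseSetting<unsigned int>`, which points at custom string parsing; the `max-jobs` setting has historically accepted a special spelling such as `auto` in addition to a plain number. A minimal, stand-alone version of that kind of parser is sketched below; the function name and the exact accepted spellings are assumptions for illustration, not the actual parsing code.

```cpp
// Sketch of a setting parser that accepts either a plain number or "auto",
// resolving "auto" to the machine's hardware concurrency.
#include <iostream>
#include <stdexcept>
#include <string>
#include <thread>

unsigned int parseJobsSetting(const std::string & value)
{
    if (value == "auto") {
        // hardware_concurrency() may return 0 if unknown; fall back to 1.
        unsigned int n = std::thread::hardware_concurrency();
        return n ? n : 1;
    }
    try {
        return static_cast<unsigned int>(std::stoul(value));
    } catch (const std::exception &) {
        throw std::invalid_argument("expected a number or 'auto', got: " + value);
    }
}

int main()
{
    std::cout << parseJobsSetting("4") << "\n";
    std::cout << parseJobsSetting("auto") << "\n";
}
```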
@@ -223,7 +240,9 @@ public: {"build-max-silent-time"}}; Setting buildTimeout{ - this, 0, "timeout", + this, + 0, + "timeout", R"( This option defines the maximum number of seconds that a builder can run. This is useful (for instance in an automated build system) to @@ -236,7 +255,10 @@ public: )", {"build-timeout"}}; - Setting buildHook{this, {"nix", "__build-remote"}, "build-hook", + Setting buildHook{ + this, + {"nix", "__build-remote"}, + "build-hook", R"( The path to the helper program that executes remote builds. @@ -249,7 +271,9 @@ public: )"}; Setting builders{ - this, "@" + nixConfDir + "/machines", "builders", + this, + "@" + nixConfDir + "/machines", + "builders", R"( A semicolon- or newline-separated list of build machines. @@ -365,16 +389,21 @@ public: If you want the remote machines to use substituters, set [`builders-use-substitutes`](#conf-builders-use-substitutes) to `true`. )", - {}, false}; + {}, + false}; Setting alwaysAllowSubstitutes{ - this, false, "always-allow-substitutes", + this, + false, + "always-allow-substitutes", R"( If set to `true`, Nix ignores the [`allowSubstitutes`](@docroot@/language/advanced-attributes.md) attribute in derivations and always attempt to use [available substituters](#conf-substituters). )"}; Setting buildersUseSubstitutes{ - this, false, "builders-use-substitutes", + this, + false, + "builders-use-substitutes", R"( If set to `true`, Nix instructs [remote build machines](#conf-builders) to use their own [`substituters`](#conf-substituters) if available. @@ -382,11 +411,13 @@ public: This can drastically reduce build times if the network connection between the local machine and the remote build host is slow. )"}; - Setting reservedSize{this, 8 * 1024 * 1024, "gc-reserved-space", - "Amount of reserved disk space for the garbage collector."}; + Setting reservedSize{ + this, 8 * 1024 * 1024, "gc-reserved-space", "Amount of reserved disk space for the garbage collector."}; Setting fsyncMetadata{ - this, true, "fsync-metadata", + this, + true, + "fsync-metadata", R"( If set to `true`, changes to the Nix store metadata (in `/nix/var/nix/db`) are synchronously flushed to disk. This improves @@ -394,24 +425,28 @@ public: default is `true`. )"}; - Setting fsyncStorePaths{this, false, "fsync-store-paths", + Setting fsyncStorePaths{ + this, + false, + "fsync-store-paths", R"( Whether to call `fsync()` on store paths before registering them, to flush them to disk. This improves robustness in case of system crashes, but reduces performance. The default is `false`. )"}; - Setting useSQLiteWAL{this, !isWSL1(), "use-sqlite-wal", - "Whether SQLite should use WAL mode."}; + Setting useSQLiteWAL{this, !isWSL1(), "use-sqlite-wal", "Whether SQLite should use WAL mode."}; #ifndef _WIN32 // FIXME: remove this option, `fsync-store-paths` is faster. - Setting syncBeforeRegistering{this, false, "sync-before-registering", - "Whether to call `sync()` before registering a path as valid."}; + Setting syncBeforeRegistering{ + this, false, "sync-before-registering", "Whether to call `sync()` before registering a path as valid."}; #endif Setting useSubstitutes{ - this, true, "substitute", + this, + true, + "substitute", R"( If set to `true` (default), Nix uses binary substitutes if available. This option can be disabled to force building from @@ -420,7 +455,9 @@ public: {"build-use-substitutes"}}; Setting buildUsersGroup{ - this, "", "build-users-group", + this, + "", + "build-users-group", R"( This options specifies the Unix group containing the Nix build user accounts. 
In multi-user Nix installations, builds should not be @@ -454,37 +491,48 @@ public: Defaults to `nixbld` when running as root, *empty* otherwise. )", - {}, false}; + {}, + false}; - Setting autoAllocateUids{this, false, "auto-allocate-uids", + Setting autoAllocateUids{ + this, + false, + "auto-allocate-uids", R"( Whether to select UIDs for builds automatically, instead of using the users in `build-users-group`. UIDs are allocated starting at 872415232 (0x34000000) on Linux and 56930 on macOS. - )", {}, true, Xp::AutoAllocateUids}; + )", + {}, + true, + Xp::AutoAllocateUids}; - Setting startId{this, - #ifdef __linux__ + Setting startId{ + this, +#ifdef __linux__ 0x34000000, - #else +#else 56930, - #endif +#endif "start-id", "The first UID and GID to use for dynamic ID allocation."}; - Setting uidCount{this, - #ifdef __linux__ + Setting uidCount{ + this, +#ifdef __linux__ maxIdsPerBuild * 128, - #else +#else 128, - #endif +#endif "id-count", "The number of UIDs/GIDs to use for dynamic ID allocation."}; - #ifdef __linux__ +#ifdef __linux__ Setting useCgroups{ - this, false, "use-cgroups", + this, + false, + "use-cgroups", R"( Whether to execute builds inside cgroups. This is only supported on Linux. @@ -492,14 +540,19 @@ public: Cgroups are required and enabled automatically for derivations that require the `uid-range` system feature. )"}; - #endif +#endif - Setting impersonateLinux26{this, false, "impersonate-linux-26", + Setting impersonateLinux26{ + this, + false, + "impersonate-linux-26", "Whether to impersonate a Linux 2.6 machine on newer kernels.", {"build-impersonate-linux-26"}}; Setting keepLog{ - this, true, "keep-build-log", + this, + true, + "keep-build-log", R"( If set to `true` (the default), Nix writes the build log of a derivation (i.e. the standard output and error of its builder) to @@ -509,7 +562,9 @@ public: {"build-keep-log"}}; Setting compressLog{ - this, true, "compress-build-log", + this, + true, + "compress-build-log", R"( If set to `true` (the default), build logs written to `/nix/var/log/nix/drvs` are compressed on the fly using bzip2. @@ -518,7 +573,9 @@ public: {"build-compress-log"}}; Setting maxLogSize{ - this, 0, "max-build-log-size", + this, + 0, + "max-build-log-size", R"( This option defines the maximum number of bytes that a builder can write to its stdout/stderr. If the builder exceeds this limit, it’s @@ -526,11 +583,12 @@ public: )", {"build-max-log-size"}}; - Setting pollInterval{this, 5, "build-poll-interval", - "How often (in seconds) to poll for locks."}; + Setting pollInterval{this, 5, "build-poll-interval", "How often (in seconds) to poll for locks."}; Setting gcKeepOutputs{ - this, false, "keep-outputs", + this, + false, + "keep-outputs", R"( If `true`, the garbage collector keeps the outputs of non-garbage derivations. If `false` (default), outputs are @@ -546,7 +604,9 @@ public: {"gc-keep-outputs"}}; Setting gcKeepDerivations{ - this, true, "keep-derivations", + this, + true, + "keep-derivations", R"( If `true` (default), the garbage collector keeps the derivations from which non-garbage store paths were built. 
If `false`, they are @@ -562,7 +622,9 @@ public: {"gc-keep-derivations"}}; Setting autoOptimiseStore{ - this, false, "auto-optimise-store", + this, + false, + "auto-optimise-store", R"( If set to `true`, Nix automatically detects files in the store that have identical contents, and replaces them with hard links to @@ -572,7 +634,9 @@ public: )"}; Setting envKeepDerivations{ - this, false, "keep-env-derivations", + this, + false, + "keep-env-derivations", R"( If `false` (default), derivations are not stored in Nix user environments. That is, the derivations of any build-time-only @@ -594,12 +658,13 @@ public: Setting sandboxMode{ this, - #ifdef __linux__ - smEnabled - #else - smDisabled - #endif - , "sandbox", +#ifdef __linux__ + smEnabled +#else + smDisabled +#endif + , + "sandbox", R"( If set to `true`, builds are performed in a *sandboxed environment*, i.e., they’re isolated from the normal file system @@ -628,7 +693,9 @@ public: {"build-use-chroot", "build-use-sandbox"}}; Setting sandboxPaths{ - this, {}, "sandbox-paths", + this, + {}, + "sandbox-paths", R"( A list of paths bind-mounted into Nix sandbox environments. You can use the syntax `target=source` to mount a path in a different @@ -646,11 +713,14 @@ public: )", {"build-chroot-dirs", "build-sandbox-paths"}}; - Setting sandboxFallback{this, true, "sandbox-fallback", - "Whether to disable sandboxing when the kernel doesn't allow it."}; + Setting sandboxFallback{ + this, true, "sandbox-fallback", "Whether to disable sandboxing when the kernel doesn't allow it."}; #ifndef _WIN32 - Setting requireDropSupplementaryGroups{this, isRootUser(), "require-drop-supplementary-groups", + Setting requireDropSupplementaryGroups{ + this, + isRootUser(), + "require-drop-supplementary-groups", R"( Following the principle of least privilege, Nix attempts to drop supplementary groups when building with sandboxing. @@ -671,7 +741,9 @@ public: #ifdef __linux__ Setting sandboxShmSize{ - this, "50%", "sandbox-dev-shm-size", + this, + "50%", + "sandbox-dev-shm-size", R"( *Linux only* @@ -683,7 +755,10 @@ public: #endif #if defined(__linux__) || defined(__FreeBSD__) - Setting sandboxBuildDir{this, "/build", "sandbox-build-dir", + Setting sandboxBuildDir{ + this, + "/build", + "sandbox-build-dir", R"( *Linux only* @@ -693,21 +768,32 @@ public: )"}; #endif - Setting> buildDir{this, std::nullopt, "build-dir", + Setting> buildDir{ + this, + std::nullopt, + "build-dir", R"( Override the `build-dir` store setting for all stores that have this setting. )"}; - Setting allowedImpureHostPrefixes{this, {}, "allowed-impure-host-deps", + Setting allowedImpureHostPrefixes{ + this, + {}, + "allowed-impure-host-deps", "Which prefixes to allow derivations to ask for access to (primarily for Darwin)."}; #ifdef __APPLE__ - Setting darwinLogSandboxViolations{this, false, "darwin-log-sandbox-violations", + Setting darwinLogSandboxViolations{ + this, + false, + "darwin-log-sandbox-violations", "Whether to log Darwin sandbox access violations to the system log."}; #endif Setting runDiffHook{ - this, false, "run-diff-hook", + this, + false, + "run-diff-hook", R"( If true, enable the execution of the `diff-hook` program. @@ -717,7 +803,9 @@ public: )"}; OptionalPathSetting diffHook{ - this, std::nullopt, "diff-hook", + this, + std::nullopt, + "diff-hook", R"( Absolute path to an executable capable of diffing build results. 
The hook is executed if `run-diff-hook` is true, and the @@ -765,7 +853,9 @@ public: {"binary-cache-public-keys"}}; Setting secretKeyFiles{ - this, {}, "secret-key-files", + this, + {}, + "secret-key-files", R"( A whitespace-separated list of files containing secret (private) keys. These are used to sign locally-built paths. They can be @@ -775,7 +865,9 @@ public: )"}; Setting tarballTtl{ - this, 60 * 60, "tarball-ttl", + this, + 60 * 60, + "tarball-ttl", R"( The number of seconds a downloaded tarball is considered fresh. If the cached tarball is stale, Nix checks whether it is still up @@ -792,7 +884,9 @@ public: )"}; Setting requireSigs{ - this, true, "require-sigs", + this, + true, + "require-sigs", R"( If set to `true` (the default), any non-content-addressed path added or copied to the Nix store (e.g. when substituting from a binary @@ -901,7 +995,9 @@ public: {"binary-caches"}}; Setting trustedSubstituters{ - this, {}, "trusted-substituters", + this, + {}, + "trusted-substituters", R"( A list of [Nix store URLs](@docroot@/store/types/index.md#store-url-format), separated by whitespace. These are not used by default, but users of the Nix daemon can enable them by specifying [`substituters`](#conf-substituters). @@ -911,7 +1007,9 @@ public: {"trusted-binary-caches"}}; Setting ttlNegativeNarInfoCache{ - this, 3600, "narinfo-cache-negative-ttl", + this, + 3600, + "narinfo-cache-negative-ttl", R"( The TTL in seconds for negative lookups. If a store path is queried from a [substituter](#conf-substituters) but was not found, a negative lookup is cached in the local disk cache database for the specified duration. @@ -927,7 +1025,9 @@ public: )"}; Setting ttlPositiveNarInfoCache{ - this, 30 * 24 * 3600, "narinfo-cache-positive-ttl", + this, + 30 * 24 * 3600, + "narinfo-cache-positive-ttl", R"( The TTL in seconds for positive lookups. If a store path is queried from a substituter, the result of the query is cached in the @@ -939,11 +1039,13 @@ public: mismatch if the build isn't reproducible. )"}; - Setting printMissing{this, true, "print-missing", - "Whether to print what paths need to be built or downloaded."}; + Setting printMissing{ + this, true, "print-missing", "Whether to print what paths need to be built or downloaded."}; Setting preBuildHook{ - this, "", "pre-build-hook", + this, + "", + "pre-build-hook", R"( If set, the path to a program that can set extra derivation-specific settings for this system. This is used for settings that can't be @@ -962,7 +1064,9 @@ public: )"}; Setting postBuildHook{ - this, "", "post-build-hook", + this, + "", + "post-build-hook", R"( Optional. The path to a program to execute after each build. @@ -1006,15 +1110,19 @@ public: /nix/store/xfghy8ixrhz3kyy6p724iv3cxji088dx-bash-4.4-p23`. )"}; - Setting downloadSpeed { - this, 0, "download-speed", + Setting downloadSpeed{ + this, + 0, + "download-speed", R"( Specify the maximum transfer rate in kilobytes per second you want Nix to use for downloads. )"}; Setting netrcFile{ - this, fmt("%s/%s", nixConfDir, "netrc"), "netrc-file", + this, + fmt("%s/%s", nixConfDir, "netrc"), + "netrc-file", R"( If set to an absolute path to a `netrc` file, Nix uses the HTTP authentication credentials in this file when trying to download from @@ -1039,7 +1147,9 @@ public: )"}; Setting caFile{ - this, getDefaultSSLCertFile(), "ssl-cert-file", + this, + getDefaultSSLCertFile(), + "ssl-cert-file", R"( The path of a file containing CA certificates used to authenticate `https://` downloads. 
Nix by default uses @@ -1060,7 +1170,9 @@ public: #ifdef __linux__ Setting filterSyscalls{ - this, true, "filter-syscalls", + this, + true, + "filter-syscalls", R"( Whether to prevent certain dangerous system calls, such as creation of setuid/setgid files or adding ACLs or extended @@ -1069,7 +1181,9 @@ public: )"}; Setting allowNewPrivileges{ - this, false, "allow-new-privileges", + this, + false, + "allow-new-privileges", R"( (Linux-specific.) By default, builders on Linux cannot acquire new privileges by calling setuid/setgid programs or programs that have @@ -1085,7 +1199,9 @@ public: #if NIX_SUPPORT_ACL Setting ignoredAcls{ - this, {"security.selinux", "system.nfs4_acl", "security.csm"}, "ignored-acls", + this, + {"security.selinux", "system.nfs4_acl", "security.csm"}, + "ignored-acls", R"( A list of ACLs that should be ignored, normally Nix attempts to remove all ACLs from files and directories in the Nix store, but @@ -1095,7 +1211,9 @@ public: #endif Setting hashedMirrors{ - this, {}, "hashed-mirrors", + this, + {}, + "hashed-mirrors", R"( A list of web servers used by `builtins.fetchurl` to obtain files by hash. Given a hash algorithm *ha* and a base-16 hash *h*, Nix tries to @@ -1117,7 +1235,9 @@ public: )"}; Setting minFree{ - this, 0, "min-free", + this, + 0, + "min-free", R"( When free disk space in `/nix/store` drops below `min-free` during a build, Nix performs a garbage-collection until `max-free` bytes are @@ -1125,25 +1245,28 @@ public: disables this feature. )"}; - Setting maxFree{ - // n.b. this is deliberately int64 max rather than uint64 max because - // this goes through the Nix language JSON parser and thus needs to be - // representable in Nix language integers. - this, std::numeric_limits::max(), "max-free", - R"( + Setting maxFree{// n.b. this is deliberately int64 max rather than uint64 max because + // this goes through the Nix language JSON parser and thus needs to be + // representable in Nix language integers. + this, + std::numeric_limits::max(), + "max-free", + R"( When a garbage collection is triggered by the `min-free` option, it stops as soon as `max-free` bytes are available. The default is infinity (i.e. delete all garbage). )"}; - Setting minFreeCheckInterval{this, 5, "min-free-check-interval", - "Number of seconds between checking free disk space."}; + Setting minFreeCheckInterval{ + this, 5, "min-free-check-interval", "Number of seconds between checking free disk space."}; - Setting narBufferSize{this, 32 * 1024 * 1024, "nar-buffer-size", - "Maximum size of NARs before spilling them to disk."}; + Setting narBufferSize{ + this, 32 * 1024 * 1024, "nar-buffer-size", "Maximum size of NARs before spilling them to disk."}; Setting allowSymlinkedStore{ - this, false, "allow-symlinked-store", + this, + false, + "allow-symlinked-store", R"( If set to `true`, Nix stops complaining if the store directory (typically `/nix/store`) contains symlink components. @@ -1156,7 +1279,9 @@ public: )"}; Setting useXDGBaseDirectories{ - this, false, "use-xdg-base-directories", + this, + false, + "use-xdg-base-directories", R"( If set to `true`, Nix conforms to the [XDG Base Directory Specification] for files in `$HOME`. The environment variables used to implement this are documented in the [Environment Variables section](@docroot@/command-ref/env-common.md). 
@@ -1185,10 +1310,12 @@ public: mv $HOME/.nix-defexpr $nix_state_home/defexpr mv $HOME/.nix-channels $nix_state_home/channels ``` - )" - }; + )"}; - Setting impureEnv {this, {}, "impure-env", + Setting impureEnv{ + this, + {}, + "impure-env", R"( A list of items, each in the format of: @@ -1202,10 +1329,9 @@ public: fixed-output derivations and in a multi-user Nix installation, or setting private access tokens when fetching a private repository. )", - {}, // aliases + {}, // aliases true, // document default - Xp::ConfigurableImpureEnv - }; + Xp::ConfigurableImpureEnv}; Setting upgradeNixStorePathUrl{ this, @@ -1214,8 +1340,7 @@ public: R"( Used by `nix upgrade-nix`, the URL of the file that contains the store paths of the latest Nix release. - )" - }; + )"}; Setting warnLargePathThreshold{ this, @@ -1226,11 +1351,9 @@ public: (as determined by its NAR serialisation). Default is 0, which disables the warning. Set it to 1 to warn on all paths. - )" - }; + )"}; }; - // FIXME: don't use a global variable. extern Settings settings; @@ -1268,4 +1391,4 @@ void initLibStore(bool loadConfig = true); */ void assertLibStoreInitialized(); -} +} // namespace nix diff --git a/src/libstore/include/nix/store/http-binary-cache-store.hh b/src/libstore/include/nix/store/http-binary-cache-store.hh index 66ec5f8d2..f0d85a119 100644 --- a/src/libstore/include/nix/store/http-binary-cache-store.hh +++ b/src/libstore/include/nix/store/http-binary-cache-store.hh @@ -25,4 +25,4 @@ struct HttpBinaryCacheStoreConfig : std::enable_shared_from_this openStore() const override; }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/indirect-root-store.hh b/src/libstore/include/nix/store/indirect-root-store.hh index bbdad83f3..c39e8ea69 100644 --- a/src/libstore/include/nix/store/indirect-root-store.hh +++ b/src/libstore/include/nix/store/indirect-root-store.hh @@ -72,4 +72,4 @@ protected: void makeSymlink(const Path & link, const Path & target); }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/legacy-ssh-store.hh b/src/libstore/include/nix/store/legacy-ssh-store.hh index 65f29d649..b64189af9 100644 --- a/src/libstore/include/nix/store/legacy-ssh-store.hh +++ b/src/libstore/include/nix/store/legacy-ssh-store.hh @@ -14,10 +14,7 @@ struct LegacySSHStoreConfig : std::enable_shared_from_this { using CommonSSHStoreConfig::CommonSSHStoreConfig; - LegacySSHStoreConfig( - std::string_view scheme, - std::string_view authority, - const Params & params); + LegacySSHStoreConfig(std::string_view scheme, std::string_view authority, const Params & params); #ifndef _WIN32 // Hack for getting remote build log output. 
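(Illustrative aside, not part of the patch.) The globals.hh hunks above move each `Setting` declaration to a one-argument-per-line layout: owner, default value, name, documentation, then optional aliases and a document-the-default flag. A minimal self-contained sketch of that layout, using a simplified stand-in `Setting` template and a hypothetical `ExampleConfig` rather than Nix's real configuration classes:

```cpp
#include <iostream>
#include <set>
#include <string>

// Hypothetical stand-ins for the real Config/Setting machinery, shown only to
// illustrate the argument order used by the reformatted declarations above:
// owner, default value, name, documentation, aliases, document-default flag.
struct Config {};

template<typename T>
struct Setting
{
    T value;
    std::string name;
    std::string doc;
    std::set<std::string> aliases;
    bool documentDefault;

    Setting(Config * /*owner*/,
            T def,
            std::string name,
            std::string doc,
            std::set<std::string> aliases = {},
            bool documentDefault = true)
        : value(std::move(def))
        , name(std::move(name))
        , doc(std::move(doc))
        , aliases(std::move(aliases))
        , documentDefault(documentDefault)
    {
    }

    const T & get() const { return value; }
};

struct ExampleConfig : Config
{
    // One argument per line, mirroring the layout used throughout globals.hh.
    Setting<bool> exampleSetting{
        this,
        false,
        "example-setting",
        "A hypothetical boolean setting, shown only to illustrate the layout.",
        {"old-example-alias"},
        true};
};

int main()
{
    ExampleConfig cfg;
    std::cout << cfg.exampleSetting.name << " = " << cfg.exampleSetting.get() << "\n";
}
```

The real `Setting` additionally accepts an optional experimental-feature argument as its last parameter, as the `auto-allocate-uids` and `impure-env` declarations above show.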
@@ -28,11 +25,10 @@ struct LegacySSHStoreConfig : std::enable_shared_from_this Descriptor logFD = INVALID_DESCRIPTOR; #endif - const Setting remoteProgram{this, {"nix-store"}, "remote-program", - "Path to the `nix-store` executable on the remote machine."}; + const Setting remoteProgram{ + this, {"nix-store"}, "remote-program", "Path to the `nix-store` executable on the remote machine."}; - const Setting maxConnections{this, 1, "max-connections", - "Maximum number of concurrent SSH connections."}; + const Setting maxConnections{this, 1, "max-connections", "Maximum number of concurrent SSH connections."}; /** * Hack for hydra @@ -44,9 +40,15 @@ struct LegacySSHStoreConfig : std::enable_shared_from_this */ std::optional connPipeSize; - static const std::string name() { return "SSH Store"; } + static const std::string name() + { + return "SSH Store"; + } - static StringSet uriSchemes() { return {"ssh"}; } + static StringSet uriSchemes() + { + return {"ssh"}; + } static std::string doc(); @@ -71,14 +73,12 @@ struct LegacySSHStore : public virtual Store std::string getUri() override; - void queryPathInfoUncached(const StorePath & path, - Callback> callback) noexcept override; + void queryPathInfoUncached( + const StorePath & path, Callback> callback) noexcept override; - std::map queryPathInfosUncached( - const StorePathSet & paths); + std::map queryPathInfosUncached(const StorePathSet & paths); - void addToStore(const ValidPathInfo & info, Source & source, - RepairFlag repair, CheckSigsFlag checkSigs) override; + void addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs) override; void narFromPath(const StorePath & path, Sink & sink) override; @@ -93,7 +93,9 @@ struct LegacySSHStore : public virtual Store void narFromPath(const StorePath & path, std::function fun); std::optional queryPathFromHashPart(const std::string & hashPart) override - { unsupported("queryPathFromHashPart"); } + { + unsupported("queryPathFromHashPart"); + } StorePath addToStore( std::string_view name, @@ -103,7 +105,9 @@ struct LegacySSHStore : public virtual Store const StorePathSet & references, PathFilter & filter, RepairFlag repair) override - { unsupported("addToStore"); } + { + unsupported("addToStore"); + } virtual StorePath addToStoreFromDump( Source & dump, @@ -113,12 +117,13 @@ struct LegacySSHStore : public virtual Store HashAlgorithm hashAlgo = HashAlgorithm::SHA256, const StorePathSet & references = StorePathSet(), RepairFlag repair = NoRepair) override - { unsupported("addToStore"); } + { + unsupported("addToStore"); + } public: - BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, - BuildMode buildMode) override; + BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode) override; /** * Note, the returned function must only be called once, or we'll @@ -127,16 +132,20 @@ public: * @todo Use C++23 `std::move_only_function`. 
*/ std::function buildDerivationAsync( - const StorePath & drvPath, const BasicDerivation & drv, - const ServeProto::BuildOptions & options); + const StorePath & drvPath, const BasicDerivation & drv, const ServeProto::BuildOptions & options); - void buildPaths(const std::vector & drvPaths, BuildMode buildMode, std::shared_ptr evalStore) override; + void buildPaths( + const std::vector & drvPaths, BuildMode buildMode, std::shared_ptr evalStore) override; void ensurePath(const StorePath & path) override - { unsupported("ensurePath"); } + { + unsupported("ensurePath"); + } virtual ref getFSAccessor(bool requireValidPath) override - { unsupported("getFSAccessor"); } + { + unsupported("getFSAccessor"); + } /** * The default instance would schedule the work on the client side, but @@ -147,14 +156,18 @@ public: * without it being a breaking change. */ void repairPath(const StorePath & path) override - { unsupported("repairPath"); } + { + unsupported("repairPath"); + } - void computeFSClosure(const StorePathSet & paths, - StorePathSet & out, bool flipDirection = false, - bool includeOutputs = false, bool includeDerivers = false) override; + void computeFSClosure( + const StorePathSet & paths, + StorePathSet & out, + bool flipDirection = false, + bool includeOutputs = false, + bool includeDerivers = false) override; - StorePathSet queryValidPaths(const StorePathSet & paths, - SubstituteFlag maybeSubstitute = NoSubstitute) override; + StorePathSet queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute = NoSubstitute) override; /** * Custom variation that atomically creates temp locks on the remote @@ -164,9 +177,7 @@ public: * garbage-collects paths that are already there. Optionally, ask * the remote host to substitute missing paths. */ - StorePathSet queryValidPaths(const StorePathSet & paths, - bool lock, - SubstituteFlag maybeSubstitute = NoSubstitute); + StorePathSet queryValidPaths(const StorePathSet & paths, bool lock, SubstituteFlag maybeSubstitute = NoSubstitute); /** * Just exists because this is exactly what Hydra was doing, and we @@ -178,7 +189,8 @@ public: unsigned int getProtocol() override; - struct ConnectionStats { + struct ConnectionStats + { size_t bytesReceived, bytesSent; }; @@ -192,10 +204,12 @@ public: */ std::optional isTrustedClient() override; - void queryRealisationUncached(const DrvOutput &, - Callback> callback) noexcept override + void + queryRealisationUncached(const DrvOutput &, Callback> callback) noexcept override // TODO: Implement - { unsupported("queryRealisation"); } + { + unsupported("queryRealisation"); + } }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/length-prefixed-protocol-helper.hh b/src/libstore/include/nix/store/length-prefixed-protocol-helper.hh index a83635aa4..035019340 100644 --- a/src/libstore/include/nix/store/length-prefixed-protocol-helper.hh +++ b/src/libstore/include/nix/store/length-prefixed-protocol-helper.hh @@ -30,23 +30,24 @@ struct StoreDirConfig; template struct LengthPrefixedProtoHelper; -#define LENGTH_PREFIXED_PROTO_HELPER(Inner, T) \ - struct LengthPrefixedProtoHelper< Inner, T > \ - { \ - static T read(const StoreDirConfig & store, typename Inner::ReadConn conn); \ +#define LENGTH_PREFIXED_PROTO_HELPER(Inner, T) \ + struct LengthPrefixedProtoHelper \ + { \ + static T read(const StoreDirConfig & store, typename Inner::ReadConn conn); \ static void write(const StoreDirConfig & store, typename Inner::WriteConn conn, const T & str); \ - private: \ - /*! 
\ - * Read this as simply `using S = Inner::Serialise;`. \ - * \ - * It would be nice to use that directly, but C++ doesn't seem to allow \ - * it. The `typename` keyword needed to refer to `Inner` seems to greedy \ - * (low precedence), and then C++ complains that `Serialise` is not a \ - * type parameter but a real type. \ - * \ - * Making this `S` alias seems to be the only way to avoid these issues. \ - */ \ - template using S = typename Inner::template Serialise; \ + private: \ + /*! \ + * Read this as simply `using S = Inner::Serialise;`. \ + * \ + * It would be nice to use that directly, but C++ doesn't seem to allow \ + * it. The `typename` keyword needed to refer to `Inner` seems to greedy \ + * (low precedence), and then C++ complains that `Serialise` is not a \ + * type parameter but a real type. \ + * \ + * Making this `S` alias seems to be the only way to avoid these issues. \ + */ \ + template \ + using S = typename Inner::template Serialise; \ } template @@ -66,8 +67,7 @@ LENGTH_PREFIXED_PROTO_HELPER(Inner, LENGTH_PREFIXED_PROTO_HELPER_X); template std::vector -LengthPrefixedProtoHelper>::read( - const StoreDirConfig & store, typename Inner::ReadConn conn) +LengthPrefixedProtoHelper>::read(const StoreDirConfig & store, typename Inner::ReadConn conn) { std::vector resSet; auto size = readNum(conn.from); @@ -78,8 +78,7 @@ LengthPrefixedProtoHelper>::read( } template -void -LengthPrefixedProtoHelper>::write( +void LengthPrefixedProtoHelper>::write( const StoreDirConfig & store, typename Inner::WriteConn conn, const std::vector & resSet) { conn.to << resSet.size(); @@ -112,8 +111,7 @@ void LengthPrefixedProtoHelper>::write( template std::map -LengthPrefixedProtoHelper>::read( - const StoreDirConfig & store, typename Inner::ReadConn conn) +LengthPrefixedProtoHelper>::read(const StoreDirConfig & store, typename Inner::ReadConn conn) { std::map resMap; auto size = readNum(conn.from); @@ -126,8 +124,7 @@ LengthPrefixedProtoHelper>::read( } template -void -LengthPrefixedProtoHelper>::write( +void LengthPrefixedProtoHelper>::write( const StoreDirConfig & store, typename Inner::WriteConn conn, const std::map & resMap) { conn.to << resMap.size(); @@ -139,22 +136,18 @@ LengthPrefixedProtoHelper>::write( template std::tuple -LengthPrefixedProtoHelper>::read( - const StoreDirConfig & store, typename Inner::ReadConn conn) +LengthPrefixedProtoHelper>::read(const StoreDirConfig & store, typename Inner::ReadConn conn) { - return std::tuple { + return std::tuple{ S::read(store, conn)..., }; } template -void -LengthPrefixedProtoHelper>::write( +void LengthPrefixedProtoHelper>::write( const StoreDirConfig & store, typename Inner::WriteConn conn, const std::tuple & res) { - std::apply([&](const Us &... args) { - (S::write(store, conn, args), ...); - }, res); + std::apply([&](const Us &... 
args) { (S::write(store, conn, args), ...); }, res); } -} +} // namespace nix diff --git a/src/libstore/include/nix/store/local-binary-cache-store.hh b/src/libstore/include/nix/store/local-binary-cache-store.hh index 780eaf480..3561131d4 100644 --- a/src/libstore/include/nix/store/local-binary-cache-store.hh +++ b/src/libstore/include/nix/store/local-binary-cache-store.hh @@ -28,4 +28,4 @@ struct LocalBinaryCacheStoreConfig : std::enable_shared_from_this openStore() const override; }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/local-fs-store.hh b/src/libstore/include/nix/store/local-fs-store.hh index d5fafb0c6..cae50e762 100644 --- a/src/libstore/include/nix/store/local-fs-store.hh +++ b/src/libstore/include/nix/store/local-fs-store.hh @@ -20,29 +20,25 @@ struct LocalFSStoreConfig : virtual StoreConfig */ LocalFSStoreConfig(PathView path, const Params & params); - OptionalPathSetting rootDir{this, std::nullopt, - "root", - "Directory prefixed to all other paths."}; + OptionalPathSetting rootDir{this, std::nullopt, "root", "Directory prefixed to all other paths."}; - PathSetting stateDir{this, + PathSetting stateDir{ + this, rootDir.get() ? *rootDir.get() + "/nix/var/nix" : settings.nixStateDir, "state", "Directory where Nix stores state."}; - PathSetting logDir{this, + PathSetting logDir{ + this, rootDir.get() ? *rootDir.get() + "/nix/var/log/nix" : settings.nixLogDir, "log", "directory where Nix stores log files."}; - PathSetting realStoreDir{this, - rootDir.get() ? *rootDir.get() + "/nix/store" : storeDir, "real", - "Physical path of the Nix store."}; + PathSetting realStoreDir{ + this, rootDir.get() ? *rootDir.get() + "/nix/store" : storeDir, "real", "Physical path of the Nix store."}; }; -struct LocalFSStore : - virtual Store, - virtual GcStore, - virtual LogStore +struct LocalFSStore : virtual Store, virtual GcStore, virtual LogStore { using Config = LocalFSStoreConfig; @@ -73,7 +69,10 @@ struct LocalFSStore : */ virtual Path addPermRoot(const StorePath & storePath, const Path & gcRoot) = 0; - virtual Path getRealStoreDir() { return config.realStoreDir; } + virtual Path getRealStoreDir() + { + return config.realStoreDir; + } Path toRealPath(const Path & storePath) override { @@ -82,7 +81,6 @@ struct LocalFSStore : } std::optional getBuildLogExact(const StorePath & path) override; - }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/local-overlay-store.hh b/src/libstore/include/nix/store/local-overlay-store.hh index 6077d9e53..e5097f3e4 100644 --- a/src/libstore/include/nix/store/local-overlay-store.hh +++ b/src/libstore/include/nix/store/local-overlay-store.hh @@ -9,7 +9,8 @@ struct LocalOverlayStoreConfig : virtual LocalStoreConfig { LocalOverlayStoreConfig(const StringMap & params) : LocalOverlayStoreConfig("local-overlay", "", params) - { } + { + } LocalOverlayStoreConfig(std::string_view scheme, PathView path, const Params & params) : StoreConfig(params) @@ -18,7 +19,10 @@ struct LocalOverlayStoreConfig : virtual LocalStoreConfig { } - const Setting lowerStoreUri{(StoreConfig*) this, "", "lower-store", + const Setting lowerStoreUri{ + (StoreConfig *) this, + "", + "lower-store", R"( [Store URL](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format) for the lower store. The default is `auto` (i.e. use the Nix daemon or `/nix/store` directly). @@ -27,12 +31,18 @@ struct LocalOverlayStoreConfig : virtual LocalStoreConfig Must be used as OverlayFS lower layer for this store's store dir. 
)"}; - const PathSetting upperLayer{(StoreConfig*) this, "", "upper-layer", + const PathSetting upperLayer{ + (StoreConfig *) this, + "", + "upper-layer", R"( Directory containing the OverlayFS upper layer for this store's store dir. )"}; - Setting checkMount{(StoreConfig*) this, true, "check-mount", + Setting checkMount{ + (StoreConfig *) this, + true, + "check-mount", R"( Check that the overlay filesystem is correctly mounted. @@ -43,7 +53,10 @@ struct LocalOverlayStoreConfig : virtual LocalStoreConfig default, but can be disabled if needed. )"}; - const PathSetting remountHook{(StoreConfig*) this, "", "remount-hook", + const PathSetting remountHook{ + (StoreConfig *) this, + "", + "remount-hook", R"( Script or other executable to run when overlay filesystem needs remounting. @@ -56,7 +69,10 @@ struct LocalOverlayStoreConfig : virtual LocalStoreConfig The store directory is passed as an argument to the invoked executable. )"}; - static const std::string name() { return "Experimental Local Overlay Store"; } + static const std::string name() + { + return "Experimental Local Overlay Store"; + } static std::optional experimentalFeature() { @@ -65,7 +81,7 @@ struct LocalOverlayStoreConfig : virtual LocalStoreConfig static StringSet uriSchemes() { - return { "local-overlay" }; + return {"local-overlay"}; } static std::string doc(); @@ -124,8 +140,8 @@ private: /** * Check lower store if upper DB does not have. */ - void queryPathInfoUncached(const StorePath & path, - Callback> callback) noexcept override; + void queryPathInfoUncached( + const StorePath & path, Callback> callback) noexcept override; /** * Check lower store if upper DB does not have. @@ -159,8 +175,8 @@ private: /** * Check lower store if upper DB does not have. */ - void queryRealisationUncached(const DrvOutput&, - Callback> callback) noexcept override; + void queryRealisationUncached( + const DrvOutput &, Callback> callback) noexcept override; /** * Call `remountIfNecessary` after collecting garbage normally. @@ -217,4 +233,4 @@ private: std::atomic_bool _remountRequired = false; }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/local-store.hh b/src/libstore/include/nix/store/local-store.hh index abaf1b981..461562ef1 100644 --- a/src/libstore/include/nix/store/local-store.hh +++ b/src/libstore/include/nix/store/local-store.hh @@ -13,10 +13,8 @@ #include #include - namespace nix { - /** * Nix store and database schema version. * @@ -27,7 +25,6 @@ namespace nix { */ const int nixSchemaVersion = 10; - struct OptimiseStats { unsigned long filesLinked = 0; @@ -41,7 +38,10 @@ private: /** Input for computing the build directory. See `getBuildDir()`. */ - Setting> buildDir{this, std::nullopt, "build-dir", + Setting> buildDir{ + this, + std::nullopt, + "build-dir", R"( The directory on the host, in which derivations' temporary build directories are created. 
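(Illustrative aside, not part of the patch.) The `LengthPrefixedProtoHelper` hunks above keep the existing framing for collections: write the element count first, then each element, and read them back the same way. A minimal standalone sketch of that idea over an in-memory stream, with hypothetical helpers and a simplified host-endian encoding rather than the actual worker-protocol serialisers:

```cpp
#include <cstdint>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Write a 64-bit count followed by each string (itself length-prefixed),
// mirroring the "size, then elements" framing used by the protocol helper.
void writeStrings(std::ostream & out, const std::vector<std::string> & xs)
{
    uint64_t n = xs.size();
    out.write(reinterpret_cast<const char *>(&n), sizeof n);
    for (const auto & s : xs) {
        uint64_t len = s.size();
        out.write(reinterpret_cast<const char *>(&len), sizeof len);
        out.write(s.data(), len);
    }
}

// Read the count first, then that many elements.
std::vector<std::string> readStrings(std::istream & in)
{
    uint64_t n = 0;
    in.read(reinterpret_cast<char *>(&n), sizeof n);
    std::vector<std::string> xs;
    while (n--) {
        uint64_t len = 0;
        in.read(reinterpret_cast<char *>(&len), sizeof len);
        std::string s(len, '\0');
        in.read(s.data(), len);
        xs.push_back(std::move(s));
    }
    return xs;
}

int main()
{
    std::stringstream buf;
    writeStrings(buf, {"out", "dev", "doc"});
    for (const auto & s : readStrings(buf))
        std::cout << s << "\n";
}
```

The helper above applies the same count-then-elements pattern to `std::vector`, `std::map` and `std::tuple`, delegating each element to the inner protocol's `Serialise` specialisation.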
@@ -66,21 +66,22 @@ public: Path getBuildDir() const; }; -struct LocalStoreConfig : std::enable_shared_from_this, virtual LocalFSStoreConfig, virtual LocalBuildStoreConfig +struct LocalStoreConfig : std::enable_shared_from_this, + virtual LocalFSStoreConfig, + virtual LocalBuildStoreConfig { using LocalFSStoreConfig::LocalFSStoreConfig; - LocalStoreConfig( - std::string_view scheme, - std::string_view authority, - const Params & params); + LocalStoreConfig(std::string_view scheme, std::string_view authority, const Params & params); - Setting requireSigs{this, + Setting requireSigs{ + this, settings.requireSigs, "require-sigs", "Whether store paths copied into this store should have a trusted signature."}; - Setting readOnly{this, + Setting readOnly{ + this, false, "read-only", R"( @@ -97,19 +98,22 @@ struct LocalStoreConfig : std::enable_shared_from_this, virtua > While the filesystem the database resides on might appear to be read-only, consider whether another user or system might have write access to it. )"}; - static const std::string name() { return "Local Store"; } + static const std::string name() + { + return "Local Store"; + } static StringSet uriSchemes() - { return {"local"}; } + { + return {"local"}; + } static std::string doc(); ref openStore() const override; }; -class LocalStore : - public virtual IndirectRootStore, - public virtual GcStore +class LocalStore : public virtual IndirectRootStore, public virtual GcStore { public: @@ -196,29 +200,28 @@ public: bool isValidPathUncached(const StorePath & path) override; - StorePathSet queryValidPaths(const StorePathSet & paths, - SubstituteFlag maybeSubstitute = NoSubstitute) override; + StorePathSet queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute = NoSubstitute) override; StorePathSet queryAllValidPaths() override; - void queryPathInfoUncached(const StorePath & path, - Callback> callback) noexcept override; + void queryPathInfoUncached( + const StorePath & path, Callback> callback) noexcept override; void queryReferrers(const StorePath & path, StorePathSet & referrers) override; StorePathSet queryValidDerivers(const StorePath & path) override; - std::map> queryStaticPartialDerivationOutputMap(const StorePath & path) override; + std::map> + queryStaticPartialDerivationOutputMap(const StorePath & path) override; std::optional queryPathFromHashPart(const std::string & hashPart) override; StorePathSet querySubstitutablePaths(const StorePathSet & paths) override; bool pathInfoIsUntrusted(const ValidPathInfo &) override; - bool realisationIsUntrusted(const Realisation & ) override; + bool realisationIsUntrusted(const Realisation &) override; - void addToStore(const ValidPathInfo & info, Source & source, - RepairFlag repair, CheckSigsFlag checkSigs) override; + void addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs) override; StorePath addToStoreFromDump( Source & dump, @@ -312,7 +315,8 @@ protected: /** * Result of `verifyAllValidPaths` */ - struct VerificationResult { + struct VerificationResult + { /** * Whether any errors were encountered */ @@ -365,22 +369,24 @@ public: void registerDrvOutput(const Realisation & info) override; void registerDrvOutput(const Realisation & info, CheckSigsFlag checkSigs) override; void cacheDrvOutputMapping( - State & state, - const uint64_t deriver, - const std::string & outputName, - const StorePath & output); + State & state, const uint64_t deriver, const std::string & outputName, const StorePath & output); std::optional 
queryRealisation_(State & state, const DrvOutput & id); std::optional> queryRealisationCore_(State & state, const DrvOutput & id); - void queryRealisationUncached(const DrvOutput&, - Callback> callback) noexcept override; + void queryRealisationUncached( + const DrvOutput &, Callback> callback) noexcept override; std::optional getVersion() override; protected: - void verifyPath(const StorePath & path, std::function existsInStoreDir, - StorePathSet & done, StorePathSet & validPaths, RepairFlag repair, bool & errors); + void verifyPath( + const StorePath & path, + std::function existsInStoreDir, + StorePathSet & done, + StorePathSet & validPaths, + RepairFlag repair, + bool & errors); private: @@ -426,7 +432,8 @@ private: InodeHash loadInodeHash(); Strings readDirectoryIgnoringInodes(const Path & path, const InodeHash & inodeHash); - void optimisePath_(Activity * act, OptimiseStats & stats, const Path & path, InodeHash & inodeHash, RepairFlag repair); + void + optimisePath_(Activity * act, OptimiseStats & stats, const Path & path, InodeHash & inodeHash, RepairFlag repair); // Internal versions that are not wrapped in retry_sqlite. bool isValidPath_(State & state, const StorePath & path); @@ -438,4 +445,4 @@ private: friend struct DerivationGoal; }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/log-store.hh b/src/libstore/include/nix/store/log-store.hh index fc12b0c47..2d81d02b1 100644 --- a/src/libstore/include/nix/store/log-store.hh +++ b/src/libstore/include/nix/store/log-store.hh @@ -3,7 +3,6 @@ #include "nix/store/store-api.hh" - namespace nix { struct LogStore : public virtual Store @@ -23,4 +22,4 @@ struct LogStore : public virtual Store static LogStore & require(Store & store); }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/machines.hh b/src/libstore/include/nix/store/machines.hh index 2bf7408f6..1f7bb669a 100644 --- a/src/libstore/include/nix/store/machines.hh +++ b/src/libstore/include/nix/store/machines.hh @@ -12,7 +12,8 @@ struct Machine; typedef std::vector Machines; -struct Machine { +struct Machine +{ const StoreReference storeUri; const StringSet systemTypes; @@ -85,4 +86,4 @@ struct Machine { */ Machines getMachines(); -} +} // namespace nix diff --git a/src/libstore/include/nix/store/make-content-addressed.hh b/src/libstore/include/nix/store/make-content-addressed.hh index 3881b6d40..09e7dd98a 100644 --- a/src/libstore/include/nix/store/make-content-addressed.hh +++ b/src/libstore/include/nix/store/make-content-addressed.hh @@ -7,18 +7,12 @@ namespace nix { /** Rewrite a closure of store paths to be completely content addressed. */ -std::map makeContentAddressed( - Store & srcStore, - Store & dstStore, - const StorePathSet & rootPaths); +std::map makeContentAddressed(Store & srcStore, Store & dstStore, const StorePathSet & rootPaths); /** Rewrite a closure of a store path to be completely content addressed. * * This is a convenience function for the case where you only have one root path. 
*/ -StorePath makeContentAddressed( - Store & srcStore, - Store & dstStore, - const StorePath & rootPath); +StorePath makeContentAddressed(Store & srcStore, Store & dstStore, const StorePath & rootPath); -} +} // namespace nix diff --git a/src/libstore/include/nix/store/names.hh b/src/libstore/include/nix/store/names.hh index ab315de63..23d935270 100644 --- a/src/libstore/include/nix/store/names.hh +++ b/src/libstore/include/nix/store/names.hh @@ -28,9 +28,8 @@ private: typedef std::list DrvNames; -std::string_view nextComponent(std::string_view::const_iterator & p, - const std::string_view::const_iterator end); +std::string_view nextComponent(std::string_view::const_iterator & p, const std::string_view::const_iterator end); std::strong_ordering compareVersions(const std::string_view v1, const std::string_view v2); DrvNames drvNamesFromArgs(const Strings & opArgs); -} +} // namespace nix diff --git a/src/libstore/include/nix/store/nar-accessor.hh b/src/libstore/include/nix/store/nar-accessor.hh index 199d525cb..0e69d436e 100644 --- a/src/libstore/include/nix/store/nar-accessor.hh +++ b/src/libstore/include/nix/store/nar-accessor.hh @@ -27,9 +27,7 @@ ref makeNarAccessor(Source & source); */ using GetNarBytes = std::function; -ref makeLazyNarAccessor( - const std::string & listing, - GetNarBytes getNarBytes); +ref makeLazyNarAccessor(const std::string & listing, GetNarBytes getNarBytes); /** * Write a JSON representation of the contents of a NAR (except file @@ -37,4 +35,4 @@ ref makeLazyNarAccessor( */ nlohmann::json listNar(ref accessor, const CanonPath & path, bool recurse); -} +} // namespace nix diff --git a/src/libstore/include/nix/store/nar-info-disk-cache.hh b/src/libstore/include/nix/store/nar-info-disk-cache.hh index a7fde1fbf..253487b30 100644 --- a/src/libstore/include/nix/store/nar-info-disk-cache.hh +++ b/src/libstore/include/nix/store/nar-info-disk-cache.hh @@ -12,10 +12,9 @@ class NarInfoDiskCache public: typedef enum { oValid, oInvalid, oUnknown } Outcome; - virtual ~NarInfoDiskCache() { } + virtual ~NarInfoDiskCache() {} - virtual int createCache(const std::string & uri, const Path & storeDir, - bool wantMassQuery, int priority) = 0; + virtual int createCache(const std::string & uri, const Path & storeDir, bool wantMassQuery, int priority) = 0; struct CacheInfo { @@ -26,21 +25,16 @@ public: virtual std::optional upToDateCacheExists(const std::string & uri) = 0; - virtual std::pair> lookupNarInfo( - const std::string & uri, const std::string & hashPart) = 0; + virtual std::pair> + lookupNarInfo(const std::string & uri, const std::string & hashPart) = 0; - virtual void upsertNarInfo( - const std::string & uri, const std::string & hashPart, - std::shared_ptr info) = 0; + virtual void + upsertNarInfo(const std::string & uri, const std::string & hashPart, std::shared_ptr info) = 0; - virtual void upsertRealisation( - const std::string & uri, - const Realisation & realisation) = 0; - virtual void upsertAbsentRealisation( - const std::string & uri, - const DrvOutput & id) = 0; - virtual std::pair> lookupRealisation( - const std::string & uri, const DrvOutput & id) = 0; + virtual void upsertRealisation(const std::string & uri, const Realisation & realisation) = 0; + virtual void upsertAbsentRealisation(const std::string & uri, const DrvOutput & id) = 0; + virtual std::pair> + lookupRealisation(const std::string & uri, const DrvOutput & id) = 0; }; /** @@ -51,4 +45,4 @@ ref getNarInfoDiskCache(); ref getTestNarInfoDiskCache(Path dbPath); -} +} // namespace nix diff --git 
a/src/libstore/include/nix/store/nar-info.hh b/src/libstore/include/nix/store/nar-info.hh index d66b6e058..805d46248 100644 --- a/src/libstore/include/nix/store/nar-info.hh +++ b/src/libstore/include/nix/store/nar-info.hh @@ -17,27 +17,32 @@ struct NarInfo : ValidPathInfo uint64_t fileSize = 0; NarInfo() = delete; + NarInfo(const Store & store, std::string name, ContentAddressWithReferences ca, Hash narHash) : ValidPathInfo(store, std::move(name), std::move(ca), narHash) - { } - NarInfo(StorePath path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { } - NarInfo(const ValidPathInfo & info) : ValidPathInfo(info) { } + { + } + + NarInfo(StorePath path, Hash narHash) + : ValidPathInfo(std::move(path), narHash) + { + } + + NarInfo(const ValidPathInfo & info) + : ValidPathInfo(info) + { + } + NarInfo(const Store & store, const std::string & s, const std::string & whence); - bool operator ==(const NarInfo &) const = default; + bool operator==(const NarInfo &) const = default; // TODO libc++ 16 (used by darwin) missing `std::optional::operator <=>`, can't do yet - //auto operator <=>(const NarInfo &) const = default; + // auto operator <=>(const NarInfo &) const = default; std::string to_string(const Store & store) const; - nlohmann::json toJSON( - const Store & store, - bool includeImpureInfo, - HashFormat hashFormat) const override; - static NarInfo fromJSON( - const Store & store, - const StorePath & path, - const nlohmann::json & json); + nlohmann::json toJSON(const Store & store, bool includeImpureInfo, HashFormat hashFormat) const override; + static NarInfo fromJSON(const Store & store, const StorePath & path, const nlohmann::json & json); }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/outputs-spec.hh b/src/libstore/include/nix/store/outputs-spec.hh index 4e874a6f1..5482c0e24 100644 --- a/src/libstore/include/nix/store/outputs-spec.hh +++ b/src/libstore/include/nix/store/outputs-spec.hh @@ -23,11 +23,13 @@ typedef std::string OutputName; */ typedef std::string_view OutputNameView; -struct OutputsSpec { +struct OutputsSpec +{ /** * A non-empty set of outputs, specified by name */ - struct Names : std::set> { + struct Names : std::set> + { private: using BaseType = std::set>; @@ -38,14 +40,18 @@ struct OutputsSpec { Names(const BaseType & s) : BaseType(s) - { assert(!empty()); } + { + assert(!empty()); + } /** * Needs to be "inherited manually" */ Names(BaseType && s) : BaseType(std::move(s)) - { assert(!empty()); } + { + assert(!empty()); + } /* This set should always be non-empty, so we delete this constructor in order make creating empty ones by mistake harder. @@ -56,15 +62,18 @@ struct OutputsSpec { /** * The set of all outputs, without needing to name them explicitly */ - struct All : std::monostate { }; + struct All : std::monostate + {}; typedef std::variant Raw; Raw raw; - bool operator == (const OutputsSpec &) const = default; + bool operator==(const OutputsSpec &) const = default; + // TODO libc++ 16 (used by darwin) missing `std::set::operator <=>`, can't do yet. 
- bool operator < (const OutputsSpec & other) const { + bool operator<(const OutputsSpec & other) const + { return raw < other.raw; } @@ -97,17 +106,20 @@ struct OutputsSpec { std::string to_string() const; }; -struct ExtendedOutputsSpec { - struct Default : std::monostate { }; +struct ExtendedOutputsSpec +{ + struct Default : std::monostate + {}; + using Explicit = OutputsSpec; typedef std::variant Raw; Raw raw; - bool operator == (const ExtendedOutputsSpec &) const = default; + bool operator==(const ExtendedOutputsSpec &) const = default; // TODO libc++ 16 (used by darwin) missing `std::set::operator <=>`, can't do yet. - bool operator < (const ExtendedOutputsSpec &) const; + bool operator<(const ExtendedOutputsSpec &) const; MAKE_WRAPPER_CONSTRUCTOR(ExtendedOutputsSpec); @@ -126,7 +138,7 @@ struct ExtendedOutputsSpec { std::string to_string() const; }; -} +} // namespace nix JSON_IMPL(OutputsSpec) JSON_IMPL(ExtendedOutputsSpec) diff --git a/src/libstore/include/nix/store/parsed-derivations.hh b/src/libstore/include/nix/store/parsed-derivations.hh index a7c053a8f..ecc2f7e61 100644 --- a/src/libstore/include/nix/store/parsed-derivations.hh +++ b/src/libstore/include/nix/store/parsed-derivations.hh @@ -40,4 +40,4 @@ struct StructuredAttrs static std::string writeShell(const nlohmann::json & prepared); }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/path-info.hh b/src/libstore/include/nix/store/path-info.hh index 690f0f813..91244361b 100644 --- a/src/libstore/include/nix/store/path-info.hh +++ b/src/libstore/include/nix/store/path-info.hh @@ -11,10 +11,8 @@ namespace nix { - class Store; - struct SubstitutablePathInfo { std::optional deriver; @@ -31,7 +29,6 @@ struct SubstitutablePathInfo using SubstitutablePathInfos = std::map; - /** * Information about a store object. * @@ -103,35 +100,32 @@ struct UnkeyedValidPathInfo UnkeyedValidPathInfo(const UnkeyedValidPathInfo & other) = default; - UnkeyedValidPathInfo(Hash narHash) : narHash(narHash) { }; + UnkeyedValidPathInfo(Hash narHash) + : narHash(narHash) {}; - bool operator == (const UnkeyedValidPathInfo &) const noexcept; + bool operator==(const UnkeyedValidPathInfo &) const noexcept; /** * @todo return `std::strong_ordering` once `id` is removed */ - std::weak_ordering operator <=> (const UnkeyedValidPathInfo &) const noexcept; + std::weak_ordering operator<=>(const UnkeyedValidPathInfo &) const noexcept; - virtual ~UnkeyedValidPathInfo() { } + virtual ~UnkeyedValidPathInfo() {} /** * @param includeImpureInfo If true, variable elements such as the * registration time are included. 
*/ - virtual nlohmann::json toJSON( - const Store & store, - bool includeImpureInfo, - HashFormat hashFormat) const; - static UnkeyedValidPathInfo fromJSON( - const Store & store, - const nlohmann::json & json); + virtual nlohmann::json toJSON(const Store & store, bool includeImpureInfo, HashFormat hashFormat) const; + static UnkeyedValidPathInfo fromJSON(const Store & store, const nlohmann::json & json); }; -struct ValidPathInfo : UnkeyedValidPathInfo { +struct ValidPathInfo : UnkeyedValidPathInfo +{ StorePath path; - bool operator == (const ValidPathInfo &) const = default; - auto operator <=> (const ValidPathInfo &) const = default; + bool operator==(const ValidPathInfo &) const = default; + auto operator<=>(const ValidPathInfo &) const = default; /** * Return a fingerprint of the store path to be used in binary @@ -177,11 +171,14 @@ struct ValidPathInfo : UnkeyedValidPathInfo { */ Strings shortRefs() const; - ValidPathInfo(StorePath && path, UnkeyedValidPathInfo info) : UnkeyedValidPathInfo(info), path(std::move(path)) { }; - ValidPathInfo(const StorePath & path, UnkeyedValidPathInfo info) : UnkeyedValidPathInfo(info), path(path) { }; + ValidPathInfo(StorePath && path, UnkeyedValidPathInfo info) + : UnkeyedValidPathInfo(info) + , path(std::move(path)) {}; + ValidPathInfo(const StorePath & path, UnkeyedValidPathInfo info) + : UnkeyedValidPathInfo(info) + , path(path) {}; - ValidPathInfo(const Store & store, - std::string_view name, ContentAddressWithReferences && ca, Hash narHash); + ValidPathInfo(const Store & store, std::string_view name, ContentAddressWithReferences && ca, Hash narHash); }; static_assert(std::is_move_assignable_v); @@ -191,4 +188,4 @@ static_assert(std::is_move_constructible_v); using ValidPathInfos = std::map; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/path-references.hh b/src/libstore/include/nix/store/path-references.hh index b8d0b4dd0..fad1e57a3 100644 --- a/src/libstore/include/nix/store/path-references.hh +++ b/src/libstore/include/nix/store/path-references.hh @@ -23,4 +23,4 @@ public: StorePathSet getResultPaths(); }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/path-regex.hh b/src/libstore/include/nix/store/path-regex.hh index e34a305c5..2fbe0ba6b 100644 --- a/src/libstore/include/nix/store/path-regex.hh +++ b/src/libstore/include/nix/store/path-regex.hh @@ -5,11 +5,11 @@ namespace nix { - static constexpr std::string_view nameRegexStr = // This uses a negative lookahead: (?!\.\.?(-|$)) // - deny ".", "..", or those strings followed by '-' - // - when it's not those, start again at the start of the input and apply the next regex, which is [0-9a-zA-Z\+\-\._\?=]+ + // - when it's not those, start again at the start of the input and apply the next regex, which is + // [0-9a-zA-Z\+\-\._\?=]+ R"((?!\.\.?(-|$))[0-9a-zA-Z\+\-\._\?=]+)"; } diff --git a/src/libstore/include/nix/store/path-with-outputs.hh b/src/libstore/include/nix/store/path-with-outputs.hh index 368667c47..b93da082b 100644 --- a/src/libstore/include/nix/store/path-with-outputs.hh +++ b/src/libstore/include/nix/store/path-with-outputs.hh @@ -45,4 +45,4 @@ class Store; StorePathWithOutputs followLinksToStorePathWithOutputs(const Store & store, std::string_view pathWithOutputs); -} +} // namespace nix diff --git a/src/libstore/include/nix/store/path.hh b/src/libstore/include/nix/store/path.hh index 279e9dba4..784298daa 100644 --- a/src/libstore/include/nix/store/path.hh +++ b/src/libstore/include/nix/store/path.hh @@ -12,7 +12,8 @@ struct Hash; /** * Check 
whether a name is a valid store path name. * - * @throws BadStorePathName if the name is invalid. The message is of the format "name %s is not valid, for this specific reason". + * @throws BadStorePathName if the name is invalid. The message is of the format "name %s is not valid, for this + * specific reason". */ void checkName(std::string_view name); @@ -49,8 +50,8 @@ public: return baseName; } - bool operator == (const StorePath & other) const noexcept = default; - auto operator <=> (const StorePath & other) const noexcept = default; + bool operator==(const StorePath & other) const noexcept = default; + auto operator<=>(const StorePath & other) const noexcept = default; /** * Check whether a file name ends with the extension for derivations. @@ -86,15 +87,17 @@ typedef std::vector StorePaths; */ constexpr std::string_view drvExtension = ".drv"; -} +} // namespace nix namespace std { -template<> struct hash { +template<> +struct hash +{ std::size_t operator()(const nix::StorePath & path) const noexcept { - return * (std::size_t *) path.to_string().data(); + return *(std::size_t *) path.to_string().data(); } }; -} +} // namespace std diff --git a/src/libstore/include/nix/store/pathlocks.hh b/src/libstore/include/nix/store/pathlocks.hh index 33cad7868..05c7e079a 100644 --- a/src/libstore/include/nix/store/pathlocks.hh +++ b/src/libstore/include/nix/store/pathlocks.hh @@ -30,11 +30,8 @@ private: public: PathLocks(); - PathLocks(const PathSet & paths, - const std::string & waitMsg = ""); - bool lockPaths(const PathSet & _paths, - const std::string & waitMsg = "", - bool wait = true); + PathLocks(const PathSet & paths, const std::string & waitMsg = ""); + bool lockPaths(const PathSet & _paths, const std::string & waitMsg = "", bool wait = true); ~PathLocks(); void unlock(); void setDeletion(bool deletePaths); @@ -54,4 +51,4 @@ struct FdLock } }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/posix-fs-canonicalise.hh b/src/libstore/include/nix/store/posix-fs-canonicalise.hh index 1d6696023..629759cfe 100644 --- a/src/libstore/include/nix/store/posix-fs-canonicalise.hh +++ b/src/libstore/include/nix/store/posix-fs-canonicalise.hh @@ -12,7 +12,6 @@ namespace nix { typedef std::pair Inode; typedef std::set InodesSeen; - /** * "Fix", or canonicalise, the meta-data of the files in a store path * after it has been built. In particular: @@ -40,12 +39,13 @@ void canonicalisePathMetaData( void canonicalisePathMetaData( const Path & path #ifndef _WIN32 - , std::optional> uidRange = std::nullopt + , + std::optional> uidRange = std::nullopt #endif - ); +); void canonicaliseTimestampAndPermissions(const Path & path); MakeError(PathInUse, Error); -} +} // namespace nix diff --git a/src/libstore/include/nix/store/profiles.hh b/src/libstore/include/nix/store/profiles.hh index e20e1198e..75cd11340 100644 --- a/src/libstore/include/nix/store/profiles.hh +++ b/src/libstore/include/nix/store/profiles.hh @@ -13,12 +13,10 @@ #include #include - namespace nix { class StorePath; - /** * A positive number identifying a generation for a given profile. * @@ -66,7 +64,6 @@ struct Generation */ typedef std::list Generations; - /** * Find all generations for the given profile. * @@ -119,7 +116,8 @@ void deleteGeneration(const Path & profile, GenerationNumber gen); /** * Delete the given set of generations. * - * @param profile The profile, specified by its name and location combined into a path, whose generations we want to delete. 
+ * @param profile The profile, specified by its name and location combined into a path, whose generations we want to + * delete. * * @param gensToDelete The generations to delete, specified by a set of * numbers. @@ -135,7 +133,8 @@ void deleteGenerations(const Path & profile, const std::set & /** * Delete generations older than `max` passed the current generation. * - * @param profile The profile, specified by its name and location combined into a path, whose generations we want to delete. + * @param profile The profile, specified by its name and location combined into a path, whose generations we want to + * delete. * * @param max How many generations to keep up to the current one. Must * be at least 1 so we don't delete the current one. @@ -148,7 +147,8 @@ void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bo /** * Delete all generations other than the current one * - * @param profile The profile, specified by its name and location combined into a path, whose generations we want to delete. + * @param profile The profile, specified by its name and location combined into a path, whose generations we want to + * delete. * * @param dryRun Log what would be deleted instead of actually doing * so. @@ -159,7 +159,8 @@ void deleteOldGenerations(const Path & profile, bool dryRun); * Delete generations older than `t`, except for the most recent one * older than `t`. * - * @param profile The profile, specified by its name and location combined into a path, whose generations we want to delete. + * @param profile The profile, specified by its name and location combined into a path, whose generations we want to + * delete. * * @param dryRun Log what would be deleted instead of actually doing * so. @@ -185,10 +186,7 @@ void switchLink(Path link, Path target); * Roll back a profile to the specified generation, or to the most * recent one older than the current. */ -void switchGeneration( - const Path & profile, - std::optional dstGen, - bool dryRun); +void switchGeneration(const Path & profile, std::optional dstGen, bool dryRun); /** * Ensure exclusive access to a profile. Any command that modifies @@ -237,4 +235,4 @@ Path rootChannelsDir(); */ Path getDefaultProfile(); -} +} // namespace nix diff --git a/src/libstore/include/nix/store/realisation.hh b/src/libstore/include/nix/store/realisation.hh index b93ae37b6..f653d517b 100644 --- a/src/libstore/include/nix/store/realisation.hh +++ b/src/libstore/include/nix/store/realisation.hh @@ -21,7 +21,8 @@ struct OutputsSpec; * This is similar to a `DerivedPath::Opaque`, but the derivation is * identified by its "hash modulo" instead of by its store path. */ -struct DrvOutput { +struct DrvOutput +{ /** * The hash modulo of the derivation. 
* @@ -39,14 +40,17 @@ struct DrvOutput { std::string to_string() const; std::string strHash() const - { return drvHash.to_string(HashFormat::Base16, true); } + { + return drvHash.to_string(HashFormat::Base16, true); + } static DrvOutput parse(const std::string &); GENERATE_CMP(DrvOutput, me->drvHash, me->outputName); }; -struct Realisation { +struct Realisation +{ DrvOutput id; StorePath outPath; @@ -61,7 +65,7 @@ struct Realisation { std::map dependentRealisations; nlohmann::json toJSON() const; - static Realisation fromJSON(const nlohmann::json& json, const std::string& whence); + static Realisation fromJSON(const nlohmann::json & json, const std::string & whence); std::string fingerprint() const; void sign(const Signer &); @@ -73,7 +77,10 @@ struct Realisation { bool isCompatibleWith(const Realisation & other) const; - StorePath getPath() const { return outPath; } + StorePath getPath() const + { + return outPath; + } GENERATE_CMP(Realisation, me->id, me->outPath); }; @@ -100,22 +107,25 @@ typedef std::map DrvOutputs; * * Moves the `outputs` input. */ -SingleDrvOutputs filterDrvOutputs(const OutputsSpec&, SingleDrvOutputs&&); +SingleDrvOutputs filterDrvOutputs(const OutputsSpec &, SingleDrvOutputs &&); - -struct OpaquePath { +struct OpaquePath +{ StorePath path; - StorePath getPath() const { return path; } + StorePath getPath() const + { + return path; + } GENERATE_CMP(OpaquePath, me->path); }; - /** * A store path with all the history of how it went into the store */ -struct RealisedPath { +struct RealisedPath +{ /* * A path is either the result of the realisation of a derivation or * an opaque blob that has been directly added to the store @@ -125,17 +135,24 @@ struct RealisedPath { using Set = std::set; - RealisedPath(StorePath path) : raw(OpaquePath{path}) {} - RealisedPath(Realisation r) : raw(r) {} + RealisedPath(StorePath path) + : raw(OpaquePath{path}) + { + } + + RealisedPath(Realisation r) + : raw(r) + { + } /** * Get the raw store path associated to this */ StorePath path() const; - void closure(Store& store, Set& ret) const; - static void closure(Store& store, const Set& startPaths, Set& ret); - Set closure(Store& store) const; + void closure(Store & store, Set & ret) const; + static void closure(Store & store, const Set & startPaths, Set & ret); + Set closure(Store & store) const; GENERATE_CMP(RealisedPath, me->raw); }; @@ -145,13 +162,17 @@ class MissingRealisation : public Error public: MissingRealisation(DrvOutput & outputId) : MissingRealisation(outputId.outputName, outputId.strHash()) - {} + { + } + MissingRealisation(std::string_view drv, OutputName outputName) - : Error( "cannot operate on output '%s' of the " - "unbuilt derivation '%s'", - outputName, - drv) - {} + : Error( + "cannot operate on output '%s' of the " + "unbuilt derivation '%s'", + outputName, + drv) + { + } }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/remote-fs-accessor.hh b/src/libstore/include/nix/store/remote-fs-accessor.hh index 75bb40dfb..fa0555d9b 100644 --- a/src/libstore/include/nix/store/remote-fs-accessor.hh +++ b/src/libstore/include/nix/store/remote-fs-accessor.hh @@ -27,9 +27,8 @@ class RemoteFSAccessor : public SourceAccessor public: - RemoteFSAccessor(ref store, - bool requireValidPath = true, - const /* FIXME: use std::optional */ Path & cacheDir = ""); + RemoteFSAccessor( + ref store, bool requireValidPath = true, const /* FIXME: use std::optional */ Path & cacheDir = ""); std::optional maybeLstat(const CanonPath & path) override; @@ -40,4 +39,4 @@ public: 
std::string readLink(const CanonPath & path) override; }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/remote-store-connection.hh b/src/libstore/include/nix/store/remote-store-connection.hh index 33ec265c2..c2010818c 100644 --- a/src/libstore/include/nix/store/remote-store-connection.hh +++ b/src/libstore/include/nix/store/remote-store-connection.hh @@ -15,8 +15,7 @@ namespace nix { * Contains `Source` and `Sink` for actual communication, along with * other information learned when negotiating the connection. */ -struct RemoteStore::Connection : WorkerProto::BasicClientConnection, - WorkerProto::ClientHandshakeInfo +struct RemoteStore::Connection : WorkerProto::BasicClientConnection, WorkerProto::ClientHandshakeInfo { /** * Time this connection was established. @@ -38,20 +37,29 @@ struct RemoteStore::ConnectionHandle ConnectionHandle(Pool::Handle && handle) : handle(std::move(handle)) - { } + { + } ConnectionHandle(ConnectionHandle && h) noexcept : handle(std::move(h.handle)) - { } + { + } ~ConnectionHandle(); - RemoteStore::Connection & operator * () { return *handle; } - RemoteStore::Connection * operator -> () { return &*handle; } + RemoteStore::Connection & operator*() + { + return *handle; + } + + RemoteStore::Connection * operator->() + { + return &*handle; + } void processStderr(Sink * sink = 0, Source * source = 0, bool flush = true, bool block = true); void withFramedSink(std::function fun); }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/remote-store.hh b/src/libstore/include/nix/store/remote-store.hh index 18c02456f..76591cf93 100644 --- a/src/libstore/include/nix/store/remote-store.hh +++ b/src/libstore/include/nix/store/remote-store.hh @@ -8,24 +8,24 @@ #include "nix/store/gc-store.hh" #include "nix/store/log-store.hh" - namespace nix { - class Pipe; class Pid; struct FdSink; struct FdSource; -template class Pool; +template +class Pool; struct RemoteStoreConfig : virtual StoreConfig { using StoreConfig::StoreConfig; - const Setting maxConnections{this, 1, "max-connections", - "Maximum number of concurrent connections to the Nix daemon."}; + const Setting maxConnections{ + this, 1, "max-connections", "Maximum number of concurrent connections to the Nix daemon."}; - const Setting maxConnectionAge{this, + const Setting maxConnectionAge{ + this, std::numeric_limits::max(), "max-connection-age", "Maximum age of a connection before it is closed."}; @@ -35,10 +35,7 @@ struct RemoteStoreConfig : virtual StoreConfig * \todo RemoteStore is a misnomer - should be something like * DaemonStore. 
*/ -struct RemoteStore : - public virtual Store, - public virtual GcStore, - public virtual LogStore +struct RemoteStore : public virtual Store, public virtual GcStore, public virtual LogStore { using Config = RemoteStoreConfig; @@ -50,13 +47,12 @@ struct RemoteStore : bool isValidPathUncached(const StorePath & path) override; - StorePathSet queryValidPaths(const StorePathSet & paths, - SubstituteFlag maybeSubstitute = NoSubstitute) override; + StorePathSet queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute = NoSubstitute) override; StorePathSet queryAllValidPaths() override; - void queryPathInfoUncached(const StorePath & path, - Callback> callback) noexcept override; + void queryPathInfoUncached( + const StorePath & path, Callback> callback) noexcept override; void queryReferrers(const StorePath & path, StorePathSet & referrers) override; @@ -64,24 +60,24 @@ struct RemoteStore : StorePathSet queryDerivationOutputs(const StorePath & path) override; - std::map> queryPartialDerivationOutputMap(const StorePath & path, Store * evalStore = nullptr) override; + std::map> + queryPartialDerivationOutputMap(const StorePath & path, Store * evalStore = nullptr) override; std::optional queryPathFromHashPart(const std::string & hashPart) override; StorePathSet querySubstitutablePaths(const StorePathSet & paths) override; - void querySubstitutablePathInfos(const StorePathCAMap & paths, - SubstitutablePathInfos & infos) override; + void querySubstitutablePathInfos(const StorePathCAMap & paths, SubstitutablePathInfos & infos) override; /** * Add a content-addressable store path. `dump` will be drained. */ ref addCAToStore( - Source & dump, - std::string_view name, - ContentAddressMethod caMethod, - HashAlgorithm hashAlgo, - const StorePathSet & references, - RepairFlag repair); + Source & dump, + std::string_view name, + ContentAddressMethod caMethod, + HashAlgorithm hashAlgo, + const StorePathSet & references, + RepairFlag repair); /** * Add a content-addressable store path. `dump` will be drained. 
@@ -95,34 +91,25 @@ struct RemoteStore : const StorePathSet & references = StorePathSet(), RepairFlag repair = NoRepair) override; - void addToStore(const ValidPathInfo & info, Source & nar, - RepairFlag repair, CheckSigsFlag checkSigs) override; + void addToStore(const ValidPathInfo & info, Source & nar, RepairFlag repair, CheckSigsFlag checkSigs) override; - void addMultipleToStore( - Source & source, - RepairFlag repair, - CheckSigsFlag checkSigs) override; + void addMultipleToStore(Source & source, RepairFlag repair, CheckSigsFlag checkSigs) override; - void addMultipleToStore( - PathsSource && pathsToCopy, - Activity & act, - RepairFlag repair, - CheckSigsFlag checkSigs) override; + void + addMultipleToStore(PathsSource && pathsToCopy, Activity & act, RepairFlag repair, CheckSigsFlag checkSigs) override; void registerDrvOutput(const Realisation & info) override; - void queryRealisationUncached(const DrvOutput &, - Callback> callback) noexcept override; + void queryRealisationUncached( + const DrvOutput &, Callback> callback) noexcept override; - void buildPaths(const std::vector & paths, BuildMode buildMode, std::shared_ptr evalStore) override; + void + buildPaths(const std::vector & paths, BuildMode buildMode, std::shared_ptr evalStore) override; std::vector buildPathsWithResults( - const std::vector & paths, - BuildMode buildMode, - std::shared_ptr evalStore) override; + const std::vector & paths, BuildMode buildMode, std::shared_ptr evalStore) override; - BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, - BuildMode buildMode) override; + BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode) override; void ensurePath(const StorePath & path) override; @@ -145,7 +132,9 @@ struct RemoteStore : * without it being a breaking change. 
*/ void repairPath(const StorePath & path) override - { unsupported("repairPath"); } + { + unsupported("repairPath"); + } void addSignatures(const StorePath & storePath, const StringSet & sigs) override; @@ -193,9 +182,7 @@ private: std::atomic_bool failed{false}; - void copyDrvsFromEvalStore( - const std::vector & paths, - std::shared_ptr evalStore); + void copyDrvsFromEvalStore(const std::vector & paths, std::shared_ptr evalStore); }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/restricted-store.hh b/src/libstore/include/nix/store/restricted-store.hh index 6f2122c7b..b5680da4d 100644 --- a/src/libstore/include/nix/store/restricted-store.hh +++ b/src/libstore/include/nix/store/restricted-store.hh @@ -57,4 +57,4 @@ struct RestrictionContext */ ref makeRestrictedStore(ref config, ref next, RestrictionContext & context); -} +} // namespace nix diff --git a/src/libstore/include/nix/store/s3-binary-cache-store.hh b/src/libstore/include/nix/store/s3-binary-cache-store.hh index c38591e60..584488070 100644 --- a/src/libstore/include/nix/store/s3-binary-cache-store.hh +++ b/src/libstore/include/nix/store/s3-binary-cache-store.hh @@ -130,6 +130,6 @@ struct S3BinaryCacheStore : virtual BinaryCacheStore virtual const Stats & getS3Stats() = 0; }; -} +} // namespace nix #endif diff --git a/src/libstore/include/nix/store/s3.hh b/src/libstore/include/nix/store/s3.hh index 9c159ba0f..f0ed2fefd 100644 --- a/src/libstore/include/nix/store/s3.hh +++ b/src/libstore/include/nix/store/s3.hh @@ -3,13 +3,22 @@ #include "store-config-private.hh" #if NIX_WITH_S3_SUPPORT -#include "nix/util/ref.hh" +# include "nix/util/ref.hh" -#include -#include +# include +# include -namespace Aws { namespace Client { struct ClientConfiguration; } } -namespace Aws { namespace S3 { class S3Client; } } +namespace Aws { +namespace Client { +struct ClientConfiguration; +} +} // namespace Aws + +namespace Aws { +namespace S3 { +class S3Client; +} +} // namespace Aws namespace nix { @@ -18,9 +27,14 @@ struct S3Helper ref config; ref client; - S3Helper(const std::string & profile, const std::string & region, const std::string & scheme, const std::string & endpoint); + S3Helper( + const std::string & profile, + const std::string & region, + const std::string & scheme, + const std::string & endpoint); - ref makeConfig(const std::string & region, const std::string & scheme, const std::string & endpoint); + ref + makeConfig(const std::string & region, const std::string & scheme, const std::string & endpoint); struct FileTransferResult { @@ -28,10 +42,9 @@ struct S3Helper unsigned int durationMs; }; - FileTransferResult getObject( - const std::string & bucketName, const std::string & key); + FileTransferResult getObject(const std::string & bucketName, const std::string & key); }; -} +} // namespace nix #endif diff --git a/src/libstore/include/nix/store/serve-protocol-connection.hh b/src/libstore/include/nix/store/serve-protocol-connection.hh index 5822b4990..fa50132c8 100644 --- a/src/libstore/include/nix/store/serve-protocol-connection.hh +++ b/src/libstore/include/nix/store/serve-protocol-connection.hh @@ -105,4 +105,4 @@ struct ServeProto::BasicServerConnection static ServeProto::Version handshake(BufferedSink & to, Source & from, ServeProto::Version localVersion); }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/serve-protocol-impl.hh b/src/libstore/include/nix/store/serve-protocol-impl.hh index 4e66ca542..a9617165a 100644 --- a/src/libstore/include/nix/store/serve-protocol-impl.hh +++ 
b/src/libstore/include/nix/store/serve-protocol-impl.hh @@ -15,14 +15,15 @@ namespace nix { /* protocol-agnostic templates */ -#define SERVE_USE_LENGTH_PREFIX_SERIALISER(TEMPLATE, T) \ - TEMPLATE T ServeProto::Serialise< T >::read(const StoreDirConfig & store, ServeProto::ReadConn conn) \ - { \ - return LengthPrefixedProtoHelper::read(store, conn); \ - } \ - TEMPLATE void ServeProto::Serialise< T >::write(const StoreDirConfig & store, ServeProto::WriteConn conn, const T & t) \ - { \ - LengthPrefixedProtoHelper::write(store, conn, t); \ +#define SERVE_USE_LENGTH_PREFIX_SERIALISER(TEMPLATE, T) \ + TEMPLATE T ServeProto::Serialise::read(const StoreDirConfig & store, ServeProto::ReadConn conn) \ + { \ + return LengthPrefixedProtoHelper::read(store, conn); \ + } \ + TEMPLATE void ServeProto::Serialise::write( \ + const StoreDirConfig & store, ServeProto::WriteConn conn, const T & t) \ + { \ + LengthPrefixedProtoHelper::write(store, conn, t); \ } SERVE_USE_LENGTH_PREFIX_SERIALISER(template, std::vector) @@ -44,17 +45,15 @@ struct ServeProto::Serialise { static T read(const StoreDirConfig & store, ServeProto::ReadConn conn) { - return CommonProto::Serialise::read(store, - CommonProto::ReadConn { .from = conn.from }); + return CommonProto::Serialise::read(store, CommonProto::ReadConn{.from = conn.from}); } + static void write(const StoreDirConfig & store, ServeProto::WriteConn conn, const T & t) { - CommonProto::Serialise::write(store, - CommonProto::WriteConn { .to = conn.to }, - t); + CommonProto::Serialise::write(store, CommonProto::WriteConn{.to = conn.to}, t); } }; /* protocol-specific templates */ -} +} // namespace nix diff --git a/src/libstore/include/nix/store/serve-protocol.hh b/src/libstore/include/nix/store/serve-protocol.hh index 6f6bf6b60..c8f3560d1 100644 --- a/src/libstore/include/nix/store/serve-protocol.hh +++ b/src/libstore/include/nix/store/serve-protocol.hh @@ -12,7 +12,6 @@ namespace nix { #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00) #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) - struct StoreDirConfig; struct Source; @@ -20,7 +19,6 @@ struct Source; struct BuildResult; struct UnkeyedValidPathInfo; - /** * The "serve protocol", used by ssh:// stores. * @@ -45,7 +43,8 @@ struct ServeProto * A unidirectional read connection, to be used by the read half of the * canonical serializers below. */ - struct ReadConn { + struct ReadConn + { Source & from; Version version; }; @@ -54,7 +53,8 @@ struct ServeProto * A unidirectional write connection, to be used by the write half of the * canonical serializers below. */ - struct WriteConn { + struct WriteConn + { Sink & to; Version version; }; @@ -104,8 +104,7 @@ struct ServeProto struct BuildOptions; }; -enum struct ServeProto::Command : uint64_t -{ +enum struct ServeProto::Command : uint64_t { QueryValidPaths = 1, QueryPathInfos = 2, DumpStorePath = 3, @@ -117,8 +116,8 @@ enum struct ServeProto::Command : uint64_t AddToStoreNar = 9, }; - -struct ServeProto::BuildOptions { +struct ServeProto::BuildOptions +{ /** * Default value in this and every other field is so tests pass when * testing older deserialisers which do not set all the fields. @@ -130,7 +129,7 @@ struct ServeProto::BuildOptions { bool enforceDeterminism = -1; bool keepFailed = -1; - bool operator == (const ServeProto::BuildOptions &) const = default; + bool operator==(const ServeProto::BuildOptions &) const = default; }; /** @@ -139,7 +138,7 @@ struct ServeProto::BuildOptions { * @todo Switch to using `ServeProto::Serialize` instead probably. 
But * this was not done at this time so there would be less churn. */ -inline Sink & operator << (Sink & sink, ServeProto::Command op) +inline Sink & operator<<(Sink & sink, ServeProto::Command op) { return sink << (uint64_t) op; } @@ -149,7 +148,7 @@ inline Sink & operator << (Sink & sink, ServeProto::Command op) * * @todo Perhaps render known opcodes more nicely. */ -inline std::ostream & operator << (std::ostream & s, ServeProto::Command op) +inline std::ostream & operator<<(std::ostream & s, ServeProto::Command op) { return s << (uint64_t) op; } @@ -164,10 +163,10 @@ inline std::ostream & operator << (std::ostream & s, ServeProto::Command op) * be legal specialization syntax. See below for what that looks like in * practice. */ -#define DECLARE_SERVE_SERIALISER(T) \ - struct ServeProto::Serialise< T > \ - { \ - static T read(const StoreDirConfig & store, ServeProto::ReadConn conn); \ +#define DECLARE_SERVE_SERIALISER(T) \ + struct ServeProto::Serialise \ + { \ + static T read(const StoreDirConfig & store, ServeProto::ReadConn conn); \ static void write(const StoreDirConfig & store, ServeProto::WriteConn conn, const T & t); \ }; @@ -190,4 +189,4 @@ template DECLARE_SERVE_SERIALISER(std::map); #undef COMMA_ -} +} // namespace nix diff --git a/src/libstore/include/nix/store/sqlite.hh b/src/libstore/include/nix/store/sqlite.hh index 266930d75..e6d8a818a 100644 --- a/src/libstore/include/nix/store/sqlite.hh +++ b/src/libstore/include/nix/store/sqlite.hh @@ -38,14 +38,27 @@ enum class SQLiteOpenMode { struct SQLite { sqlite3 * db = 0; - SQLite() { } + + SQLite() {} + SQLite(const Path & path, SQLiteOpenMode mode = SQLiteOpenMode::Normal); SQLite(const SQLite & from) = delete; - SQLite& operator = (const SQLite & from) = delete; + SQLite & operator=(const SQLite & from) = delete; + // NOTE: This is noexcept since we are only copying and assigning raw pointers. - SQLite& operator = (SQLite && from) noexcept { db = from.db; from.db = 0; return *this; } + SQLite & operator=(SQLite && from) noexcept + { + db = from.db; + from.db = 0; + return *this; + } + ~SQLite(); - operator sqlite3 * () { return db; } + + operator sqlite3 *() + { + return db; + } /** * Disable synchronous mode, set truncate journal mode. @@ -65,11 +78,21 @@ struct SQLiteStmt sqlite3 * db = 0; sqlite3_stmt * stmt = 0; std::string sql; - SQLiteStmt() { } - SQLiteStmt(sqlite3 * db, const std::string & sql) { create(db, sql); } + + SQLiteStmt() {} + + SQLiteStmt(sqlite3 * db, const std::string & sql) + { + create(db, sql); + } + void create(sqlite3 * db, const std::string & s); ~SQLiteStmt(); - operator sqlite3_stmt * () { return stmt; } + + operator sqlite3_stmt *() + { + return stmt; + } /** * Helper for binding / executing statements. @@ -89,9 +112,9 @@ struct SQLiteStmt /** * Bind the next parameter. */ - Use & operator () (std::string_view value, bool notNull = true); - Use & operator () (const unsigned char * data, size_t len, bool notNull = true); - Use & operator () (int64_t value, bool notNull = true); + Use & operator()(std::string_view value, bool notNull = true); + Use & operator()(const unsigned char * data, size_t len, bool notNull = true); + Use & operator()(int64_t value, bool notNull = true); Use & bind(); // null int step(); @@ -134,7 +157,6 @@ struct SQLiteTxn ~SQLiteTxn(); }; - struct SQLiteError : Error { std::string path; @@ -142,21 +164,29 @@ struct SQLiteError : Error int errNo, extendedErrNo, offset; template - [[noreturn]] static void throw_(sqlite3 * db, const std::string & fs, const Args & ... 
args) { + [[noreturn]] static void throw_(sqlite3 * db, const std::string & fs, const Args &... args) + { throw_(db, HintFmt(fs, args...)); } - SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, HintFmt && hf); + SQLiteError(const char * path, const char * errMsg, int errNo, int extendedErrNo, int offset, HintFmt && hf); protected: template - SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, const std::string & fs, const Args & ... args) - : SQLiteError(path, errMsg, errNo, extendedErrNo, offset, HintFmt(fs, args...)) - { } + SQLiteError( + const char * path, + const char * errMsg, + int errNo, + int extendedErrNo, + int offset, + const std::string & fs, + const Args &... args) + : SQLiteError(path, errMsg, errNo, extendedErrNo, offset, HintFmt(fs, args...)) + { + } [[noreturn]] static void throw_(sqlite3 * db, HintFmt && hf); - }; MakeError(SQLiteBusy, SQLiteError); @@ -181,4 +211,4 @@ T retrySQLite(F && fun) } } -} +} // namespace nix diff --git a/src/libstore/include/nix/store/ssh-store.hh b/src/libstore/include/nix/store/ssh-store.hh index fde165445..17fea39d5 100644 --- a/src/libstore/include/nix/store/ssh-store.hh +++ b/src/libstore/include/nix/store/ssh-store.hh @@ -60,4 +60,4 @@ struct MountedSSHStoreConfig : virtual SSHStoreConfig, virtual LocalFSStoreConfi ref openStore() const override; }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/ssh.hh b/src/libstore/include/nix/store/ssh.hh index be9cf0c48..998312ddf 100644 --- a/src/libstore/include/nix/store/ssh.hh +++ b/src/libstore/include/nix/store/ssh.hh @@ -46,7 +46,9 @@ public: std::string_view host, std::string_view keyFile, std::string_view sshPublicHostKey, - bool useMaster, bool compress, Descriptor logFD = INVALID_DESCRIPTOR); + bool useMaster, + bool compress, + Descriptor logFD = INVALID_DESCRIPTOR); struct Connection { @@ -75,9 +77,7 @@ public: * execute). Will not be used when "fake SSHing" to the local * machine. 
*/ - std::unique_ptr startCommand( - Strings && command, - Strings && extraSshArgs = {}); + std::unique_ptr startCommand(Strings && command, Strings && extraSshArgs = {}); }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/store-api.hh b/src/libstore/include/nix/store/store-api.hh index e0a3e67d1..3fbb539a1 100644 --- a/src/libstore/include/nix/store/store-api.hh +++ b/src/libstore/include/nix/store/store-api.hh @@ -23,7 +23,6 @@ #include #include - namespace nix { MakeError(SubstError, Error); @@ -49,11 +48,10 @@ struct SourceAccessor; class NarInfoDiskCache; class Store; - typedef std::map OutputPathMap; - enum CheckSigsFlag : bool { NoCheckSigs = false, CheckSigs = true }; + enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true }; /** @@ -61,14 +59,13 @@ enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true }; */ const uint32_t exportMagic = 0x4558494e; - enum BuildMode : uint8_t { bmNormal, bmRepair, bmCheck }; + enum TrustedFlag : bool { NotTrusted = false, Trusted = true }; struct BuildResult; struct KeyedBuildResult; - typedef std::map> StorePathCAMap; /** @@ -117,7 +114,7 @@ struct StoreConfig : public StoreDirConfig StoreConfig() = delete; - virtual ~StoreConfig() { } + virtual ~StoreConfig() {} static StringSet getDefaultSystemFeatures(); @@ -138,10 +135,13 @@ struct StoreConfig : public StoreDirConfig return std::nullopt; } - Setting pathInfoCacheSize{this, 65536, "path-info-cache-size", - "Size of the in-memory store path metadata cache."}; + Setting pathInfoCacheSize{ + this, 65536, "path-info-cache-size", "Size of the in-memory store path metadata cache."}; - Setting isTrusted{this, false, "trusted", + Setting isTrusted{ + this, + false, + "trusted", R"( Whether paths from this store can be used as substitutes even if they are not signed by a key listed in the @@ -149,18 +149,26 @@ struct StoreConfig : public StoreDirConfig setting. )"}; - Setting priority{this, 0, "priority", + Setting priority{ + this, + 0, + "priority", R"( Priority of this store when used as a [substituter](@docroot@/command-ref/conf-file.md#conf-substituters). A lower value means a higher priority. )"}; - Setting wantMassQuery{this, false, "want-mass-query", + Setting wantMassQuery{ + this, + false, + "want-mass-query", R"( Whether this store can be queried efficiently for path validity when used as a [substituter](@docroot@/command-ref/conf-file.md#conf-substituters). )"}; - Setting systemFeatures{this, getDefaultSystemFeatures(), + Setting systemFeatures{ + this, + getDefaultSystemFeatures(), "system-features", R"( Optional [system features](@docroot@/command-ref/conf-file.md#conf-system-features) available on the system this store uses to build derivations. @@ -200,11 +208,15 @@ public: /** * @note Avoid churn, since we used to inherit from `Config`. */ - operator const Config &() const { return config; } + operator const Config &() const + { + return config; + } protected: - struct PathInfoCacheValue { + struct PathInfoCacheValue + { /** * Time of cache entry creation or update @@ -226,8 +238,9 @@ protected: * Past tense, because a path can only be assumed to exists when * isKnownNow() && didExist() */ - inline bool didExist() { - return value != nullptr; + inline bool didExist() + { + return value != nullptr; } }; @@ -249,7 +262,7 @@ public: */ virtual void init() {}; - virtual ~Store() { } + virtual ~Store() {} /** * @todo move to `StoreConfig` one we store enough information in @@ -290,8 +303,7 @@ public: * Query which of the given paths is valid. 
Optionally, try to * substitute missing paths. */ - virtual StorePathSet queryValidPaths(const StorePathSet & paths, - SubstituteFlag maybeSubstitute = NoSubstitute); + virtual StorePathSet queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute = NoSubstitute); /** * Query the set of all valid paths. Note that for some store @@ -302,7 +314,9 @@ public: * `std::variant` to get rid of this hack. */ virtual StorePathSet queryAllValidPaths() - { unsupported("queryAllValidPaths"); } + { + unsupported("queryAllValidPaths"); + } constexpr static const char * MissingName = "x"; @@ -315,8 +329,7 @@ public: /** * Asynchronous version of queryPathInfo(). */ - void queryPathInfo(const StorePath & path, - Callback> callback) noexcept; + void queryPathInfo(const StorePath & path, Callback> callback) noexcept; /** * Version of queryPathInfo() that only queries the local narinfo cache and not @@ -336,9 +349,7 @@ public: /** * Asynchronous version of queryRealisation(). */ - void queryRealisation(const DrvOutput &, - Callback> callback) noexcept; - + void queryRealisation(const DrvOutput &, Callback> callback) noexcept; /** * Check whether the given valid path info is sufficiently attested, by @@ -356,17 +367,17 @@ public: return true; } - virtual bool realisationIsUntrusted(const Realisation & ) + virtual bool realisationIsUntrusted(const Realisation &) { return true; } protected: - virtual void queryPathInfoUncached(const StorePath & path, - Callback> callback) noexcept = 0; - virtual void queryRealisationUncached(const DrvOutput &, - Callback> callback) noexcept = 0; + virtual void + queryPathInfoUncached(const StorePath & path, Callback> callback) noexcept = 0; + virtual void + queryRealisationUncached(const DrvOutput &, Callback> callback) noexcept = 0; public: @@ -375,7 +386,9 @@ public: * The result is not cleared. */ virtual void queryReferrers(const StorePath & path, StorePathSet & referrers) - { unsupported("queryReferrers"); } + { + unsupported("queryReferrers"); + } /** * @return all currently valid derivations that have `path` as an @@ -385,7 +398,10 @@ public: * was actually used to produce `path`, which may not exist * anymore.) */ - virtual StorePathSet queryValidDerivers(const StorePath & path) { return {}; }; + virtual StorePathSet queryValidDerivers(const StorePath & path) + { + return {}; + }; /** * Query the outputs of the derivation denoted by `path`. @@ -397,9 +413,8 @@ public: * derivation. All outputs are mentioned so ones missing the mapping * are mapped to `std::nullopt`. */ - virtual std::map> queryPartialDerivationOutputMap( - const StorePath & path, - Store * evalStore = nullptr); + virtual std::map> + queryPartialDerivationOutputMap(const StorePath & path, Store * evalStore = nullptr); /** * Like `queryPartialDerivationOutputMap` but only considers @@ -409,8 +424,8 @@ public: * Just a helper function for implementing * `queryPartialDerivationOutputMap`. */ - virtual std::map> queryStaticPartialDerivationOutputMap( - const StorePath & path); + virtual std::map> + queryStaticPartialDerivationOutputMap(const StorePath & path); /** * Query the mapping outputName=>outputPath for the given derivation. @@ -427,7 +442,10 @@ public: /** * Query which of the given paths have substitutes. */ - virtual StorePathSet querySubstitutablePaths(const StorePathSet & paths) { return {}; }; + virtual StorePathSet querySubstitutablePaths(const StorePathSet & paths) + { + return {}; + }; /** * Query substitute info (i.e. 
references, derivers and download @@ -436,14 +454,16 @@ public: * If a path does not have substitute info, it's omitted from the * resulting ‘infos’ map. */ - virtual void querySubstitutablePathInfos(const StorePathCAMap & paths, - SubstitutablePathInfos & infos); + virtual void querySubstitutablePathInfos(const StorePathCAMap & paths, SubstitutablePathInfos & infos); /** * Import a path into the store. */ - virtual void addToStore(const ValidPathInfo & info, Source & narSource, - RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs) = 0; + virtual void addToStore( + const ValidPathInfo & info, + Source & narSource, + RepairFlag repair = NoRepair, + CheckSigsFlag checkSigs = CheckSigs) = 0; /** * A list of paths infos along with a source providing the content @@ -454,16 +474,10 @@ public: /** * Import multiple paths into the store. */ - virtual void addMultipleToStore( - Source & source, - RepairFlag repair = NoRepair, - CheckSigsFlag checkSigs = CheckSigs); + virtual void addMultipleToStore(Source & source, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs); virtual void addMultipleToStore( - PathsSource && pathsToCopy, - Activity & act, - RepairFlag repair = NoRepair, - CheckSigsFlag checkSigs = CheckSigs); + PathsSource && pathsToCopy, Activity & act, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs); /** * Copy the contents of a path to the store and register the @@ -531,9 +545,14 @@ public: * retrieve this information otherwise. */ virtual void registerDrvOutput(const Realisation & output) - { unsupported("registerDrvOutput"); } + { + unsupported("registerDrvOutput"); + } + virtual void registerDrvOutput(const Realisation & output, CheckSigsFlag checkSigs) - { return registerDrvOutput(output); } + { + return registerDrvOutput(output); + } /** * Write a NAR dump of a store path. @@ -601,8 +620,8 @@ public: * up with multiple different versions of dependencies without * explicitly choosing to allow it). */ - virtual BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, - BuildMode buildMode = bmNormal); + virtual BuildResult + buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode = bmNormal); /** * Ensure that a path is valid. If it is not currently valid, it @@ -616,28 +635,32 @@ public: * The root disappears as soon as we exit. */ virtual void addTempRoot(const StorePath & path) - { debug("not creating temporary root, store doesn't support GC"); } + { + debug("not creating temporary root, store doesn't support GC"); + } /** * @return a string representing information about the path that * can be loaded into the database using `nix-store --load-db` or * `nix-store --register-validity`. */ - std::string makeValidityRegistration(const StorePathSet & paths, - bool showDerivers, bool showHash); + std::string makeValidityRegistration(const StorePathSet & paths, bool showDerivers, bool showHash); /** * Optimise the disk space usage of the Nix store by hard-linking files * with the same contents. */ - virtual void optimiseStore() { }; + virtual void optimiseStore() {}; /** * Check the integrity of the Nix store. * * @return true if errors remain. */ - virtual bool verifyStore(bool checkContents, RepairFlag repair = NoRepair) { return false; }; + virtual bool verifyStore(bool checkContents, RepairFlag repair = NoRepair) + { + return false; + }; /** * @return An object to access files in the Nix store. @@ -655,7 +678,9 @@ public: * not verified. 
*/ virtual void addSignatures(const StorePath & storePath, const StringSet & sigs) - { unsupported("addSignatures"); } + { + unsupported("addSignatures"); + } /** * Add signatures to a ValidPathInfo or Realisation using the secret keys @@ -693,13 +718,19 @@ public: * `referrers` relation instead of the `references` relation is * returned. */ - virtual void computeFSClosure(const StorePathSet & paths, - StorePathSet & out, bool flipDirection = false, - bool includeOutputs = false, bool includeDerivers = false); + virtual void computeFSClosure( + const StorePathSet & paths, + StorePathSet & out, + bool flipDirection = false, + bool includeOutputs = false, + bool includeDerivers = false); - void computeFSClosure(const StorePath & path, - StorePathSet & out, bool flipDirection = false, - bool includeOutputs = false, bool includeDerivers = false); + void computeFSClosure( + const StorePath & path, + StorePathSet & out, + bool flipDirection = false, + bool includeOutputs = false, + bool includeDerivers = false); /** * Given a set of paths that are to be built, return the set of @@ -774,7 +805,7 @@ public: * Establish a connection to the store, for store types that have * a notion of connection. Otherwise this is a no-op. */ - virtual void connect() { }; + virtual void connect() {}; /** * Get the protocol version of this store or it's connection. @@ -794,7 +825,6 @@ public: */ virtual std::optional isTrustedClient() = 0; - virtual Path toRealPath(const Path & storePath) { return storePath; @@ -809,9 +839,12 @@ public: * Synchronises the options of the client with those of the daemon * (a no-op when there’s no daemon) */ - virtual void setOptions() { } + virtual void setOptions() {} - virtual std::optional getVersion() { return {}; } + virtual std::optional getVersion() + { + return {}; + } protected: @@ -828,10 +861,8 @@ protected: { throw Unsupported("operation '%s' is not supported by store '%s'", op, getUri()); } - }; - /** * Copy a path from one store to another. */ @@ -842,7 +873,6 @@ void copyStorePath( RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs); - /** * Copy store paths from one store to another. The paths may be copied * in parallel. They are copied in a topologically sorted order (i.e. if @@ -852,14 +882,16 @@ void copyStorePath( * @return a map of what each path was copied to the dstStore as. */ std::map copyPaths( - Store & srcStore, Store & dstStore, + Store & srcStore, + Store & dstStore, const std::set &, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs, SubstituteFlag substitute = NoSubstitute); std::map copyPaths( - Store & srcStore, Store & dstStore, + Store & srcStore, + Store & dstStore, const StorePathSet & paths, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs, @@ -869,14 +901,16 @@ std::map copyPaths( * Copy the closure of `paths` from `srcStore` to `dstStore`. */ void copyClosure( - Store & srcStore, Store & dstStore, + Store & srcStore, + Store & dstStore, const std::set & paths, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs, SubstituteFlag substitute = NoSubstitute); void copyClosure( - Store & srcStore, Store & dstStore, + Store & srcStore, + Store & dstStore, const StorePathSet & paths, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs, @@ -889,7 +923,6 @@ void copyClosure( */ void removeTempRoots(); - /** * Resolve the derived path completely, failing if any derivation output * is unknown. 
@@ -897,25 +930,18 @@ void removeTempRoots(); StorePath resolveDerivedPath(Store &, const SingleDerivedPath &, Store * evalStore = nullptr); OutputPathMap resolveDerivedPath(Store &, const DerivedPath::Built &, Store * evalStore = nullptr); - /** * Display a set of paths in human-readable form (i.e., between quotes * and separated by commas). */ std::string showPaths(const PathSet & paths); - -std::optional decodeValidPathInfo( - const Store & store, - std::istream & str, - std::optional hashGiven = std::nullopt); +std::optional +decodeValidPathInfo(const Store & store, std::istream & str, std::optional hashGiven = std::nullopt); const ContentAddress * getDerivationCA(const BasicDerivation & drv); -std::map drvOutputReferences( - Store & store, - const Derivation & drv, - const StorePath & outputPath, - Store * evalStore = nullptr); +std::map +drvOutputReferences(Store & store, const Derivation & drv, const StorePath & outputPath, Store * evalStore = nullptr); -} +} // namespace nix diff --git a/src/libstore/include/nix/store/store-cast.hh b/src/libstore/include/nix/store/store-cast.hh index 0bf61bb77..89775599a 100644 --- a/src/libstore/include/nix/store/store-cast.hh +++ b/src/libstore/include/nix/store/store-cast.hh @@ -21,4 +21,4 @@ T & require(Store & store) return *castedStore; } -} +} // namespace nix diff --git a/src/libstore/include/nix/store/store-dir-config.hh b/src/libstore/include/nix/store/store-dir-config.hh index 14e3e7db8..bc2944b0b 100644 --- a/src/libstore/include/nix/store/store-dir-config.hh +++ b/src/libstore/include/nix/store/store-dir-config.hh @@ -10,7 +10,6 @@ #include #include - namespace nix { struct SourcePath; @@ -75,13 +74,10 @@ struct MixStoreDirMethods /** * Constructs a unique store path name. */ - StorePath makeStorePath(std::string_view type, - std::string_view hash, std::string_view name) const; - StorePath makeStorePath(std::string_view type, - const Hash & hash, std::string_view name) const; + StorePath makeStorePath(std::string_view type, std::string_view hash, std::string_view name) const; + StorePath makeStorePath(std::string_view type, const Hash & hash, std::string_view name) const; - StorePath makeOutputPath(std::string_view id, - const Hash & hash, std::string_view name) const; + StorePath makeOutputPath(std::string_view id, const Hash & hash, std::string_view name) const; StorePath makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const; @@ -108,7 +104,9 @@ struct StoreDirConfigBase : Config { using Config::Config; - const PathSetting storeDir_{this, settings.nixStore, + const PathSetting storeDir_{ + this, + settings.nixStore, "store", R"( Logical location of the Nix store, usually @@ -134,4 +132,4 @@ struct StoreDirConfig : StoreDirConfigBase, MixStoreDirMethods virtual ~StoreDirConfig() = default; }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/store-open.hh b/src/libstore/include/nix/store/store-open.hh index 7c1cda5be..0e8724990 100644 --- a/src/libstore/include/nix/store/store-open.hh +++ b/src/libstore/include/nix/store/store-open.hh @@ -40,4 +40,4 @@ ref openStore( */ std::list> getDefaultSubstituters(); -} +} // namespace nix diff --git a/src/libstore/include/nix/store/store-reference.hh b/src/libstore/include/nix/store/store-reference.hh index c1b681ba1..fff3b5c5c 100644 --- a/src/libstore/include/nix/store/store-reference.hh +++ b/src/libstore/include/nix/store/store-reference.hh @@ -88,4 +88,4 @@ struct StoreReference */ std::pair splitUriAndParams(const std::string & uri); -} +} // 
namespace nix diff --git a/src/libstore/include/nix/store/store-registration.hh b/src/libstore/include/nix/store/store-registration.hh index 17298118e..8b0f344ba 100644 --- a/src/libstore/include/nix/store/store-registration.hh +++ b/src/libstore/include/nix/store/store-registration.hh @@ -85,4 +85,4 @@ struct RegisterStoreImplementation } }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/uds-remote-store.hh b/src/libstore/include/nix/store/uds-remote-store.hh index e9793a9ee..e4d0187c8 100644 --- a/src/libstore/include/nix/store/uds-remote-store.hh +++ b/src/libstore/include/nix/store/uds-remote-store.hh @@ -7,10 +7,9 @@ namespace nix { -struct UDSRemoteStoreConfig : - std::enable_shared_from_this, - virtual LocalFSStoreConfig, - virtual RemoteStoreConfig +struct UDSRemoteStoreConfig : std::enable_shared_from_this, + virtual LocalFSStoreConfig, + virtual RemoteStoreConfig { // TODO(fzakaria): Delete this constructor once moved over to the factory pattern // outlined in https://github.com/NixOS/nix/issues/10766 @@ -20,14 +19,14 @@ struct UDSRemoteStoreConfig : /** * @param authority is the socket path. */ - UDSRemoteStoreConfig( - std::string_view scheme, - std::string_view authority, - const Params & params); + UDSRemoteStoreConfig(std::string_view scheme, std::string_view authority, const Params & params); UDSRemoteStoreConfig(const Params & params); - static const std::string name() { return "Local Daemon Store"; } + static const std::string name() + { + return "Local Daemon Store"; + } static std::string doc(); @@ -40,14 +39,14 @@ struct UDSRemoteStoreConfig : Path path; static StringSet uriSchemes() - { return {"unix"}; } + { + return {"unix"}; + } ref openStore() const override; }; -struct UDSRemoteStore : - virtual IndirectRootStore, - virtual RemoteStore +struct UDSRemoteStore : virtual IndirectRootStore, virtual RemoteStore { using Config = UDSRemoteStoreConfig; @@ -58,10 +57,14 @@ struct UDSRemoteStore : std::string getUri() override; ref getFSAccessor(bool requireValidPath = true) override - { return LocalFSStore::getFSAccessor(requireValidPath); } + { + return LocalFSStore::getFSAccessor(requireValidPath); + } void narFromPath(const StorePath & path, Sink & sink) override - { LocalFSStore::narFromPath(path, sink); } + { + LocalFSStore::narFromPath(path, sink); + } /** * Implementation of `IndirectRootStore::addIndirectRoot()` which @@ -84,4 +87,4 @@ private: ref openConnection() override; }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/worker-protocol-connection.hh b/src/libstore/include/nix/store/worker-protocol-connection.hh index ce7e9aef4..f7ddfea4f 100644 --- a/src/libstore/include/nix/store/worker-protocol-connection.hh +++ b/src/libstore/include/nix/store/worker-protocol-connection.hh @@ -162,4 +162,4 @@ struct WorkerProto::BasicServerConnection : WorkerProto::BasicConnection void postHandshake(const StoreDirConfig & store, const ClientHandshakeInfo & info); }; -} +} // namespace nix diff --git a/src/libstore/include/nix/store/worker-protocol-impl.hh b/src/libstore/include/nix/store/worker-protocol-impl.hh index 23e6068e9..26f6b9d44 100644 --- a/src/libstore/include/nix/store/worker-protocol-impl.hh +++ b/src/libstore/include/nix/store/worker-protocol-impl.hh @@ -15,14 +15,15 @@ namespace nix { /* protocol-agnostic templates */ -#define WORKER_USE_LENGTH_PREFIX_SERIALISER(TEMPLATE, T) \ - TEMPLATE T WorkerProto::Serialise< T >::read(const StoreDirConfig & store, WorkerProto::ReadConn conn) \ - { \ - return 
LengthPrefixedProtoHelper::read(store, conn); \ - } \ - TEMPLATE void WorkerProto::Serialise< T >::write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const T & t) \ - { \ - LengthPrefixedProtoHelper::write(store, conn, t); \ +#define WORKER_USE_LENGTH_PREFIX_SERIALISER(TEMPLATE, T) \ + TEMPLATE T WorkerProto::Serialise::read(const StoreDirConfig & store, WorkerProto::ReadConn conn) \ + { \ + return LengthPrefixedProtoHelper::read(store, conn); \ + } \ + TEMPLATE void WorkerProto::Serialise::write( \ + const StoreDirConfig & store, WorkerProto::WriteConn conn, const T & t) \ + { \ + LengthPrefixedProtoHelper::write(store, conn, t); \ } WORKER_USE_LENGTH_PREFIX_SERIALISER(template, std::vector) @@ -44,17 +45,15 @@ struct WorkerProto::Serialise { static T read(const StoreDirConfig & store, WorkerProto::ReadConn conn) { - return CommonProto::Serialise::read(store, - CommonProto::ReadConn { .from = conn.from }); + return CommonProto::Serialise::read(store, CommonProto::ReadConn{.from = conn.from}); } + static void write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const T & t) { - CommonProto::Serialise::write(store, - CommonProto::WriteConn { .to = conn.to }, - t); + CommonProto::Serialise::write(store, CommonProto::WriteConn{.to = conn.to}, t); } }; /* protocol-specific templates */ -} +} // namespace nix diff --git a/src/libstore/include/nix/store/worker-protocol.hh b/src/libstore/include/nix/store/worker-protocol.hh index 9630a88c0..c7f8d5891 100644 --- a/src/libstore/include/nix/store/worker-protocol.hh +++ b/src/libstore/include/nix/store/worker-protocol.hh @@ -7,7 +7,6 @@ namespace nix { - #define WORKER_MAGIC_1 0x6e697863 #define WORKER_MAGIC_2 0x6478696f @@ -17,16 +16,14 @@ namespace nix { #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00) #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) - -#define STDERR_NEXT 0x6f6c6d67 -#define STDERR_READ 0x64617461 // data needed from source +#define STDERR_NEXT 0x6f6c6d67 +#define STDERR_READ 0x64617461 // data needed from source #define STDERR_WRITE 0x64617416 // data for sink -#define STDERR_LAST 0x616c7473 +#define STDERR_LAST 0x616c7473 #define STDERR_ERROR 0x63787470 #define STDERR_START_ACTIVITY 0x53545254 -#define STDERR_STOP_ACTIVITY 0x53544f50 -#define STDERR_RESULT 0x52534c54 - +#define STDERR_STOP_ACTIVITY 0x53544f50 +#define STDERR_RESULT 0x52534c54 struct StoreDirConfig; struct Source; @@ -40,7 +37,6 @@ struct UnkeyedValidPathInfo; enum BuildMode : uint8_t; enum TrustedFlag : bool; - /** * The "worker protocol", used by unix:// and ssh-ng:// stores. * @@ -65,7 +61,8 @@ struct WorkerProto * A unidirectional read connection, to be used by the read half of the * canonical serializers below. */ - struct ReadConn { + struct ReadConn + { Source & from; Version version; }; @@ -74,7 +71,8 @@ struct WorkerProto * A unidirectional write connection, to be used by the write half of the * canonical serializers below. 
*/ - struct WriteConn { + struct WriteConn + { Sink & to; Version version; }; @@ -140,11 +138,10 @@ struct WorkerProto static const FeatureSet allFeatures; }; -enum struct WorkerProto::Op : uint64_t -{ +enum struct WorkerProto::Op : uint64_t { IsValidPath = 1, HasSubstitutes = 3, - QueryPathHash = 4, // obsolete + QueryPathHash = 4, // obsolete QueryReferences = 5, // obsolete QueryReferrers = 6, AddToStore = 7, @@ -155,7 +152,7 @@ enum struct WorkerProto::Op : uint64_t AddIndirectRoot = 12, SyncWithGC = 13, FindRoots = 14, - ExportPath = 16, // obsolete + ExportPath = 16, // obsolete QueryDeriver = 18, // obsolete SetOptions = 19, CollectGarbage = 20, @@ -165,7 +162,7 @@ enum struct WorkerProto::Op : uint64_t QueryFailedPaths = 24, ClearFailedPaths = 25, QueryPathInfo = 26, - ImportPaths = 27, // obsolete + ImportPaths = 27, // obsolete QueryDerivationOutputNames = 28, // obsolete QueryPathFromHashPart = 29, QuerySubstitutablePathInfos = 30, @@ -211,7 +208,7 @@ struct WorkerProto::ClientHandshakeInfo */ std::optional remoteTrustsUs; - bool operator == (const ClientHandshakeInfo &) const = default; + bool operator==(const ClientHandshakeInfo &) const = default; }; /** @@ -220,7 +217,7 @@ struct WorkerProto::ClientHandshakeInfo * @todo Switch to using `WorkerProto::Serialise` instead probably. But * this was not done at this time so there would be less churn. */ -inline Sink & operator << (Sink & sink, WorkerProto::Op op) +inline Sink & operator<<(Sink & sink, WorkerProto::Op op) { return sink << static_cast(op); } @@ -230,7 +227,7 @@ inline Sink & operator << (Sink & sink, WorkerProto::Op op) * * @todo Perhaps render known opcodes more nicely. */ -inline std::ostream & operator << (std::ostream & s, WorkerProto::Op op) +inline std::ostream & operator<<(std::ostream & s, WorkerProto::Op op) { return s << static_cast(op); } @@ -245,10 +242,10 @@ inline std::ostream & operator << (std::ostream & s, WorkerProto::Op op) * be legal specialization syntax. See below for what that looks like in * practice. 
*/ -#define DECLARE_WORKER_SERIALISER(T) \ - struct WorkerProto::Serialise< T > \ - { \ - static T read(const StoreDirConfig & store, WorkerProto::ReadConn conn); \ +#define DECLARE_WORKER_SERIALISER(T) \ + struct WorkerProto::Serialise \ + { \ + static T read(const StoreDirConfig & store, WorkerProto::ReadConn conn); \ static void write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const T & t); \ }; @@ -283,4 +280,4 @@ template DECLARE_WORKER_SERIALISER(std::map); #undef COMMA_ -} +} // namespace nix diff --git a/src/libstore/indirect-root-store.cc b/src/libstore/indirect-root-store.cc index e23c01e5d..b882b2568 100644 --- a/src/libstore/indirect-root-store.cc +++ b/src/libstore/indirect-root-store.cc @@ -42,4 +42,4 @@ Path IndirectRootStore::addPermRoot(const StorePath & storePath, const Path & _g return gcRoot; } -} +} // namespace nix diff --git a/src/libstore/keys.cc b/src/libstore/keys.cc index 9abea9520..8b02e7a66 100644 --- a/src/libstore/keys.cc +++ b/src/libstore/keys.cc @@ -28,4 +28,4 @@ PublicKeys getDefaultPublicKeys() return publicKeys; } -} +} // namespace nix diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 9ec9e6eec..09bea1ca3 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -16,10 +16,7 @@ namespace nix { -LegacySSHStoreConfig::LegacySSHStoreConfig( - std::string_view scheme, - std::string_view authority, - const Params & params) +LegacySSHStoreConfig::LegacySSHStoreConfig(std::string_view scheme, std::string_view authority, const Params & params) : StoreConfig(params) , CommonSSHStoreConfig(scheme, authority, params) { @@ -28,34 +25,31 @@ LegacySSHStoreConfig::LegacySSHStoreConfig( std::string LegacySSHStoreConfig::doc() { return - #include "legacy-ssh-store.md" - ; +#include "legacy-ssh-store.md" + ; } - struct LegacySSHStore::Connection : public ServeProto::BasicClientConnection { std::unique_ptr sshConn; bool good = true; }; - LegacySSHStore::LegacySSHStore(ref config) : Store{*config} , config{config} - , connections(make_ref>( - std::max(1, (int) config->maxConnections), - [this]() { return openConnection(); }, - [](const ref & r) { return r->good; } - )) + , connections( + make_ref>( + std::max(1, (int) config->maxConnections), + [this]() { return openConnection(); }, + [](const ref & r) { return r->good; })) , master(config->createSSHMaster( - // Use SSH master only if using more than 1 connection. - connections->capacity() > 1, - config->logFD)) + // Use SSH master only if using more than 1 connection. + connections->capacity() > 1, + config->logFD)) { } - ref LegacySSHStore::openConnection() { auto conn = make_ref(); @@ -76,8 +70,8 @@ ref LegacySSHStore::openConnection() StringSink saved; TeeSource tee(conn->from, saved); try { - conn->remoteVersion = ServeProto::BasicClientConnection::handshake( - conn->to, tee, SERVE_PROTOCOL_VERSION, config->host); + conn->remoteVersion = + ServeProto::BasicClientConnection::handshake(conn->to, tee, SERVE_PROTOCOL_VERSION, config->host); } catch (SerialisationError & e) { // in.close(): Don't let the remote block on us not writing. 
conn->sshConn->in.close(); @@ -85,8 +79,7 @@ ref LegacySSHStore::openConnection() NullSink nullSink; tee.drainInto(nullSink); } - throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'", - config->host, chomp(saved.s)); + throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'", config->host, chomp(saved.s)); } catch (EndOfFile & e) { throw Error("cannot connect to '%1%'", config->host); } @@ -94,14 +87,12 @@ ref LegacySSHStore::openConnection() return conn; }; - std::string LegacySSHStore::getUri() { return *Config::uriSchemes().begin() + "://" + config->host; } -std::map LegacySSHStore::queryPathInfosUncached( - const StorePathSet & paths) +std::map LegacySSHStore::queryPathInfosUncached(const StorePathSet & paths) { auto conn(connections->get()); @@ -120,8 +111,8 @@ std::map LegacySSHStore::queryPathInfosUncached return infos; } -void LegacySSHStore::queryPathInfoUncached(const StorePath & path, - Callback> callback) noexcept +void LegacySSHStore::queryPathInfoUncached( + const StorePath & path, Callback> callback) noexcept { try { auto infos = queryPathInfosUncached({path}); @@ -133,20 +124,17 @@ void LegacySSHStore::queryPathInfoUncached(const StorePath & path, auto & [path2, info] = *infos.begin(); assert(path == path2); - return callback(std::make_shared( - std::move(path), - std::move(info) - )); + return callback(std::make_shared(std::move(path), std::move(info))); } default: throw Error("More path infos returned than queried"); } - } catch (...) { callback.rethrow(); } + } catch (...) { + callback.rethrow(); + } } - -void LegacySSHStore::addToStore(const ValidPathInfo & info, Source & source, - RepairFlag repair, CheckSigsFlag checkSigs) +void LegacySSHStore::addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs) { debug("adding path '%s' to remote host '%s'", printStorePath(info.path), config->host); @@ -154,18 +142,12 @@ void LegacySSHStore::addToStore(const ValidPathInfo & info, Source & source, if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 5) { - conn->to - << ServeProto::Command::AddToStoreNar - << printStorePath(info.path) - << (info.deriver ? printStorePath(*info.deriver) : "") - << info.narHash.to_string(HashFormat::Base16, false); + conn->to << ServeProto::Command::AddToStoreNar << printStorePath(info.path) + << (info.deriver ? printStorePath(*info.deriver) : "") + << info.narHash.to_string(HashFormat::Base16, false); ServeProto::write(*this, *conn, info.references); - conn->to - << info.registrationTime - << info.narSize - << info.ultimate - << info.sigs - << renderContentAddress(info.ca); + conn->to << info.registrationTime << info.narSize << info.ultimate << info.sigs + << renderContentAddress(info.ca); try { copyNAR(source, conn->to); } catch (...) { @@ -186,35 +168,24 @@ void LegacySSHStore::addToStore(const ValidPathInfo & info, Source & source, conn->good = false; throw; } - sink - << exportMagic - << printStorePath(info.path); + sink << exportMagic << printStorePath(info.path); ServeProto::write(*this, *conn, info.references); - sink - << (info.deriver ? printStorePath(*info.deriver) : "") - << 0 - << 0; + sink << (info.deriver ? 
printStorePath(*info.deriver) : "") << 0 << 0; }); - } } - void LegacySSHStore::narFromPath(const StorePath & path, Sink & sink) { - narFromPath(path, [&](auto & source) { - copyNAR(source, sink); - }); + narFromPath(path, [&](auto & source) { copyNAR(source, sink); }); } - void LegacySSHStore::narFromPath(const StorePath & path, std::function fun) { auto conn(connections->get()); conn->narFromPath(*this, path, fun); } - static ServeProto::BuildOptions buildSettings() { return { @@ -227,9 +198,7 @@ static ServeProto::BuildOptions buildSettings() }; } - -BuildResult LegacySSHStore::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, - BuildMode buildMode) +BuildResult LegacySSHStore::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode) { auto conn(connections->get()); @@ -239,20 +208,17 @@ BuildResult LegacySSHStore::buildDerivation(const StorePath & drvPath, const Bas } std::function LegacySSHStore::buildDerivationAsync( - const StorePath & drvPath, const BasicDerivation & drv, - const ServeProto::BuildOptions & options) + const StorePath & drvPath, const BasicDerivation & drv, const ServeProto::BuildOptions & options) { // Until we have C++23 std::move_only_function auto conn = std::make_shared::Handle>(connections->get()); (*conn)->putBuildDerivationRequest(*this, drvPath, drv, options); - return [this,conn]() -> BuildResult { - return (*conn)->getBuildDerivationResponse(*this); - }; + return [this, conn]() -> BuildResult { return (*conn)->getBuildDerivationResponse(*this); }; } - -void LegacySSHStore::buildPaths(const std::vector & drvPaths, BuildMode buildMode, std::shared_ptr evalStore) +void LegacySSHStore::buildPaths( + const std::vector & drvPaths, BuildMode buildMode, std::shared_ptr evalStore) { if (evalStore && evalStore.get() != this) throw Error("building on an SSH store is incompatible with '--eval-store'"); @@ -263,17 +229,20 @@ void LegacySSHStore::buildPaths(const std::vector & drvPaths, Build Strings ss; for (auto & p : drvPaths) { auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p); - std::visit(overloaded { - [&](const StorePathWithOutputs & s) { - ss.push_back(s.to_string(*this)); + std::visit( + overloaded{ + [&](const StorePathWithOutputs & s) { ss.push_back(s.to_string(*this)); }, + [&](const StorePath & drvPath) { + throw Error( + "wanted to fetch '%s' but the legacy ssh protocol doesn't support merely substituting drv files via the build paths command. It would build them instead. Try using ssh-ng://", + printStorePath(drvPath)); + }, + [&](std::monostate) { + throw Error( + "wanted build derivation that is itself a build product, but the legacy ssh protocol doesn't support that. Try using ssh-ng://"); + }, }, - [&](const StorePath & drvPath) { - throw Error("wanted to fetch '%s' but the legacy ssh protocol doesn't support merely substituting drv files via the build paths command. It would build them instead. Try using ssh-ng://", printStorePath(drvPath)); - }, - [&](std::monostate) { - throw Error("wanted build derivation that is itself a build product, but the legacy ssh protocol doesn't support that. 
Try using ssh-ng://"); - }, - }, sOrDrvPath); + sOrDrvPath); } conn->to << ss; @@ -290,10 +259,8 @@ void LegacySSHStore::buildPaths(const std::vector & drvPaths, Build } } - -void LegacySSHStore::computeFSClosure(const StorePathSet & paths, - StorePathSet & out, bool flipDirection, - bool includeOutputs, bool includeDerivers) +void LegacySSHStore::computeFSClosure( + const StorePathSet & paths, StorePathSet & out, bool flipDirection, bool includeOutputs, bool includeDerivers) { if (flipDirection || includeDerivers) { Store::computeFSClosure(paths, out, flipDirection, includeOutputs, includeDerivers); @@ -302,9 +269,7 @@ void LegacySSHStore::computeFSClosure(const StorePathSet & paths, auto conn(connections->get()); - conn->to - << ServeProto::Command::QueryClosure - << includeOutputs; + conn->to << ServeProto::Command::QueryClosure << includeOutputs; ServeProto::write(*this, *conn, paths); conn->to.flush(); @@ -312,25 +277,18 @@ void LegacySSHStore::computeFSClosure(const StorePathSet & paths, out.insert(i); } - -StorePathSet LegacySSHStore::queryValidPaths(const StorePathSet & paths, - SubstituteFlag maybeSubstitute) +StorePathSet LegacySSHStore::queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute) { auto conn(connections->get()); - return conn->queryValidPaths(*this, - false, paths, maybeSubstitute); + return conn->queryValidPaths(*this, false, paths, maybeSubstitute); } - -StorePathSet LegacySSHStore::queryValidPaths(const StorePathSet & paths, - bool lock, SubstituteFlag maybeSubstitute) +StorePathSet LegacySSHStore::queryValidPaths(const StorePathSet & paths, bool lock, SubstituteFlag maybeSubstitute) { auto conn(connections->get()); - return conn->queryValidPaths(*this, - lock, paths, maybeSubstitute); + return conn->queryValidPaths(*this, lock, paths, maybeSubstitute); } - void LegacySSHStore::addMultipleToStoreLegacy(Store & srcStore, const StorePathSet & paths) { auto conn(connections->get()); @@ -347,20 +305,17 @@ void LegacySSHStore::addMultipleToStoreLegacy(Store & srcStore, const StorePathS throw Error("remote machine failed to import closure"); } - void LegacySSHStore::connect() { auto conn(connections->get()); } - unsigned int LegacySSHStore::getProtocol() { auto conn(connections->get()); return conn->remoteVersion; } - pid_t LegacySSHStore::getConnectionPid() { auto conn(connections->get()); @@ -372,7 +327,6 @@ pid_t LegacySSHStore::getConnectionPid() #endif } - LegacySSHStore::ConnectionStats LegacySSHStore::getConnectionStats() { auto conn(connections->get()); @@ -382,7 +336,6 @@ LegacySSHStore::ConnectionStats LegacySSHStore::getConnectionStats() }; } - /** * The legacy ssh protocol doesn't support checking for trusted-user. * Try using ssh-ng:// instead if you want to know. 
@@ -392,12 +345,11 @@ std::optional LegacySSHStore::isTrustedClient() return std::nullopt; } - -ref LegacySSHStore::Config::openStore() const { +ref LegacySSHStore::Config::openStore() const +{ return make_ref(ref{shared_from_this()}); } - static RegisterStoreImplementation regLegacySSHStore; -} +} // namespace nix diff --git a/src/libstore/linux/include/nix/store/personality.hh b/src/libstore/linux/include/nix/store/personality.hh index 6a6376f8f..01bf2bf33 100644 --- a/src/libstore/linux/include/nix/store/personality.hh +++ b/src/libstore/linux/include/nix/store/personality.hh @@ -8,5 +8,3 @@ namespace nix::linux { void setPersonality(std::string_view system); } - - diff --git a/src/libstore/linux/personality.cc b/src/libstore/linux/personality.cc index e87006d86..d268706b2 100644 --- a/src/libstore/linux/personality.cc +++ b/src/libstore/linux/personality.cc @@ -10,32 +10,31 @@ namespace nix::linux { void setPersonality(std::string_view system) { - /* Change the personality to 32-bit if we're doing an - i686-linux build on an x86_64-linux machine. */ - struct utsname utsbuf; - uname(&utsbuf); - if ((system == "i686-linux" - && (std::string_view(NIX_LOCAL_SYSTEM) == "x86_64-linux" - || (!strcmp(utsbuf.sysname, "Linux") && !strcmp(utsbuf.machine, "x86_64")))) - || system == "armv7l-linux" - || system == "armv6l-linux" - || system == "armv5tel-linux") - { - if (personality(PER_LINUX32) == -1) - throw SysError("cannot set 32-bit personality"); - } + /* Change the personality to 32-bit if we're doing an + i686-linux build on an x86_64-linux machine. */ + struct utsname utsbuf; + uname(&utsbuf); + if ((system == "i686-linux" + && (std::string_view(NIX_LOCAL_SYSTEM) == "x86_64-linux" + || (!strcmp(utsbuf.sysname, "Linux") && !strcmp(utsbuf.machine, "x86_64")))) + || system == "armv7l-linux" || system == "armv6l-linux" || system == "armv5tel-linux") { + if (personality(PER_LINUX32) == -1) + throw SysError("cannot set 32-bit personality"); + } - /* Impersonate a Linux 2.6 machine to get some determinism in - builds that depend on the kernel version. */ - if ((system == "i686-linux" || system == "x86_64-linux") && settings.impersonateLinux26) { - int cur = personality(0xffffffff); - if (cur != -1) personality(cur | 0x0020000 /* == UNAME26 */); - } - - /* Disable address space randomization for improved - determinism. */ + /* Impersonate a Linux 2.6 machine to get some determinism in + builds that depend on the kernel version. */ + if ((system == "i686-linux" || system == "x86_64-linux") && settings.impersonateLinux26) { int cur = personality(0xffffffff); - if (cur != -1) personality(cur | ADDR_NO_RANDOMIZE); + if (cur != -1) + personality(cur | 0x0020000 /* == UNAME26 */); + } + + /* Disable address space randomization for improved + determinism. 
*/ + int cur = personality(0xffffffff); + if (cur != -1) + personality(cur | ADDR_NO_RANDOMIZE); } -} +} // namespace nix::linux diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc index 03a9bd055..f7511fdce 100644 --- a/src/libstore/local-binary-cache-store.cc +++ b/src/libstore/local-binary-cache-store.cc @@ -9,26 +9,21 @@ namespace nix { LocalBinaryCacheStoreConfig::LocalBinaryCacheStoreConfig( - std::string_view scheme, - PathView binaryCacheDir, - const StoreReference::Params & params) + std::string_view scheme, PathView binaryCacheDir, const StoreReference::Params & params) : Store::Config{params} , BinaryCacheStoreConfig{params} , binaryCacheDir(binaryCacheDir) { } - std::string LocalBinaryCacheStoreConfig::doc() { return - #include "local-binary-cache-store.md" - ; +#include "local-binary-cache-store.md" + ; } - -struct LocalBinaryCacheStore : - virtual BinaryCacheStore +struct LocalBinaryCacheStore : virtual BinaryCacheStore { using Config = LocalBinaryCacheStoreConfig; @@ -52,7 +47,8 @@ protected: bool fileExists(const std::string & path) override; - void upsertFile(const std::string & path, + void upsertFile( + const std::string & path, std::shared_ptr> istream, const std::string & mimeType) override { @@ -84,12 +80,9 @@ protected: for (auto & entry : DirectoryIterator{config->binaryCacheDir}) { checkInterrupt(); auto name = entry.path().filename().string(); - if (name.size() != 40 || - !hasSuffix(name, ".narinfo")) + if (name.size() != 40 || !hasSuffix(name, ".narinfo")) continue; - paths.insert(parseStorePath( - storeDir + "/" + name.substr(0, name.size() - 8) - + "-" + MissingName)); + paths.insert(parseStorePath(storeDir + "/" + name.substr(0, name.size() - 8) + "-" + MissingName)); } return paths; @@ -124,15 +117,15 @@ StringSet LocalBinaryCacheStoreConfig::uriSchemes() return {"file"}; } -ref LocalBinaryCacheStoreConfig::openStore() const { - auto store = make_ref(ref{ - // FIXME we shouldn't actually need a mutable config - std::const_pointer_cast(shared_from_this()) - }); +ref LocalBinaryCacheStoreConfig::openStore() const +{ + auto store = make_ref( + ref{// FIXME we shouldn't actually need a mutable config + std::const_pointer_cast(shared_from_this())}); store->init(); return store; } static RegisterStoreImplementation regLocalBinaryCacheStore; -} +} // namespace nix diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc index add3b04d2..fd1fe4459 100644 --- a/src/libstore/local-fs-store.cc +++ b/src/libstore/local-fs-store.cc @@ -13,12 +13,10 @@ LocalFSStoreConfig::LocalFSStoreConfig(PathView rootDir, const Params & params) // Default `?root` from `rootDir` if non set // FIXME don't duplicate description once we don't have root setting , rootDir{ - this, - !rootDir.empty() && params.count("root") == 0 - ? (std::optional{rootDir}) - : std::nullopt, - "root", - "Directory prefixed to all other paths."} + this, + !rootDir.empty() && params.count("root") == 0 ? (std::optional{rootDir}) : std::nullopt, + "root", + "Directory prefixed to all other paths."} { } @@ -40,7 +38,6 @@ struct LocalStoreAccessor : PosixSourceAccessor { } - void requireStoreObject(const CanonPath & path) { auto [storePath, rest] = store->toStorePath(store->storeDir + path.abs()); @@ -53,7 +50,7 @@ struct LocalStoreAccessor : PosixSourceAccessor /* Also allow `path` to point to the entire store, which is needed for resolving symlinks. 
*/ if (path.isRoot()) - return Stat{ .type = tDirectory }; + return Stat{.type = tDirectory}; requireStoreObject(path); return PosixSourceAccessor::maybeLstat(path); @@ -65,10 +62,7 @@ struct LocalStoreAccessor : PosixSourceAccessor return PosixSourceAccessor::readDirectory(path); } - void readFile( - const CanonPath & path, - Sink & sink, - std::function sizeCallback) override + void readFile(const CanonPath & path, Sink & sink, std::function sizeCallback) override { requireStoreObject(path); return PosixSourceAccessor::readFile(path, sink, sizeCallback); @@ -83,9 +77,8 @@ struct LocalStoreAccessor : PosixSourceAccessor ref LocalFSStore::getFSAccessor(bool requireValidPath) { - return make_ref(ref( - std::dynamic_pointer_cast(shared_from_this())), - requireValidPath); + return make_ref( + ref(std::dynamic_pointer_cast(shared_from_this())), requireValidPath); } void LocalFSStore::narFromPath(const StorePath & path, Sink & sink) @@ -104,9 +97,8 @@ std::optional LocalFSStore::getBuildLogExact(const StorePath & path for (int j = 0; j < 2; j++) { Path logPath = - j == 0 - ? fmt("%s/%s/%s/%s", config.logDir.get(), drvsLogDir, baseName.substr(0, 2), baseName.substr(2)) - : fmt("%s/%s/%s", config.logDir.get(), drvsLogDir, baseName); + j == 0 ? fmt("%s/%s/%s/%s", config.logDir.get(), drvsLogDir, baseName.substr(0, 2), baseName.substr(2)) + : fmt("%s/%s/%s", config.logDir.get(), drvsLogDir, baseName); Path logBz2Path = logPath + ".bz2"; if (pathExists(logPath)) @@ -115,12 +107,12 @@ std::optional LocalFSStore::getBuildLogExact(const StorePath & path else if (pathExists(logBz2Path)) { try { return decompress("bzip2", readFile(logBz2Path)); - } catch (Error &) { } + } catch (Error &) { + } } - } return std::nullopt; } -} +} // namespace nix diff --git a/src/libstore/local-overlay-store.cc b/src/libstore/local-overlay-store.cc index e40c5fa6e..1e8d1429c 100644 --- a/src/libstore/local-overlay-store.cc +++ b/src/libstore/local-overlay-store.cc @@ -13,24 +13,21 @@ namespace nix { std::string LocalOverlayStoreConfig::doc() { return - #include "local-overlay-store.md" +#include "local-overlay-store.md" ; } ref LocalOverlayStoreConfig::openStore() const { - return make_ref(ref{ - std::dynamic_pointer_cast(shared_from_this()) - }); + return make_ref( + ref{std::dynamic_pointer_cast(shared_from_this())}); } - Path LocalOverlayStoreConfig::toUpperPath(const StorePath & path) const { return upperLayer + "/" + path.to_string(); } - LocalOverlayStore::LocalOverlayStore(ref config) : Store{*config} , LocalFSStore{*config} @@ -60,13 +57,11 @@ LocalOverlayStore::LocalOverlayStore(ref config) debug("expected lowerdir: %s", expectedLowerDir); debug("expected upperdir: %s", config->upperLayer); debug("actual mount: %s", mountInfo); - throw Error("overlay filesystem '%s' mounted incorrectly", - config->realStoreDir.get()); + throw Error("overlay filesystem '%s' mounted incorrectly", config->realStoreDir.get()); } } } - void LocalOverlayStore::registerDrvOutput(const Realisation & info) { // First do queryRealisation on lower layer to populate DB @@ -77,14 +72,13 @@ void LocalOverlayStore::registerDrvOutput(const Realisation & info) LocalStore::registerDrvOutput(info); } - -void LocalOverlayStore::queryPathInfoUncached(const StorePath & path, - Callback> callback) noexcept +void LocalOverlayStore::queryPathInfoUncached( + const StorePath & path, Callback> callback) noexcept { auto callbackPtr = std::make_shared(std::move(callback)); - LocalStore::queryPathInfoUncached(path, - {[this, path, callbackPtr](std::future> fut) 
{ + LocalStore::queryPathInfoUncached( + path, {[this, path, callbackPtr](std::future> fut) { try { auto info = fut.get(); if (info) @@ -93,25 +87,23 @@ void LocalOverlayStore::queryPathInfoUncached(const StorePath & path, return callbackPtr->rethrow(); } // If we don't have it, check lower store - lowerStore->queryPathInfo(path, - {[path, callbackPtr](std::future> fut) { - try { - (*callbackPtr)(fut.get().get_ptr()); - } catch (...) { - return callbackPtr->rethrow(); - } - }}); + lowerStore->queryPathInfo(path, {[path, callbackPtr](std::future> fut) { + try { + (*callbackPtr)(fut.get().get_ptr()); + } catch (...) { + return callbackPtr->rethrow(); + } + }}); }}); } - -void LocalOverlayStore::queryRealisationUncached(const DrvOutput & drvOutput, - Callback> callback) noexcept +void LocalOverlayStore::queryRealisationUncached( + const DrvOutput & drvOutput, Callback> callback) noexcept { auto callbackPtr = std::make_shared(std::move(callback)); - LocalStore::queryRealisationUncached(drvOutput, - {[this, drvOutput, callbackPtr](std::future> fut) { + LocalStore::queryRealisationUncached( + drvOutput, {[this, drvOutput, callbackPtr](std::future> fut) { try { auto info = fut.get(); if (info) @@ -120,8 +112,8 @@ void LocalOverlayStore::queryRealisationUncached(const DrvOutput & drvOutput, return callbackPtr->rethrow(); } // If we don't have it, check lower store - lowerStore->queryRealisation(drvOutput, - {[callbackPtr](std::future> fut) { + lowerStore->queryRealisation( + drvOutput, {[callbackPtr](std::future> fut) { try { (*callbackPtr)(fut.get()); } catch (...) { @@ -131,11 +123,11 @@ void LocalOverlayStore::queryRealisationUncached(const DrvOutput & drvOutput, }}); } - bool LocalOverlayStore::isValidPathUncached(const StorePath & path) { auto res = LocalStore::isValidPathUncached(path); - if (res) return res; + if (res) + return res; res = lowerStore->isValidPath(path); if (res) { // Get path info from lower store so upper DB genuinely has it. 
@@ -149,20 +141,17 @@ bool LocalOverlayStore::isValidPathUncached(const StorePath & path) return res; } - void LocalOverlayStore::queryReferrers(const StorePath & path, StorePathSet & referrers) { LocalStore::queryReferrers(path, referrers); lowerStore->queryReferrers(path, referrers); } - void LocalOverlayStore::queryGCReferrers(const StorePath & path, StorePathSet & referrers) { LocalStore::queryReferrers(path, referrers); } - StorePathSet LocalOverlayStore::queryValidDerivers(const StorePath & path) { auto res = LocalStore::queryValidDerivers(path); @@ -171,7 +160,6 @@ StorePathSet LocalOverlayStore::queryValidDerivers(const StorePath & path) return res; } - std::optional LocalOverlayStore::queryPathFromHashPart(const std::string & hashPart) { auto res = LocalStore::queryPathFromHashPart(hashPart); @@ -181,7 +169,6 @@ std::optional LocalOverlayStore::queryPathFromHashPart(const std::str return lowerStore->queryPathFromHashPart(hashPart); } - void LocalOverlayStore::registerValidPaths(const ValidPathInfos & infos) { // First, get any from lower store so we merge @@ -200,7 +187,6 @@ void LocalOverlayStore::registerValidPaths(const ValidPathInfos & infos) LocalStore::registerValidPaths(infos); } - void LocalOverlayStore::collectGarbage(const GCOptions & options, GCResults & results) { LocalStore::collectGarbage(options, results); @@ -208,7 +194,6 @@ void LocalOverlayStore::collectGarbage(const GCOptions & options, GCResults & re remountIfNecessary(); } - void LocalOverlayStore::deleteStorePath(const Path & path, uint64_t & bytesFreed) { auto mergedDir = config->realStoreDir.get() + "/"; @@ -236,7 +221,6 @@ void LocalOverlayStore::deleteStorePath(const Path & path, uint64_t & bytesFreed } } - void LocalOverlayStore::optimiseStore() { Activity act(*logger, actOptimiseStore); @@ -261,7 +245,6 @@ void LocalOverlayStore::optimiseStore() remountIfNecessary(); } - LocalStore::VerificationResult LocalOverlayStore::verifyAllValidPaths(RepairFlag repair) { StorePathSet done; @@ -282,10 +265,10 @@ LocalStore::VerificationResult LocalOverlayStore::verifyAllValidPaths(RepairFlag }; } - void LocalOverlayStore::remountIfNecessary() { - if (!_remountRequired) return; + if (!_remountRequired) + return; if (config->remountHook.get().empty()) { warn("'%s' needs remounting, set remount-hook to do this automatically", config->realStoreDir.get()); @@ -296,7 +279,6 @@ void LocalOverlayStore::remountIfNecessary() _remountRequired = false; } - static RegisterStoreImplementation regLocalOverlayStore; -} +} // namespace nix diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 0d2d96e61..49c499e3f 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -37,17 +37,17 @@ #include #ifndef _WIN32 -# include +# include #endif #ifdef __linux__ -# include -# include -# include +# include +# include +# include #endif #ifdef __CYGWIN__ -# include +# include #endif #include @@ -58,13 +58,9 @@ #include "store-config-private.hh" - namespace nix { -LocalStoreConfig::LocalStoreConfig( - std::string_view scheme, - std::string_view authority, - const Params & params) +LocalStoreConfig::LocalStoreConfig(std::string_view scheme, std::string_view authority, const Params & params) : StoreConfig(params) , LocalFSStoreConfig(authority, params) { @@ -73,18 +69,15 @@ LocalStoreConfig::LocalStoreConfig( std::string LocalStoreConfig::doc() { return - #include "local-store.md" +#include "local-store.md" ; } Path LocalBuildStoreConfig::getBuildDir() const { - return - 
settings.buildDir.get().has_value() - ? *settings.buildDir.get() - : buildDir.get().has_value() - ? *buildDir.get() - : stateDir.get() + "/builds"; + return settings.buildDir.get().has_value() ? *settings.buildDir.get() + : buildDir.get().has_value() ? *buildDir.get() + : stateDir.get() + "/builds"; } ref LocalStore::Config::openStore() const @@ -92,7 +85,8 @@ ref LocalStore::Config::openStore() const return make_ref(ref{shared_from_this()}); } -struct LocalStore::State::Stmts { +struct LocalStore::State::Stmts +{ /* Some precompiled SQLite statements. */ SQLiteStmt RegisterValidPath; SQLiteStmt UpdatePathInfo; @@ -164,7 +158,8 @@ LocalStore::LocalStore(ref config) struct group * gr = getgrnam(settings.buildUsersGroup.get().c_str()); if (!gr) - printError("warning: the group '%1%' specified in 'build-users-group' does not exist", settings.buildUsersGroup); + printError( + "warning: the group '%1%' specified in 'build-users-group' does not exist", settings.buildUsersGroup); else if (!config->readOnly) { struct stat st; if (stat(config->realStoreDir.get().c_str(), &st)) @@ -187,9 +182,9 @@ LocalStore::LocalStore(ref config) while (path != root) { if (std::filesystem::is_symlink(path)) throw Error( - "the path '%1%' is a symlink; " - "this is not allowed for the Nix store and its parent directories", - path); + "the path '%1%' is a symlink; " + "this is not allowed for the Nix store and its parent directories", + path); path = path.parent_path(); } } @@ -200,14 +195,15 @@ LocalStore::LocalStore(ref config) before doing a garbage collection. */ try { struct stat st; - if (stat(reservedPath.c_str(), &st) == -1 || - st.st_size != settings.reservedSize) - { - AutoCloseFD fd = toDescriptor(open(reservedPath.c_str(), O_WRONLY | O_CREAT + if (stat(reservedPath.c_str(), &st) == -1 || st.st_size != settings.reservedSize) { + AutoCloseFD fd = toDescriptor(open( + reservedPath.c_str(), + O_WRONLY | O_CREAT #ifndef _WIN32 - | O_CLOEXEC + | O_CLOEXEC #endif - , 0600)); + , + 0600)); int res = -1; #if HAVE_POSIX_FALLOCATE res = posix_fallocate(fd.get(), 0, settings.reservedSize); @@ -245,14 +241,13 @@ LocalStore::LocalStore(ref config) if (config->readOnly && curSchema < nixSchemaVersion) { debug("current schema version: %d", curSchema); debug("supported schema version: %d", nixSchemaVersion); - throw Error(curSchema == 0 ? - "database does not exist, and cannot be created in read-only mode" : - "database schema needs migrating, but this cannot be done in read-only mode"); + throw Error( + curSchema == 0 ? 
"database does not exist, and cannot be created in read-only mode" + : "database schema needs migrating, but this cannot be done in read-only mode"); } if (curSchema > nixSchemaVersion) - throw Error("current Nix store schema is version %1%, but I only support %2%", - curSchema, nixSchemaVersion); + throw Error("current Nix store schema is version %1%, but I only support %2%", curSchema, nixSchemaVersion); else if (curSchema == 0) { /* new store */ curSchema = nixSchemaVersion; @@ -275,7 +270,8 @@ LocalStore::LocalStore(ref config) if (!lockFile(globalLock.get(), ltWrite, false)) { printInfo("waiting for exclusive access to the Nix store..."); - lockFile(globalLock.get(), ltNone, false); // We have acquired a shared lock; release it to prevent deadlocks + lockFile( + globalLock.get(), ltNone, false); // We have acquired a shared lock; release it to prevent deadlocks lockFile(globalLock.get(), ltWrite, true); } @@ -313,44 +309,46 @@ LocalStore::LocalStore(ref config) lockFile(globalLock.get(), ltRead, true); } - else openDB(*state, false); + else + openDB(*state, false); upgradeDBSchema(*state); /* Prepare SQL statements. */ - state->stmts->RegisterValidPath.create(state->db, + state->stmts->RegisterValidPath.create( + state->db, "insert into ValidPaths (path, hash, registrationTime, deriver, narSize, ultimate, sigs, ca) values (?, ?, ?, ?, ?, ?, ?, ?);"); - state->stmts->UpdatePathInfo.create(state->db, - "update ValidPaths set narSize = ?, hash = ?, ultimate = ?, sigs = ?, ca = ? where path = ?;"); - state->stmts->AddReference.create(state->db, - "insert or replace into Refs (referrer, reference) values (?, ?);"); - state->stmts->QueryPathInfo.create(state->db, + state->stmts->UpdatePathInfo.create( + state->db, "update ValidPaths set narSize = ?, hash = ?, ultimate = ?, sigs = ?, ca = ? 
where path = ?;"); + state->stmts->AddReference.create(state->db, "insert or replace into Refs (referrer, reference) values (?, ?);"); + state->stmts->QueryPathInfo.create( + state->db, "select id, hash, registrationTime, deriver, narSize, ultimate, sigs, ca from ValidPaths where path = ?;"); - state->stmts->QueryReferences.create(state->db, - "select path from Refs join ValidPaths on reference = id where referrer = ?;"); - state->stmts->QueryReferrers.create(state->db, + state->stmts->QueryReferences.create( + state->db, "select path from Refs join ValidPaths on reference = id where referrer = ?;"); + state->stmts->QueryReferrers.create( + state->db, "select path from Refs join ValidPaths on referrer = id where reference = (select id from ValidPaths where path = ?);"); - state->stmts->InvalidatePath.create(state->db, - "delete from ValidPaths where path = ?;"); - state->stmts->AddDerivationOutput.create(state->db, - "insert or replace into DerivationOutputs (drv, id, path) values (?, ?, ?);"); - state->stmts->QueryValidDerivers.create(state->db, - "select v.id, v.path from DerivationOutputs d join ValidPaths v on d.drv = v.id where d.path = ?;"); - state->stmts->QueryDerivationOutputs.create(state->db, - "select id, path from DerivationOutputs where drv = ?;"); + state->stmts->InvalidatePath.create(state->db, "delete from ValidPaths where path = ?;"); + state->stmts->AddDerivationOutput.create( + state->db, "insert or replace into DerivationOutputs (drv, id, path) values (?, ?, ?);"); + state->stmts->QueryValidDerivers.create( + state->db, "select v.id, v.path from DerivationOutputs d join ValidPaths v on d.drv = v.id where d.path = ?;"); + state->stmts->QueryDerivationOutputs.create(state->db, "select id, path from DerivationOutputs where drv = ?;"); // Use "path >= ?" with limit 1 rather than "path like '?%'" to // ensure efficient lookup. - state->stmts->QueryPathFromHashPart.create(state->db, - "select path from ValidPaths where path >= ? limit 1;"); + state->stmts->QueryPathFromHashPart.create(state->db, "select path from ValidPaths where path >= ? limit 1;"); state->stmts->QueryValidPaths.create(state->db, "select path from ValidPaths"); if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { - state->stmts->RegisterRealisedOutput.create(state->db, + state->stmts->RegisterRealisedOutput.create( + state->db, R"( insert into Realisations (drvPath, outputName, outputPath, signatures) values (?, ?, (select id from ValidPaths where path = ?), ?) ; )"); - state->stmts->UpdateRealisedOutput.create(state->db, + state->stmts->UpdateRealisedOutput.create( + state->db, R"( update Realisations set signatures = ? @@ -359,27 +357,31 @@ LocalStore::LocalStore(ref config) outputName = ? ; )"); - state->stmts->QueryRealisedOutput.create(state->db, + state->stmts->QueryRealisedOutput.create( + state->db, R"( select Realisations.id, Output.path, Realisations.signatures from Realisations inner join ValidPaths as Output on Output.id = Realisations.outputPath where drvPath = ? and outputName = ? ; )"); - state->stmts->QueryAllRealisedOutputs.create(state->db, + state->stmts->QueryAllRealisedOutputs.create( + state->db, R"( select outputName, Output.path from Realisations inner join ValidPaths as Output on Output.id = Realisations.outputPath where drvPath = ? 
; )"); - state->stmts->QueryRealisationReferences.create(state->db, + state->stmts->QueryRealisationReferences.create( + state->db, R"( select drvPath, outputName from Realisations join RealisationsRefs on realisationReference = Realisations.id where referrer = ?; )"); - state->stmts->AddRealisationReference.create(state->db, + state->stmts->AddRealisationReference.create( + state->db, R"( insert or replace into RealisationsRefs (referrer, realisationReference) values ( @@ -389,27 +391,27 @@ LocalStore::LocalStore(ref config) } } - AutoCloseFD LocalStore::openGCLock() { Path fnGCLock = config->stateDir + "/gc.lock"; - auto fdGCLock = open(fnGCLock.c_str(), O_RDWR | O_CREAT + auto fdGCLock = open( + fnGCLock.c_str(), + O_RDWR | O_CREAT #ifndef _WIN32 - | O_CLOEXEC + | O_CLOEXEC #endif - , 0600); + , + 0600); if (!fdGCLock) throw SysError("opening global GC lock '%1%'", fnGCLock); return toDescriptor(fdGCLock); } - void LocalStore::deleteStorePath(const Path & path, uint64_t & bytesFreed) { deletePath(path, bytesFreed); } - LocalStore::~LocalStore() { std::shared_future future; @@ -436,13 +438,11 @@ LocalStore::~LocalStore() } } - std::string LocalStore::getUri() { return "local"; } - int LocalStore::getSchema() { int curSchema = 0; @@ -469,8 +469,8 @@ void LocalStore::openDB(State & state, bool create) std::string dbPath = dbDir + "/db.sqlite"; auto & db(state.db); auto openMode = config->readOnly ? SQLiteOpenMode::Immutable - : create ? SQLiteOpenMode::Normal - : SQLiteOpenMode::NoCreate; + : create ? SQLiteOpenMode::Normal + : SQLiteOpenMode::NoCreate; state.db = SQLite(dbPath, openMode); #ifdef __CYGWIN__ @@ -504,8 +504,8 @@ void LocalStore::openDB(State & state, bool create) SQLiteError::throw_(db, "querying journal mode"); prevMode = std::string((const char *) sqlite3_column_text(stmt, 0)); } - if (prevMode != mode && - sqlite3_exec(db, ("pragma main.journal_mode = " + mode + ";").c_str(), 0, 0, 0) != SQLITE_OK) + if (prevMode != mode + && sqlite3_exec(db, ("pragma main.journal_mode = " + mode + ";").c_str(), 0, 0, 0) != SQLITE_OK) SQLiteError::throw_(db, "setting journal mode"); if (mode == "wal") { @@ -536,7 +536,6 @@ void LocalStore::openDB(State & state, bool create) } } - void LocalStore::upgradeDBSchema(State & state) { state.db.exec("create table if not exists SchemaMigrations (migration text primary key not null);"); @@ -551,8 +550,7 @@ void LocalStore::upgradeDBSchema(State & state) schemaMigrations.insert(useQuerySchemaMigrations.getStr(0)); } - auto doUpgrade = [&](const std::string & migrationName, const std::string & stmt) - { + auto doUpgrade = [&](const std::string & migrationName, const std::string & stmt) { if (schemaMigrations.contains(migrationName)) return; @@ -568,17 +566,17 @@ void LocalStore::upgradeDBSchema(State & state) if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) doUpgrade( "20220326-ca-derivations", - #include "ca-specific-schema.sql.gen.hh" - ); +#include "ca-specific-schema.sql.gen.hh" + ); } - /* To improve purity, users may want to make the Nix store a read-only bind mount. So make the Nix store writable for this process. */ void LocalStore::makeStoreWritable() { #ifdef __linux__ - if (!isRootUser()) return; + if (!isRootUser()) + return; /* Check if /nix/store is on a read-only mount. 
*/ struct statvfs stat; if (statvfs(config->realStoreDir.get().c_str(), &stat) != 0) @@ -591,14 +589,14 @@ void LocalStore::makeStoreWritable() #endif } - void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag checkSigs) { experimentalFeatureSettings.require(Xp::CaDerivations); if (checkSigs == NoCheckSigs || !realisationIsUntrusted(info)) registerDrvOutput(info); else - throw Error("cannot register realisation '%s' because it lacks a signature by a trusted key", info.outPath.to_string()); + throw Error( + "cannot register realisation '%s' because it lacks a signature by a trusted key", info.outPath.to_string()); } void LocalStore::registerDrvOutput(const Realisation & info) @@ -609,84 +607,68 @@ void LocalStore::registerDrvOutput(const Realisation & info) if (auto oldR = queryRealisation_(*state, info.id)) { if (info.isCompatibleWith(*oldR)) { auto combinedSignatures = oldR->signatures; - combinedSignatures.insert(info.signatures.begin(), - info.signatures.end()); - state->stmts->UpdateRealisedOutput.use() - (concatStringsSep(" ", combinedSignatures)) - (info.id.strHash()) - (info.id.outputName) + combinedSignatures.insert(info.signatures.begin(), info.signatures.end()); + state->stmts->UpdateRealisedOutput + .use()(concatStringsSep(" ", combinedSignatures))(info.id.strHash())(info.id.outputName) .exec(); } else { - throw Error("Trying to register a realisation of '%s', but we already " - "have another one locally.\n" - "Local: %s\n" - "Remote: %s", + throw Error( + "Trying to register a realisation of '%s', but we already " + "have another one locally.\n" + "Local: %s\n" + "Remote: %s", info.id.to_string(), printStorePath(oldR->outPath), - printStorePath(info.outPath) - ); + printStorePath(info.outPath)); } } else { - state->stmts->RegisterRealisedOutput.use() - (info.id.strHash()) - (info.id.outputName) - (printStorePath(info.outPath)) - (concatStringsSep(" ", info.signatures)) + state->stmts->RegisterRealisedOutput + .use()(info.id.strHash())(info.id.outputName)(printStorePath(info.outPath))( + concatStringsSep(" ", info.signatures)) .exec(); } for (auto & [outputId, depPath] : info.dependentRealisations) { auto localRealisation = queryRealisationCore_(*state, outputId); if (!localRealisation) - throw Error("unable to register the derivation '%s' as it " - "depends on the non existent '%s'", - info.id.to_string(), outputId.to_string()); + throw Error( + "unable to register the derivation '%s' as it " + "depends on the non existent '%s'", + info.id.to_string(), + outputId.to_string()); if (localRealisation->second.outPath != depPath) - throw Error("unable to register the derivation '%s' as it " - "depends on a realisation of '%s' that doesn’t" - "match what we have locally", - info.id.to_string(), outputId.to_string()); - state->stmts->AddRealisationReference.use() - (info.id.strHash()) - (info.id.outputName) - (outputId.strHash()) - (outputId.outputName) + throw Error( + "unable to register the derivation '%s' as it " + "depends on a realisation of '%s' that doesn’t" + "match what we have locally", + info.id.to_string(), + outputId.to_string()); + state->stmts->AddRealisationReference + .use()(info.id.strHash())(info.id.outputName)(outputId.strHash())(outputId.outputName) .exec(); } }); } void LocalStore::cacheDrvOutputMapping( - State & state, - const uint64_t deriver, - const std::string & outputName, - const StorePath & output) + State & state, const uint64_t deriver, const std::string & outputName, const StorePath & output) { - retrySQLite([&]() { - 
state.stmts->AddDerivationOutput.use() - (deriver) - (outputName) - (printStorePath(output)) - .exec(); - }); + retrySQLite( + [&]() { state.stmts->AddDerivationOutput.use()(deriver)(outputName) (printStorePath(output)).exec(); }); } - -uint64_t LocalStore::addValidPath(State & state, - const ValidPathInfo & info, bool checkOutputs) +uint64_t LocalStore::addValidPath(State & state, const ValidPathInfo & info, bool checkOutputs) { if (info.ca.has_value() && !info.isContentAddressed(*this)) - throw Error("cannot add path '%s' to the Nix store because it claims to be content-addressed but isn't", + throw Error( + "cannot add path '%s' to the Nix store because it claims to be content-addressed but isn't", printStorePath(info.path)); - state.stmts->RegisterValidPath.use() - (printStorePath(info.path)) - (info.narHash.to_string(HashFormat::Base16, true)) - (info.registrationTime == 0 ? time(0) : info.registrationTime) - (info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver) - (info.narSize, info.narSize != 0) - (info.ultimate ? 1 : 0, info.ultimate) - (concatStringsSep(" ", info.sigs), !info.sigs.empty()) - (renderContentAddress(info.ca), (bool) info.ca) + state.stmts->RegisterValidPath + .use()(printStorePath(info.path))(info.narHash.to_string(HashFormat::Base16, true))( + info.registrationTime == 0 ? time(0) : info.registrationTime)( + info.deriver ? printStorePath(*info.deriver) : "", + (bool) info.deriver)(info.narSize, info.narSize != 0)(info.ultimate ? 1 : 0, info.ultimate)( + concatStringsSep(" ", info.sigs), !info.sigs.empty())(renderContentAddress(info.ca), (bool) info.ca) .exec(); uint64_t id = state.db.getLastInsertedRowId(); @@ -702,7 +684,8 @@ uint64_t LocalStore::addValidPath(State & state, derivations). Note that if this throws an error, then the DB transaction is rolled back, so the path validity registration above is undone. */ - if (checkOutputs) drv.checkInvariants(*this, info.path); + if (checkOutputs) + drv.checkInvariants(*this, info.path); for (auto & i : drv.outputsAndOptPaths(*this)) { /* Floating CA derivations have indeterminate output paths until @@ -714,16 +697,16 @@ uint64_t LocalStore::addValidPath(State & state, { auto state_(Store::state.lock()); - state_->pathInfoCache.upsert(std::string(info.path.to_string()), - PathInfoCacheValue{ .value = std::make_shared(info) }); + state_->pathInfoCache.upsert( + std::string(info.path.to_string()), + PathInfoCacheValue{.value = std::make_shared(info)}); } return id; } - -void LocalStore::queryPathInfoUncached(const StorePath & path, - Callback> callback) noexcept +void LocalStore::queryPathInfoUncached( + const StorePath & path, Callback> callback) noexcept { try { callback(retrySQLite>([&]() { @@ -731,10 +714,11 @@ void LocalStore::queryPathInfoUncached(const StorePath & path, return queryPathInfoInternal(*state, path); })); - } catch (...) { callback.rethrow(); } + } catch (...) { + callback.rethrow(); + } } - std::shared_ptr LocalStore::queryPathInfoInternal(State & state, const StorePath & path) { /* Get the path info. */ @@ -759,7 +743,8 @@ std::shared_ptr LocalStore::queryPathInfoInternal(State & s info->registrationTime = useQueryPathInfo.getInt(2); auto s = (const char *) sqlite3_column_text(state.stmts->QueryPathInfo, 3); - if (s) info->deriver = parseStorePath(s); + if (s) + info->deriver = parseStorePath(s); /* Note that narSize = NULL yields 0. 
*/ info->narSize = useQueryPathInfo.getInt(4); @@ -767,10 +752,12 @@ std::shared_ptr LocalStore::queryPathInfoInternal(State & s info->ultimate = useQueryPathInfo.getInt(5) == 1; s = (const char *) sqlite3_column_text(state.stmts->QueryPathInfo, 6); - if (s) info->sigs = tokenizeString(s, " "); + if (s) + info->sigs = tokenizeString(s, " "); s = (const char *) sqlite3_column_text(state.stmts->QueryPathInfo, 7); - if (s) info->ca = ContentAddress::parseOpt(s); + if (s) + info->ca = ContentAddress::parseOpt(s); /* Get the references. */ auto useQueryReferences(state.stmts->QueryReferences.use()(info->id)); @@ -781,21 +768,16 @@ std::shared_ptr LocalStore::queryPathInfoInternal(State & s return info; } - /* Update path info in the database. */ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info) { - state.stmts->UpdatePathInfo.use() - (info.narSize, info.narSize != 0) - (info.narHash.to_string(HashFormat::Base16, true)) - (info.ultimate ? 1 : 0, info.ultimate) - (concatStringsSep(" ", info.sigs), !info.sigs.empty()) - (renderContentAddress(info.ca), (bool) info.ca) - (printStorePath(info.path)) + state.stmts->UpdatePathInfo + .use()(info.narSize, info.narSize != 0)(info.narHash.to_string(HashFormat::Base16, true))( + info.ultimate ? 1 : 0, info.ultimate)(concatStringsSep(" ", info.sigs), !info.sigs.empty())( + renderContentAddress(info.ca), (bool) info.ca)(printStorePath(info.path)) .exec(); } - uint64_t LocalStore::queryValidPathId(State & state, const StorePath & path) { auto use(state.stmts->QueryPathInfo.use()(printStorePath(path))); @@ -804,13 +786,11 @@ uint64_t LocalStore::queryValidPathId(State & state, const StorePath & path) return use.getInt(0); } - bool LocalStore::isValidPath_(State & state, const StorePath & path) { return state.stmts->QueryPathInfo.use()(printStorePath(path)).next(); } - bool LocalStore::isValidPathUncached(const StorePath & path) { return retrySQLite([&]() { @@ -819,28 +799,27 @@ bool LocalStore::isValidPathUncached(const StorePath & path) }); } - StorePathSet LocalStore::queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute) { StorePathSet res; for (auto & i : paths) - if (isValidPath(i)) res.insert(i); + if (isValidPath(i)) + res.insert(i); return res; } - StorePathSet LocalStore::queryAllValidPaths() { return retrySQLite([&]() { auto state(_state.lock()); auto use(state->stmts->QueryValidPaths.use()); StorePathSet res; - while (use.next()) res.insert(parseStorePath(use.getStr(0))); + while (use.next()) + res.insert(parseStorePath(use.getStr(0))); return res; }); } - void LocalStore::queryReferrers(State & state, const StorePath & path, StorePathSet & referrers) { auto useQueryReferrers(state.stmts->QueryReferrers.use()(printStorePath(path))); @@ -849,7 +828,6 @@ void LocalStore::queryReferrers(State & state, const StorePath & path, StorePath referrers.insert(parseStorePath(useQueryReferrers.getStr(0))); } - void LocalStore::queryReferrers(const StorePath & path, StorePathSet & referrers) { return retrySQLite([&]() { @@ -858,7 +836,6 @@ void LocalStore::queryReferrers(const StorePath & path, StorePathSet & referrers }); } - StorePathSet LocalStore::queryValidDerivers(const StorePath & path) { return retrySQLite([&]() { @@ -874,7 +851,6 @@ StorePathSet LocalStore::queryValidDerivers(const StorePath & path) }); } - std::map> LocalStore::queryStaticPartialDerivationOutputMap(const StorePath & path) { @@ -885,8 +861,7 @@ LocalStore::queryStaticPartialDerivationOutputMap(const StorePath & path) drvId = 
queryValidPathId(*state, path); auto use(state->stmts->QueryDerivationOutputs.use()(drvId)); while (use.next()) - outputs.insert_or_assign( - use.getStr(0), parseStorePath(use.getStr(1))); + outputs.insert_or_assign(use.getStr(0), parseStorePath(use.getStr(1))); return outputs; }); @@ -894,7 +869,8 @@ LocalStore::queryStaticPartialDerivationOutputMap(const StorePath & path) std::optional LocalStore::queryPathFromHashPart(const std::string & hashPart) { - if (hashPart.size() != StorePath::HashLen) throw Error("invalid hash part"); + if (hashPart.size() != StorePath::HashLen) + throw Error("invalid hash part"); Path prefix = storeDir + "/" + hashPart; @@ -903,7 +879,8 @@ std::optional LocalStore::queryPathFromHashPart(const std::string & h auto useQueryPathFromHashPart(state->stmts->QueryPathFromHashPart.use()(prefix)); - if (!useQueryPathFromHashPart.next()) return {}; + if (!useQueryPathFromHashPart.next()) + return {}; const char * s = (const char *) sqlite3_column_text(state->stmts->QueryPathFromHashPart, 0); if (s && prefix.compare(0, prefix.size(), s, prefix.size()) == 0) @@ -912,10 +889,10 @@ std::optional LocalStore::queryPathFromHashPart(const std::string & h }); } - StorePathSet LocalStore::querySubstitutablePaths(const StorePathSet & paths) { - if (!settings.useSubstitutes) return StorePathSet(); + if (!settings.useSubstitutes) + return StorePathSet(); StorePathSet remaining; for (auto & i : paths) @@ -924,9 +901,12 @@ StorePathSet LocalStore::querySubstitutablePaths(const StorePathSet & paths) StorePathSet res; for (auto & sub : getDefaultSubstituters()) { - if (remaining.empty()) break; - if (sub->storeDir != storeDir) continue; - if (!sub->config.wantMassQuery) continue; + if (remaining.empty()) + break; + if (sub->storeDir != storeDir) + continue; + if (!sub->config.wantMassQuery) + continue; auto valid = sub->queryValidPaths(remaining); @@ -943,13 +923,11 @@ StorePathSet LocalStore::querySubstitutablePaths(const StorePathSet & paths) return res; } - void LocalStore::registerValidPath(const ValidPathInfo & info) { registerValidPaths({{info.path, info}}); } - void LocalStore::registerValidPaths(const ValidPathInfos & infos) { #ifndef _WIN32 @@ -957,7 +935,8 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos) be fsync-ed. So some may want to fsync them before registering the validity, at the expense of some speed of the path registering operation. */ - if (settings.syncBeforeRegistering) sync(); + if (settings.syncBeforeRegistering) + sync(); #endif return retrySQLite([&]() { @@ -994,23 +973,21 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos) error if a cycle is detected and roll back the transaction. Cycles can only occur when a derivation has multiple outputs. */ - topoSort(paths, + topoSort( + paths, {[&](const StorePath & path) { auto i = infos.find(path); return i == infos.end() ? StorePathSet() : i->second.references; }}, {[&](const StorePath & path, const StorePath & parent) { return BuildError( - "cycle detected in the references of '%s' from '%s'", - printStorePath(path), - printStorePath(parent)); + "cycle detected in the references of '%s' from '%s'", printStorePath(path), printStorePath(parent)); }}); txn.commit(); }); } - /* Invalidate a path. The caller is responsible for checking that there are no referrers. 
*/ void LocalStore::invalidatePath(State & state, const StorePath & path) @@ -1046,8 +1023,7 @@ bool LocalStore::realisationIsUntrusted(const Realisation & realisation) return config->requireSigs && !realisation.checkSignatures(getPublicKeys()); } -void LocalStore::addToStore(const ValidPathInfo & info, Source & source, - RepairFlag repair, CheckSigsFlag checkSigs) +void LocalStore::addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs) { if (checkSigs && pathInfoIsUntrusted(info)) throw Error("cannot add path '%s' because it lacks a signature by a trusted key", printStorePath(info.path)); @@ -1089,7 +1065,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, of the NAR. */ HashSink hashSink(HashAlgorithm::SHA256); - TeeSource wrapperSource { source, hashSink }; + TeeSource wrapperSource{source, hashSink}; narRead = true; restorePath(realPath, wrapperSource, settings.fsyncStorePaths); @@ -1097,27 +1073,32 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, auto hashResult = hashSink.finish(); if (hashResult.first != info.narHash) - throw Error("hash mismatch importing path '%s';\n specified: %s\n got: %s", - printStorePath(info.path), info.narHash.to_string(HashFormat::Nix32, true), hashResult.first.to_string(HashFormat::Nix32, true)); + throw Error( + "hash mismatch importing path '%s';\n specified: %s\n got: %s", + printStorePath(info.path), + info.narHash.to_string(HashFormat::Nix32, true), + hashResult.first.to_string(HashFormat::Nix32, true)); if (hashResult.second != info.narSize) - throw Error("size mismatch importing path '%s';\n specified: %s\n got: %s", - printStorePath(info.path), info.narSize, hashResult.second); + throw Error( + "size mismatch importing path '%s';\n specified: %s\n got: %s", + printStorePath(info.path), + info.narSize, + hashResult.second); if (info.ca) { auto & specified = *info.ca; auto actualHash = ({ auto accessor = getFSAccessor(false); - CanonPath path { info.path.to_string() }; - Hash h { HashAlgorithm::SHA256 }; // throwaway def to appease C++ + CanonPath path{info.path.to_string()}; + Hash h{HashAlgorithm::SHA256}; // throwaway def to appease C++ auto fim = specified.method.getFileIngestionMethod(); switch (fim) { case FileIngestionMethod::Flat: - case FileIngestionMethod::NixArchive: - { - HashModuloSink caSink { + case FileIngestionMethod::NixArchive: { + HashModuloSink caSink{ specified.hash.algo, - std::string { info.path.hashPart() }, + std::string{info.path.hashPart()}, }; dumpPath({accessor, path}, caSink, (FileSerialisationMethod) fim); h = caSink.finish().first; @@ -1127,13 +1108,14 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, h = git::dumpHash(specified.hash.algo, {accessor, path}).hash; break; } - ContentAddress { + ContentAddress{ .method = specified.method, .hash = std::move(h), }; }); if (specified.hash != actualHash.hash) { - throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s", + throw Error( + "ca hash mismatch importing path '%s';\n specified: %s\n got: %s", printStorePath(info.path), specified.hash.to_string(HashFormat::Nix32, true), actualHash.hash.to_string(HashFormat::Nix32, true)); @@ -1162,7 +1144,6 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, checkInterrupt(); } - StorePath LocalStore::addToStoreFromDump( Source & source0, std::string_view name, @@ -1174,7 +1155,7 @@ StorePath LocalStore::addToStoreFromDump( { /* For computing the store path. 
*/ auto hashSink = std::make_unique(hashAlgo); - TeeSource source { source0, *hashSink }; + TeeSource source{source0, *hashSink}; /* Read the source path into memory, but only if it's up to narBufferSize bytes. If it's larger, write it to a temporary @@ -1184,9 +1165,14 @@ StorePath LocalStore::addToStoreFromDump( path. */ bool inMemory = false; - struct Free { - void operator()(void* v) { free(v); } + struct Free + { + void operator()(void * v) + { + free(v); + } }; + std::unique_ptr dumpBuffer(nullptr); std::string_view dump; @@ -1199,14 +1185,12 @@ StorePath LocalStore::addToStoreFromDump( auto want = std::min(chunkSize, settings.narBufferSize - oldSize); if (auto tmp = realloc(dumpBuffer.get(), oldSize + want)) { dumpBuffer.release(); - dumpBuffer.reset((char*) tmp); + dumpBuffer.reset((char *) tmp); } else { throw std::bad_alloc(); } auto got = 0; - Finally cleanup([&]() { - dump = {dumpBuffer.get(), dump.size() + got}; - }); + Finally cleanup([&]() { dump = {dumpBuffer.get(), dump.size() + got}; }); try { got = source.read(dumpBuffer.get() + oldSize, want); } catch (EndOfFile &) { @@ -1228,8 +1212,8 @@ StorePath LocalStore::addToStoreFromDump( if (!inMemoryAndDontNeedRestore) { /* Drain what we pulled so far, and then keep on pulling */ - StringSource dumpSource { dump }; - ChainSource bothSource { dumpSource, source }; + StringSource dumpSource{dump}; + ChainSource bothSource{dumpSource, source}; std::tie(tempDir, tempDirFd) = createTempDirInStore(); delTempDir = std::make_unique(tempDir); @@ -1247,9 +1231,8 @@ StorePath LocalStore::addToStoreFromDump( hashMethod, methodsMatch ? dumpHash - : hashPath( - PosixSourceAccessor::createAtRoot(tempPath), - hashMethod.getFileIngestionMethod(), hashAlgo).first, + : hashPath(PosixSourceAccessor::createAtRoot(tempPath), hashMethod.getFileIngestionMethod(), hashAlgo) + .first, { .others = references, // caller is not capable of creating a self-reference, because this is content-addressed without modulus @@ -1276,7 +1259,7 @@ StorePath LocalStore::addToStoreFromDump( autoGC(); if (inMemoryAndDontNeedRestore) { - StringSource dumpSource { dump }; + StringSource dumpSource{dump}; /* Restore from the buffer in memory. */ auto fim = hashMethod.getFileIngestionMethod(); switch (fim) { @@ -1296,9 +1279,9 @@ StorePath LocalStore::addToStoreFromDump( /* For computing the nar hash. In recursive SHA-256 mode, this is the same as the store hash, so no need to do it again. */ - auto narHash = std::pair { dumpHash, size }; + auto narHash = std::pair{dumpHash, size}; if (dumpMethod != FileSerialisationMethod::NixArchive || hashAlgo != HashAlgorithm::SHA256) { - HashSink narSink { HashAlgorithm::SHA256 }; + HashSink narSink{HashAlgorithm::SHA256}; dumpPath(realPath, narSink); narHash = narSink.finish(); } @@ -1312,12 +1295,7 @@ StorePath LocalStore::addToStoreFromDump( syncParent(realPath); } - ValidPathInfo info { - *this, - name, - std::move(desc), - narHash.first - }; + ValidPathInfo info{*this, name, std::move(desc), narHash.first}; info.narSize = narHash.second; registerValidPath(info); } @@ -1328,7 +1306,6 @@ StorePath LocalStore::addToStoreFromDump( return dstPath; } - /* Create a temporary directory in the store that won't be garbage-collected until the returned FD is closed. 
*/ std::pair LocalStore::createTempDirInStore() @@ -1350,7 +1327,6 @@ std::pair LocalStore::createTempDirInStore() return {tmpDirFn, std::move(tmpDirFd)}; } - void LocalStore::invalidatePathChecked(const StorePath & path) { retrySQLite([&]() { @@ -1359,11 +1335,12 @@ void LocalStore::invalidatePathChecked(const StorePath & path) SQLiteTxn txn(state->db); if (isValidPath_(*state, path)) { - StorePathSet referrers; queryReferrers(*state, path, referrers); + StorePathSet referrers; + queryReferrers(*state, path, referrers); referrers.erase(path); /* ignore self-references */ if (!referrers.empty()) - throw PathInUse("cannot delete path '%s' because it is in use by %s", - printStorePath(path), showPaths(referrers)); + throw PathInUse( + "cannot delete path '%s' because it is in use by %s", printStorePath(path), showPaths(referrers)); invalidatePath(*state, path); } @@ -1371,7 +1348,6 @@ void LocalStore::invalidatePathChecked(const StorePath & path) }); } - bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) { printInfo("reading the Nix store..."); @@ -1394,11 +1370,12 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) printMsg(lvlTalkative, "checking contents of '%s'", name); PosixSourceAccessor accessor; std::string hash = hashPath( - PosixSourceAccessor::createAtRoot(link.path()), - FileIngestionMethod::NixArchive, HashAlgorithm::SHA256).first.to_string(HashFormat::Nix32, false); + PosixSourceAccessor::createAtRoot(link.path()), + FileIngestionMethod::NixArchive, + HashAlgorithm::SHA256) + .first.to_string(HashFormat::Nix32, false); if (hash != name.string()) { - printError("link '%s' was modified! expected hash '%s', got '%s'", - link.path(), name, hash); + printError("link '%s' was modified! expected hash '%s', got '%s'", link.path(), name, hash); if (repair) { std::filesystem::remove(link.path()); printInfo("removed link '%s'", link.path()); @@ -1414,7 +1391,8 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) for (auto & i : validPaths) { try { - auto info = std::const_pointer_cast(std::shared_ptr(queryPathInfo(i))); + auto info = + std::const_pointer_cast(std::shared_ptr(queryPathInfo(i))); /* Check the content hash (optionally - slow). */ printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i)); @@ -1425,9 +1403,15 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) auto current = hashSink.finish(); if (info->narHash != nullHash && info->narHash != current.first) { - printError("path '%s' was modified! expected hash '%s', got '%s'", - printStorePath(i), info->narHash.to_string(HashFormat::Nix32, true), current.first.to_string(HashFormat::Nix32, true)); - if (repair) repairPath(i); else errors = true; + printError( + "path '%s' was modified! 
expected hash '%s', got '%s'", + printStorePath(i), + info->narHash.to_string(HashFormat::Nix32, true), + current.first.to_string(HashFormat::Nix32, true)); + if (repair) + repairPath(i); + else + errors = true; } else { bool update = false; @@ -1450,7 +1434,6 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) auto state(_state.lock()); updatePathInfo(*state, *info); } - } } catch (Error & e) { @@ -1468,7 +1451,6 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) return errors; } - LocalStore::VerificationResult LocalStore::verifyAllValidPaths(RepairFlag repair) { StorePathSet storePathsInStoreDir; @@ -1485,7 +1467,8 @@ LocalStore::VerificationResult LocalStore::verifyAllValidPaths(RepairFlag repair checkInterrupt(); try { storePathsInStoreDir.insert({i.path().filename().string()}); - } catch (BadStorePath &) { } + } catch (BadStorePath &) { + } } /* Check whether all valid paths actually exist. */ @@ -1493,9 +1476,7 @@ LocalStore::VerificationResult LocalStore::verifyAllValidPaths(RepairFlag repair StorePathSet done; - auto existsInStoreDir = [&](const StorePath & storePath) { - return storePathsInStoreDir.count(storePath); - }; + auto existsInStoreDir = [&](const StorePath & storePath) { return storePathsInStoreDir.count(storePath); }; bool errors = false; StorePathSet validPaths; @@ -1509,19 +1490,25 @@ LocalStore::VerificationResult LocalStore::verifyAllValidPaths(RepairFlag repair }; } - -void LocalStore::verifyPath(const StorePath & path, std::function existsInStoreDir, - StorePathSet & done, StorePathSet & validPaths, RepairFlag repair, bool & errors) +void LocalStore::verifyPath( + const StorePath & path, + std::function existsInStoreDir, + StorePathSet & done, + StorePathSet & validPaths, + RepairFlag repair, + bool & errors) { checkInterrupt(); - if (!done.insert(path).second) return; + if (!done.insert(path).second) + return; if (!existsInStoreDir(path)) { /* Check any referrers first. If we can invalidate them first, then we can invalidate this path as well. 
*/ bool canInvalidate = true; - StorePathSet referrers; queryReferrers(path, referrers); + StorePathSet referrers; + queryReferrers(path, referrers); for (auto & i : referrers) if (i != path) { verifyPath(i, existsInStoreDir, done, validPaths, repair, errors); @@ -1544,7 +1531,8 @@ void LocalStore::verifyPath(const StorePath & path, std::function LocalStore::isTrustedClient() return Trusted; } - void LocalStore::vacuumDB() { auto state(_state.lock()); state->db.exec("vacuum"); } - void LocalStore::addSignatures(const StorePath & storePath, const StringSet & sigs) { retrySQLite([&]() { @@ -1589,35 +1574,26 @@ void LocalStore::addSignatures(const StorePath & storePath, const StringSet & si }); } - -std::optional> LocalStore::queryRealisationCore_( - LocalStore::State & state, - const DrvOutput & id) +std::optional> +LocalStore::queryRealisationCore_(LocalStore::State & state, const DrvOutput & id) { - auto useQueryRealisedOutput( - state.stmts->QueryRealisedOutput.use() - (id.strHash()) - (id.outputName)); + auto useQueryRealisedOutput(state.stmts->QueryRealisedOutput.use()(id.strHash())(id.outputName)); if (!useQueryRealisedOutput.next()) return std::nullopt; auto realisationDbId = useQueryRealisedOutput.getInt(0); auto outputPath = parseStorePath(useQueryRealisedOutput.getStr(1)); - auto signatures = - tokenizeString(useQueryRealisedOutput.getStr(2)); + auto signatures = tokenizeString(useQueryRealisedOutput.getStr(2)); - return {{ - realisationDbId, - Realisation{ - .id = id, - .outPath = outputPath, - .signatures = signatures, - } - }}; + return { + {realisationDbId, + Realisation{ + .id = id, + .outPath = outputPath, + .signatures = signatures, + }}}; } -std::optional LocalStore::queryRealisation_( - LocalStore::State & state, - const DrvOutput & id) +std::optional LocalStore::queryRealisation_(LocalStore::State & state, const DrvOutput & id) { auto maybeCore = queryRealisationCore_(state, id); if (!maybeCore) @@ -1625,11 +1601,9 @@ std::optional LocalStore::queryRealisation_( auto [realisationDbId, res] = *maybeCore; std::map dependentRealisations; - auto useRealisationRefs( - state.stmts->QueryRealisationReferences.use() - (realisationDbId)); + auto useRealisationRefs(state.stmts->QueryRealisationReferences.use()(realisationDbId)); while (useRealisationRefs.next()) { - auto depId = DrvOutput { + auto depId = DrvOutput{ Hash::parseAnyPrefixed(useRealisationRefs.getStr(0)), useRealisationRefs.getStr(1), }; @@ -1641,21 +1615,19 @@ std::optional LocalStore::queryRealisation_( res.dependentRealisations = dependentRealisations; - return { res }; + return {res}; } -void LocalStore::queryRealisationUncached(const DrvOutput & id, - Callback> callback) noexcept +void LocalStore::queryRealisationUncached( + const DrvOutput & id, Callback> callback) noexcept { try { - auto maybeRealisation - = retrySQLite>([&]() { - auto state(_state.lock()); - return queryRealisation_(*state, id); - }); + auto maybeRealisation = retrySQLite>([&]() { + auto state(_state.lock()); + return queryRealisation_(*state, id); + }); if (maybeRealisation) - callback( - std::make_shared(maybeRealisation.value())); + callback(std::make_shared(maybeRealisation.value())); else callback(nullptr); @@ -1672,7 +1644,8 @@ void LocalStore::addBuildLog(const StorePath & drvPath, std::string_view log) auto logPath = fmt("%s/%s/%s/%s.bz2", config->logDir, drvsLogDir, baseName.substr(0, 2), baseName.substr(2)); - if (pathExists(logPath)) return; + if (pathExists(logPath)) + return; createDirs(dirOf(logPath)); @@ -1690,4 +1663,4 @@ 
std::optional LocalStore::getVersion() static RegisterStoreImplementation regLocalStore; -} // namespace nix +} // namespace nix diff --git a/src/libstore/log-store.cc b/src/libstore/log-store.cc index 2ef791e19..fd03bb30e 100644 --- a/src/libstore/log-store.cc +++ b/src/libstore/log-store.cc @@ -2,11 +2,12 @@ namespace nix { -std::optional LogStore::getBuildLog(const StorePath & path) { +std::optional LogStore::getBuildLog(const StorePath & path) +{ auto maybePath = getBuildDerivationPath(path); if (!maybePath) return std::nullopt; return getBuildLogExact(maybePath.value()); } -} +} // namespace nix diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc index 483b337bf..4ae5cd206 100644 --- a/src/libstore/machines.cc +++ b/src/libstore/machines.cc @@ -14,29 +14,24 @@ Machine::Machine( decltype(speedFactor) speedFactor, decltype(supportedFeatures) supportedFeatures, decltype(mandatoryFeatures) mandatoryFeatures, - decltype(sshPublicHostKey) sshPublicHostKey) : - storeUri(StoreReference::parse( - // Backwards compatibility: if the URI is schemeless, is not a path, - // and is not one of the special store connection words, prepend - // ssh://. - storeUri.find("://") != std::string::npos - || storeUri.find("/") != std::string::npos - || storeUri == "auto" - || storeUri == "daemon" - || storeUri == "local" - || hasPrefix(storeUri, "auto?") - || hasPrefix(storeUri, "daemon?") - || hasPrefix(storeUri, "local?") - || hasPrefix(storeUri, "?") - ? storeUri - : "ssh://" + storeUri)), - systemTypes(systemTypes), - sshKey(sshKey), - maxJobs(maxJobs), - speedFactor(speedFactor == 0.0f ? 1.0f : speedFactor), - supportedFeatures(supportedFeatures), - mandatoryFeatures(mandatoryFeatures), - sshPublicHostKey(sshPublicHostKey) + decltype(sshPublicHostKey) sshPublicHostKey) + : storeUri( + StoreReference::parse( + // Backwards compatibility: if the URI is schemeless, is not a path, + // and is not one of the special store connection words, prepend + // ssh://. + storeUri.find("://") != std::string::npos || storeUri.find("/") != std::string::npos || storeUri == "auto" + || storeUri == "daemon" || storeUri == "local" || hasPrefix(storeUri, "auto?") + || hasPrefix(storeUri, "daemon?") || hasPrefix(storeUri, "local?") || hasPrefix(storeUri, "?") + ? storeUri + : "ssh://" + storeUri)) + , systemTypes(systemTypes) + , sshKey(sshKey) + , maxJobs(maxJobs) + , speedFactor(speedFactor == 0.0f ? 
1.0f : speedFactor) + , supportedFeatures(supportedFeatures) + , mandatoryFeatures(mandatoryFeatures) + , sshPublicHostKey(sshPublicHostKey) { if (speedFactor < 0.0) throw UsageError("speed factor must be >= 0"); @@ -49,19 +44,16 @@ bool Machine::systemSupported(const std::string & system) const bool Machine::allSupported(const StringSet & features) const { - return std::all_of(features.begin(), features.end(), - [&](const std::string & feature) { - return supportedFeatures.count(feature) || - mandatoryFeatures.count(feature); - }); + return std::all_of(features.begin(), features.end(), [&](const std::string & feature) { + return supportedFeatures.count(feature) || mandatoryFeatures.count(feature); + }); } bool Machine::mandatoryMet(const StringSet & features) const { - return std::all_of(mandatoryFeatures.begin(), mandatoryFeatures.end(), - [&](const std::string & feature) { - return features.count(feature); - }); + return std::all_of(mandatoryFeatures.begin(), mandatoryFeatures.end(), [&](const std::string & feature) { + return features.count(feature); + }); } StoreReference Machine::completeStoreReference() const @@ -86,7 +78,8 @@ StoreReference Machine::completeStoreReference() const auto & fs = storeUri.params["system-features"]; auto append = [&](auto feats) { for (auto & f : feats) { - if (fs.size() > 0) fs += ' '; + if (fs.size() > 0) + fs += ' '; fs += f; } }; @@ -145,7 +138,10 @@ static Machine parseBuilderLine(const StringSet & defaultSystems, const std::str auto parseUnsignedIntField = [&](size_t fieldIndex) { const auto result = string2Int(tokens[fieldIndex]); if (!result) { - throw FormatError("bad machine specification: failed to convert column #%lu in a row: '%s' to 'unsigned int'", fieldIndex, line); + throw FormatError( + "bad machine specification: failed to convert column #%lu in a row: '%s' to 'unsigned int'", + fieldIndex, + line); } return result.value(); }; @@ -153,7 +149,8 @@ static Machine parseBuilderLine(const StringSet & defaultSystems, const std::str auto parseFloatField = [&](size_t fieldIndex) { const auto result = string2Float(tokens[fieldIndex]); if (!result) { - throw FormatError("bad machine specification: failed to convert column #%lu in a row: '%s' to 'float'", fieldIndex, line); + throw FormatError( + "bad machine specification: failed to convert column #%lu in a row: '%s' to 'float'", fieldIndex, line); } return result.value(); }; @@ -170,7 +167,8 @@ static Machine parseBuilderLine(const StringSet & defaultSystems, const std::str }; if (!isSet(0)) - throw FormatError("bad machine specification: store URL was not found at the first column of a row: '%s'", line); + throw FormatError( + "bad machine specification: store URL was not found at the first column of a row: '%s'", line); // TODO use designated initializers, once C++ supports those with // custom constructors. @@ -190,16 +188,15 @@ static Machine parseBuilderLine(const StringSet & defaultSystems, const std::str // `mandatoryFeatures` isSet(6) ? tokenizeString(tokens[6], ",") : StringSet{}, // `sshPublicHostKey` - isSet(7) ? ensureBase64(7) : "" - }; + isSet(7) ? 
ensureBase64(7) : ""}; } static Machines parseBuilderLines(const StringSet & defaultSystems, const std::vector & builders) { Machines result; - std::transform( - builders.begin(), builders.end(), std::back_inserter(result), - [&](auto && line) { return parseBuilderLine(defaultSystems, line); }); + std::transform(builders.begin(), builders.end(), std::back_inserter(result), [&](auto && line) { + return parseBuilderLine(defaultSystems, line); + }); return result; } @@ -214,4 +211,4 @@ Machines getMachines() return Machine::parseConfig({settings.thisSystem}, settings.builders); } -} +} // namespace nix diff --git a/src/libstore/make-content-addressed.cc b/src/libstore/make-content-addressed.cc index 606d72866..2de18fe83 100644 --- a/src/libstore/make-content-addressed.cc +++ b/src/libstore/make-content-addressed.cc @@ -3,10 +3,7 @@ namespace nix { -std::map makeContentAddressed( - Store & srcStore, - Store & dstStore, - const StorePathSet & storePaths) +std::map makeContentAddressed(Store & srcStore, Store & dstStore, const StorePathSet & storePaths) { StorePathSet closure; srcStore.computeFSClosure(storePaths, closure); @@ -48,10 +45,10 @@ std::map makeContentAddressed( auto narModuloHash = hashModuloSink.finish().first; - ValidPathInfo info { + ValidPathInfo info{ dstStore, path.name(), - FixedOutputInfo { + FixedOutputInfo{ .method = FileIngestionMethod::NixArchive, .hash = narModuloHash, .references = std::move(refs), @@ -78,15 +75,12 @@ std::map makeContentAddressed( return remappings; } -StorePath makeContentAddressed( - Store & srcStore, - Store & dstStore, - const StorePath & fromPath) +StorePath makeContentAddressed(Store & srcStore, Store & dstStore, const StorePath & fromPath) { - auto remappings = makeContentAddressed(srcStore, dstStore, StorePathSet { fromPath }); + auto remappings = makeContentAddressed(srcStore, dstStore, StorePathSet{fromPath}); auto i = remappings.find(fromPath); assert(i != remappings.end()); return i->second; } -} +} // namespace nix diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index 7c97dbc57..7492204ce 100644 --- a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -15,41 +15,43 @@ namespace nix { -void Store::computeFSClosure(const StorePathSet & startPaths, - StorePathSet & paths_, bool flipDirection, bool includeOutputs, bool includeDerivers) +void Store::computeFSClosure( + const StorePathSet & startPaths, + StorePathSet & paths_, + bool flipDirection, + bool includeOutputs, + bool includeDerivers) { std::function(const StorePath & path, std::future> &)> queryDeps; if (flipDirection) - queryDeps = [&](const StorePath& path, - std::future> & fut) { + queryDeps = [&](const StorePath & path, std::future> & fut) { StorePathSet res; StorePathSet referrers; queryReferrers(path, referrers); - for (auto& ref : referrers) + for (auto & ref : referrers) if (ref != path) res.insert(ref); if (includeOutputs) - for (auto& i : queryValidDerivers(path)) + for (auto & i : queryValidDerivers(path)) res.insert(i); if (includeDerivers && path.isDerivation()) - for (auto& [_, maybeOutPath] : queryPartialDerivationOutputMap(path)) + for (auto & [_, maybeOutPath] : queryPartialDerivationOutputMap(path)) if (maybeOutPath && isValidPath(*maybeOutPath)) res.insert(*maybeOutPath); return res; }; else - queryDeps = [&](const StorePath& path, - std::future> & fut) { + queryDeps = [&](const StorePath & path, std::future> & fut) { StorePathSet res; auto info = fut.get(); - for (auto& ref : info->references) + for (auto & ref : info->references) if (ref != path) 
res.insert(ref); if (includeOutputs && path.isDerivation()) - for (auto& [_, maybeOutPath] : queryPartialDerivationOutputMap(path)) + for (auto & [_, maybeOutPath] : queryPartialDerivationOutputMap(path)) if (maybeOutPath && isValidPath(*maybeOutPath)) res.insert(*maybeOutPath); @@ -59,34 +61,31 @@ void Store::computeFSClosure(const StorePathSet & startPaths, }; computeClosure( - startPaths, paths_, - [&](const StorePath& path, - std::function>&)> - processEdges) { + startPaths, + paths_, + [&](const StorePath & path, std::function> &)> processEdges) { std::promise> promise; - std::function>)> - getDependencies = - [&](std::future> fut) { - try { - promise.set_value(queryDeps(path, fut)); - } catch (...) { - promise.set_exception(std::current_exception()); - } - }; + std::function>)> getDependencies = + [&](std::future> fut) { + try { + promise.set_value(queryDeps(path, fut)); + } catch (...) { + promise.set_exception(std::current_exception()); + } + }; queryPathInfo(path, getDependencies); processEdges(promise); }); } -void Store::computeFSClosure(const StorePath & startPath, - StorePathSet & paths_, bool flipDirection, bool includeOutputs, bool includeDerivers) +void Store::computeFSClosure( + const StorePath & startPath, StorePathSet & paths_, bool flipDirection, bool includeOutputs, bool includeDerivers) { StorePathSet paths; paths.insert(startPath); computeFSClosure(paths, paths_, flipDirection, includeOutputs, includeDerivers); } - const ContentAddress * getDerivationCA(const BasicDerivation & drv) { auto out = drv.outputs.find("out"); @@ -116,7 +115,11 @@ MissingPaths Store::queryMissing(const std::vector & targets) size_t left; bool done = false; StorePathSet outPaths; - DrvState(size_t left) : left(left) { } + + DrvState(size_t left) + : left(left) + { + } }; Sync state_; @@ -127,11 +130,9 @@ MissingPaths Store::queryMissing(const std::vector & targets) enqueueDerivedPaths = [&](ref inputDrv, const DerivedPathMap::ChildNode & inputNode) { if (!inputNode.value.empty()) - pool.enqueue(std::bind(doPath, DerivedPath::Built { inputDrv, inputNode.value })); + pool.enqueue(std::bind(doPath, DerivedPath::Built{inputDrv, inputNode.value})); for (const auto & [outputName, childNode] : inputNode.childMap) - enqueueDerivedPaths( - make_ref(SingleDerivedPath::Built { inputDrv, outputName }), - childNode); + enqueueDerivedPaths(make_ref(SingleDerivedPath::Built{inputDrv, outputName}), childNode); }; auto mustBuildDrv = [&](const StorePath & drvPath, const Derivation & drv) { @@ -145,155 +146,161 @@ MissingPaths Store::queryMissing(const std::vector & targets) } }; - auto checkOutput = [&]( - const StorePath & drvPath, ref drv, const StorePath & outPath, ref> drvState_) - { - if (drvState_->lock()->done) return; - - SubstitutablePathInfos infos; - auto * cap = getDerivationCA(*drv); - querySubstitutablePathInfos({ - { - outPath, - cap ? 
std::optional { *cap } : std::nullopt, - }, - }, infos); - - if (infos.empty()) { - drvState_->lock()->done = true; - mustBuildDrv(drvPath, *drv); - } else { - { - auto drvState(drvState_->lock()); - if (drvState->done) return; - assert(drvState->left); - drvState->left--; - drvState->outPaths.insert(outPath); - if (!drvState->left) { - for (auto & path : drvState->outPaths) - pool.enqueue(std::bind(doPath, DerivedPath::Opaque { path } )); - } - } - } - }; - - doPath = [&](const DerivedPath & req) { - - { - auto state(state_.lock()); - if (!state->done.insert(req.to_string(*this)).second) return; - } - - std::visit(overloaded { - [&](const DerivedPath::Built & bfd) { - auto drvPathP = std::get_if(&*bfd.drvPath); - if (!drvPathP) { - // TODO make work in this case. - warn("Ignoring dynamic derivation %s while querying missing paths; not yet implemented", bfd.drvPath->to_string(*this)); + auto checkOutput = + [&](const StorePath & drvPath, ref drv, const StorePath & outPath, ref> drvState_) { + if (drvState_->lock()->done) return; - } - auto & drvPath = drvPathP->path; - - if (!isValidPath(drvPath)) { - // FIXME: we could try to substitute the derivation. - auto state(state_.lock()); - state->res.unknown.insert(drvPath); - return; - } - - StorePathSet invalid; - /* true for regular derivations, and CA derivations for which we - have a trust mapping for all wanted outputs. */ - auto knownOutputPaths = true; - for (auto & [outputName, pathOpt] : queryPartialDerivationOutputMap(drvPath)) { - if (!pathOpt) { - knownOutputPaths = false; - break; - } - if (bfd.outputs.contains(outputName) && !isValidPath(*pathOpt)) - invalid.insert(*pathOpt); - } - if (knownOutputPaths && invalid.empty()) return; - - auto drv = make_ref(derivationFromPath(drvPath)); - auto parsedDrv = StructuredAttrs::tryParse(drv->env); - DerivationOptions drvOptions; - try { - // FIXME: this is a lot of work just to get the value - // of `allowSubstitutes`. - drvOptions = DerivationOptions::fromStructuredAttrs( - drv->env, - parsedDrv ? &*parsedDrv : nullptr); - } catch (Error & e) { - e.addTrace({}, "while parsing derivation '%s'", printStorePath(drvPath)); - throw; - } - - if (!knownOutputPaths && settings.useSubstitutes && drvOptions.substitutesAllowed()) { - experimentalFeatureSettings.require(Xp::CaDerivations); - - // If there are unknown output paths, attempt to find if the - // paths are known to substituters through a realisation. - auto outputHashes = staticOutputHashes(*this, *drv); - knownOutputPaths = true; - - for (auto [outputName, hash] : outputHashes) { - if (!bfd.outputs.contains(outputName)) - continue; - - bool found = false; - for (auto &sub : getDefaultSubstituters()) { - auto realisation = sub->queryRealisation({hash, outputName}); - if (!realisation) - continue; - found = true; - if (!isValidPath(realisation->outPath)) - invalid.insert(realisation->outPath); - break; - } - if (!found) { - // Some paths did not have a realisation, this must be built. 
- knownOutputPaths = false; - break; - } - } - } - - if (knownOutputPaths && settings.useSubstitutes && drvOptions.substitutesAllowed()) { - auto drvState = make_ref>(DrvState(invalid.size())); - for (auto & output : invalid) - pool.enqueue(std::bind(checkOutput, drvPath, drv, output, drvState)); - } else - mustBuildDrv(drvPath, *drv); - - }, - [&](const DerivedPath::Opaque & bo) { - - if (isValidPath(bo.path)) return; SubstitutablePathInfos infos; - querySubstitutablePathInfos({{bo.path, std::nullopt}}, infos); + auto * cap = getDerivationCA(*drv); + querySubstitutablePathInfos( + { + { + outPath, + cap ? std::optional{*cap} : std::nullopt, + }, + }, + infos); if (infos.empty()) { - auto state(state_.lock()); - state->res.unknown.insert(bo.path); + drvState_->lock()->done = true; + mustBuildDrv(drvPath, *drv); + } else { + { + auto drvState(drvState_->lock()); + if (drvState->done) + return; + assert(drvState->left); + drvState->left--; + drvState->outPaths.insert(outPath); + if (!drvState->left) { + for (auto & path : drvState->outPaths) + pool.enqueue(std::bind(doPath, DerivedPath::Opaque{path})); + } + } + } + }; + + doPath = [&](const DerivedPath & req) { + { + auto state(state_.lock()); + if (!state->done.insert(req.to_string(*this)).second) return; - } + } - auto info = infos.find(bo.path); - assert(info != infos.end()); + std::visit( + overloaded{ + [&](const DerivedPath::Built & bfd) { + auto drvPathP = std::get_if(&*bfd.drvPath); + if (!drvPathP) { + // TODO make work in this case. + warn( + "Ignoring dynamic derivation %s while querying missing paths; not yet implemented", + bfd.drvPath->to_string(*this)); + return; + } + auto & drvPath = drvPathP->path; - { - auto state(state_.lock()); - state->res.willSubstitute.insert(bo.path); - state->res.downloadSize += info->second.downloadSize; - state->res.narSize += info->second.narSize; - } + if (!isValidPath(drvPath)) { + // FIXME: we could try to substitute the derivation. + auto state(state_.lock()); + state->res.unknown.insert(drvPath); + return; + } - for (auto & ref : info->second.references) - pool.enqueue(std::bind(doPath, DerivedPath::Opaque { ref })); - }, - }, req.raw()); + StorePathSet invalid; + /* true for regular derivations, and CA derivations for which we + have a trust mapping for all wanted outputs. */ + auto knownOutputPaths = true; + for (auto & [outputName, pathOpt] : queryPartialDerivationOutputMap(drvPath)) { + if (!pathOpt) { + knownOutputPaths = false; + break; + } + if (bfd.outputs.contains(outputName) && !isValidPath(*pathOpt)) + invalid.insert(*pathOpt); + } + if (knownOutputPaths && invalid.empty()) + return; + + auto drv = make_ref(derivationFromPath(drvPath)); + auto parsedDrv = StructuredAttrs::tryParse(drv->env); + DerivationOptions drvOptions; + try { + // FIXME: this is a lot of work just to get the value + // of `allowSubstitutes`. + drvOptions = + DerivationOptions::fromStructuredAttrs(drv->env, parsedDrv ? &*parsedDrv : nullptr); + } catch (Error & e) { + e.addTrace({}, "while parsing derivation '%s'", printStorePath(drvPath)); + throw; + } + + if (!knownOutputPaths && settings.useSubstitutes && drvOptions.substitutesAllowed()) { + experimentalFeatureSettings.require(Xp::CaDerivations); + + // If there are unknown output paths, attempt to find if the + // paths are known to substituters through a realisation. 
+ auto outputHashes = staticOutputHashes(*this, *drv); + knownOutputPaths = true; + + for (auto [outputName, hash] : outputHashes) { + if (!bfd.outputs.contains(outputName)) + continue; + + bool found = false; + for (auto & sub : getDefaultSubstituters()) { + auto realisation = sub->queryRealisation({hash, outputName}); + if (!realisation) + continue; + found = true; + if (!isValidPath(realisation->outPath)) + invalid.insert(realisation->outPath); + break; + } + if (!found) { + // Some paths did not have a realisation, this must be built. + knownOutputPaths = false; + break; + } + } + } + + if (knownOutputPaths && settings.useSubstitutes && drvOptions.substitutesAllowed()) { + auto drvState = make_ref>(DrvState(invalid.size())); + for (auto & output : invalid) + pool.enqueue(std::bind(checkOutput, drvPath, drv, output, drvState)); + } else + mustBuildDrv(drvPath, *drv); + }, + [&](const DerivedPath::Opaque & bo) { + if (isValidPath(bo.path)) + return; + + SubstitutablePathInfos infos; + querySubstitutablePathInfos({{bo.path, std::nullopt}}, infos); + + if (infos.empty()) { + auto state(state_.lock()); + state->res.unknown.insert(bo.path); + return; + } + + auto info = infos.find(bo.path); + assert(info != infos.end()); + + { + auto state(state_.lock()); + state->res.willSubstitute.insert(bo.path); + state->res.downloadSize += info->second.downloadSize; + state->res.narSize += info->second.narSize; + } + + for (auto & ref : info->second.references) + pool.enqueue(std::bind(doPath, DerivedPath::Opaque{ref})); + }, + }, + req.raw()); }; for (auto & path : targets) @@ -304,10 +311,10 @@ MissingPaths Store::queryMissing(const std::vector & targets) return std::move(state_.lock()->res); } - StorePaths Store::topoSortPaths(const StorePathSet & paths) { - return topoSort(paths, + return topoSort( + paths, {[&](const StorePath & path) { try { return queryPathInfo(path)->references; @@ -317,15 +324,12 @@ StorePaths Store::topoSortPaths(const StorePathSet & paths) }}, {[&](const StorePath & path, const StorePath & parent) { return BuildError( - "cycle detected in the references of '%s' from '%s'", - printStorePath(path), - printStorePath(parent)); + "cycle detected in the references of '%s' from '%s'", printStorePath(path), printStorePath(parent)); }}); } -std::map drvOutputReferences( - const std::set & inputRealisations, - const StorePathSet & pathReferences) +std::map +drvOutputReferences(const std::set & inputRealisations, const StorePathSet & pathReferences) { std::map res; @@ -338,11 +342,8 @@ std::map drvOutputReferences( return res; } -std::map drvOutputReferences( - Store & store, - const Derivation & drv, - const StorePath & outputPath, - Store * evalStore_) +std::map +drvOutputReferences(Store & store, const Derivation & drv, const StorePath & outputPath, Store * evalStore_) { auto & evalStore = evalStore_ ? 
*evalStore_ : store; @@ -352,27 +353,23 @@ std::map drvOutputReferences( accumRealisations = [&](const StorePath & inputDrv, const DerivedPathMap::ChildNode & inputNode) { if (!inputNode.value.empty()) { - auto outputHashes = - staticOutputHashes(evalStore, evalStore.readDerivation(inputDrv)); + auto outputHashes = staticOutputHashes(evalStore, evalStore.readDerivation(inputDrv)); for (const auto & outputName : inputNode.value) { auto outputHash = get(outputHashes, outputName); if (!outputHash) throw Error( - "output '%s' of derivation '%s' isn't realised", outputName, - store.printStorePath(inputDrv)); - auto thisRealisation = store.queryRealisation( - DrvOutput{*outputHash, outputName}); + "output '%s' of derivation '%s' isn't realised", outputName, store.printStorePath(inputDrv)); + auto thisRealisation = store.queryRealisation(DrvOutput{*outputHash, outputName}); if (!thisRealisation) throw Error( - "output '%s' of derivation '%s' isn’t built", outputName, - store.printStorePath(inputDrv)); + "output '%s' of derivation '%s' isn’t built", outputName, store.printStorePath(inputDrv)); inputRealisations.insert(*thisRealisation); } } if (!inputNode.value.empty()) { auto d = makeConstantStorePathRef(inputDrv); for (const auto & [outputName, childNode] : inputNode.childMap) { - SingleDerivedPath next = SingleDerivedPath::Built { d, outputName }; + SingleDerivedPath next = SingleDerivedPath::Built{d, outputName}; accumRealisations( // TODO deep resolutions for dynamic derivations, issue #8947, would go here. resolveDerivedPath(store, next, evalStore_), @@ -395,25 +392,28 @@ OutputPathMap resolveDerivedPath(Store & store, const DerivedPath::Built & bfd, auto outputsOpt_ = store.queryPartialDerivationOutputMap(drvPath, evalStore_); - auto outputsOpt = std::visit(overloaded { - [&](const OutputsSpec::All &) { - // Keep all outputs - return std::move(outputsOpt_); + auto outputsOpt = std::visit( + overloaded{ + [&](const OutputsSpec::All &) { + // Keep all outputs + return std::move(outputsOpt_); + }, + [&](const OutputsSpec::Names & names) { + // Get just those mentioned by name + std::map> outputsOpt; + for (auto & output : names) { + auto * pOutputPathOpt = get(outputsOpt_, output); + if (!pOutputPathOpt) + throw Error( + "the derivation '%s' doesn't have an output named '%s'", + bfd.drvPath->to_string(store), + output); + outputsOpt.insert_or_assign(output, std::move(*pOutputPathOpt)); + } + return outputsOpt; + }, }, - [&](const OutputsSpec::Names & names) { - // Get just those mentioned by name - std::map> outputsOpt; - for (auto & output : names) { - auto * pOutputPathOpt = get(outputsOpt_, output); - if (!pOutputPathOpt) - throw Error( - "the derivation '%s' doesn't have an output named '%s'", - bfd.drvPath->to_string(store), output); - outputsOpt.insert_or_assign(output, std::move(*pOutputPathOpt)); - } - return outputsOpt; - }, - }, bfd.outputs.raw); + bfd.outputs.raw); OutputPathMap outputs; for (auto & [outputName, outputPathOpt] : outputsOpt) { @@ -425,42 +425,40 @@ OutputPathMap resolveDerivedPath(Store & store, const DerivedPath::Built & bfd, return outputs; } - StorePath resolveDerivedPath(Store & store, const SingleDerivedPath & req, Store * evalStore_) { auto & evalStore = evalStore_ ? 
*evalStore_ : store; - return std::visit(overloaded { - [&](const SingleDerivedPath::Opaque & bo) { - return bo.path; + return std::visit( + overloaded{ + [&](const SingleDerivedPath::Opaque & bo) { return bo.path; }, + [&](const SingleDerivedPath::Built & bfd) { + auto drvPath = resolveDerivedPath(store, *bfd.drvPath, evalStore_); + auto outputPaths = evalStore.queryPartialDerivationOutputMap(drvPath, evalStore_); + if (outputPaths.count(bfd.output) == 0) + throw Error( + "derivation '%s' does not have an output named '%s'", + store.printStorePath(drvPath), + bfd.output); + auto & optPath = outputPaths.at(bfd.output); + if (!optPath) + throw MissingRealisation(bfd.drvPath->to_string(store), bfd.output); + return *optPath; + }, }, - [&](const SingleDerivedPath::Built & bfd) { - auto drvPath = resolveDerivedPath(store, *bfd.drvPath, evalStore_); - auto outputPaths = evalStore.queryPartialDerivationOutputMap(drvPath, evalStore_); - if (outputPaths.count(bfd.output) == 0) - throw Error("derivation '%s' does not have an output named '%s'", - store.printStorePath(drvPath), bfd.output); - auto & optPath = outputPaths.at(bfd.output); - if (!optPath) - throw MissingRealisation(bfd.drvPath->to_string(store), bfd.output); - return *optPath; - }, - }, req.raw()); + req.raw()); } - OutputPathMap resolveDerivedPath(Store & store, const DerivedPath::Built & bfd) { auto drvPath = resolveDerivedPath(store, *bfd.drvPath); auto outputMap = store.queryDerivationOutputMap(drvPath); - auto outputsLeft = std::visit(overloaded { - [&](const OutputsSpec::All &) { - return StringSet {}; + auto outputsLeft = std::visit( + overloaded{ + [&](const OutputsSpec::All &) { return StringSet{}; }, + [&](const OutputsSpec::Names & names) { return static_cast(names); }, }, - [&](const OutputsSpec::Names & names) { - return static_cast(names); - }, - }, bfd.outputs.raw); + bfd.outputs.raw); for (auto iter = outputMap.begin(); iter != outputMap.end();) { auto & outputName = iter->first; if (bfd.outputs.contains(outputName)) { @@ -471,10 +469,11 @@ OutputPathMap resolveDerivedPath(Store & store, const DerivedPath::Built & bfd) } } if (!outputsLeft.empty()) - throw Error("derivation '%s' does not have an outputs %s", + throw Error( + "derivation '%s' does not have an outputs %s", store.printStorePath(drvPath), concatStringsSep(", ", quoteStrings(std::get(bfd.outputs.raw)))); return outputMap; } -} +} // namespace nix diff --git a/src/libstore/names.cc b/src/libstore/names.cc index 998b9356a..263007e03 100644 --- a/src/libstore/names.cc +++ b/src/libstore/names.cc @@ -3,28 +3,25 @@ #include - namespace nix { - struct Regex { std::regex regex; }; - DrvName::DrvName() { name = ""; } - /* Parse a derivation name. The `name' part of a derivation name is everything up to but not including the first dash *not* followed by a letter. The `version' part is the rest (excluding the separating dash). E.g., `apache-httpd-2.0.48' is parsed to (`apache-httpd', '2.0.48'). 
*/ -DrvName::DrvName(std::string_view s) : hits(0) +DrvName::DrvName(std::string_view s) + : hits(0) { name = fullName = std::string(s); for (unsigned int i = 0; i < s.size(); ++i) { @@ -37,10 +34,7 @@ DrvName::DrvName(std::string_view s) : hits(0) } } - -DrvName::~DrvName() -{ } - +DrvName::~DrvName() {} bool DrvName::matches(const DrvName & n) { @@ -49,27 +43,30 @@ bool DrvName::matches(const DrvName & n) regex = std::make_unique(); regex->regex = std::regex(name, std::regex::extended); } - if (!std::regex_match(n.name, regex->regex)) return false; + if (!std::regex_match(n.name, regex->regex)) + return false; } - if (version != "" && version != n.version) return false; + if (version != "" && version != n.version) + return false; return true; } - -std::string_view nextComponent(std::string_view::const_iterator & p, - const std::string_view::const_iterator end) +std::string_view nextComponent(std::string_view::const_iterator & p, const std::string_view::const_iterator end) { /* Skip any dots and dashes (component separators). */ - while (p != end && (*p == '.' || *p == '-')) ++p; + while (p != end && (*p == '.' || *p == '-')) + ++p; - if (p == end) return ""; + if (p == end) + return ""; /* If the first character is a digit, consume the longest sequence of digits. Otherwise, consume the longest sequence of non-digit, non-separator characters. */ auto s = p; if (isdigit(*p)) - while (p != end && isdigit(*p)) p++; + while (p != end && isdigit(*p)) + p++; else while (p != end && (!isdigit(*p) && *p != '.' && *p != '-')) p++; @@ -77,23 +74,28 @@ std::string_view nextComponent(std::string_view::const_iterator & p, return {s, size_t(p - s)}; } - static bool componentsLT(const std::string_view c1, const std::string_view c2) { auto n1 = string2Int(c1); auto n2 = string2Int(c2); - if (n1 && n2) return *n1 < *n2; - else if (c1 == "" && n2) return true; - else if (c1 == "pre" && c2 != "pre") return true; - else if (c2 == "pre") return false; + if (n1 && n2) + return *n1 < *n2; + else if (c1 == "" && n2) + return true; + else if (c1 == "pre" && c2 != "pre") + return true; + else if (c2 == "pre") + return false; /* Assume that `2.3a' < `2.3.1'. 
*/ - else if (n2) return true; - else if (n1) return false; - else return c1 < c2; + else if (n2) + return true; + else if (n1) + return false; + else + return c1 < c2; } - std::strong_ordering compareVersions(const std::string_view v1, const std::string_view v2) { auto p1 = v1.begin(); @@ -102,14 +104,15 @@ std::strong_ordering compareVersions(const std::string_view v1, const std::strin while (p1 != v1.end() || p2 != v2.end()) { auto c1 = nextComponent(p1, v1.end()); auto c2 = nextComponent(p2, v2.end()); - if (componentsLT(c1, c2)) return std::strong_ordering::less; - else if (componentsLT(c2, c1)) return std::strong_ordering::greater; + if (componentsLT(c1, c2)) + return std::strong_ordering::less; + else if (componentsLT(c2, c1)) + return std::strong_ordering::greater; } return std::strong_ordering::equal; } - DrvNames drvNamesFromArgs(const Strings & opArgs) { DrvNames result; @@ -118,5 +121,4 @@ DrvNames drvNamesFromArgs(const Strings & opArgs) return result; } - -} +} // namespace nix diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc index 6aba68a36..63fe774c9 100644 --- a/src/libstore/nar-accessor.cc +++ b/src/libstore/nar-accessor.cc @@ -29,8 +29,10 @@ private: public: NarMemberConstructor(NarMember & nm, uint64_t & pos) - : narMember(nm), pos(pos) - { } + : narMember(nm) + , pos(pos) + { + } void isExecutable() override { @@ -43,8 +45,7 @@ public: narMember.stat.narOffset = pos; } - void operator () (std::string_view data) override - { } + void operator()(std::string_view data) override {} }; struct NarAccessor : public SourceAccessor @@ -67,18 +68,21 @@ struct NarAccessor : public SourceAccessor uint64_t pos = 0; NarIndexer(NarAccessor & acc, Source & source) - : acc(acc), source(source) - { } + : acc(acc) + , source(source) + { + } NarMember & createMember(const CanonPath & path, NarMember member) { size_t level = 0; for (auto _ : path) { - (void)_; + (void) _; ++level; } - while (parents.size() > level) parents.pop(); + while (parents.size() > level) + parents.pop(); if (parents.empty()) { acc.root = std::move(member); @@ -96,32 +100,23 @@ struct NarAccessor : public SourceAccessor void createDirectory(const CanonPath & path) override { - createMember(path, NarMember{ .stat = { - .type = Type::tDirectory, - .fileSize = 0, - .isExecutable = false, - .narOffset = 0 - } }); + createMember( + path, + NarMember{.stat = {.type = Type::tDirectory, .fileSize = 0, .isExecutable = false, .narOffset = 0}}); } void createRegularFile(const CanonPath & path, std::function func) override { - auto & nm = createMember(path, NarMember{ .stat = { - .type = Type::tRegular, - .fileSize = 0, - .isExecutable = false, - .narOffset = 0 - } }); - NarMemberConstructor nmc { nm, pos }; + auto & nm = createMember( + path, + NarMember{.stat = {.type = Type::tRegular, .fileSize = 0, .isExecutable = false, .narOffset = 0}}); + NarMemberConstructor nmc{nm, pos}; func(nmc); } void createSymlink(const CanonPath & path, const std::string & target) override { - createMember(path, - NarMember{ - .stat = {.type = Type::tSymlink}, - .target = target}); + createMember(path, NarMember{.stat = {.type = Type::tSymlink}, .target = target}); } size_t read(char * data, size_t len) override @@ -132,7 +127,8 @@ struct NarAccessor : public SourceAccessor } }; - NarAccessor(std::string && _nar) : nar(_nar) + NarAccessor(std::string && _nar) + : nar(_nar) { StringSource source(*nar); NarIndexer indexer(*this, source); @@ -157,7 +153,7 @@ struct NarAccessor : public SourceAccessor if (type == "directory") 
{ member.stat = {.type = Type::tDirectory}; - for (const auto &[name, function] : v["entries"].items()) { + for (const auto & [name, function] : v["entries"].items()) { recurse(member.children[name], function); } } else if (type == "regular") { @@ -165,12 +161,12 @@ struct NarAccessor : public SourceAccessor .type = Type::tRegular, .fileSize = v["size"], .isExecutable = v.value("executable", false), - .narOffset = v["narOffset"] - }; + .narOffset = v["narOffset"]}; } else if (type == "symlink") { member.stat = {.type = Type::tSymlink}; member.target = v.value("target", ""); - } else return; + } else + return; }; json v = json::parse(listing); @@ -182,16 +178,19 @@ struct NarAccessor : public SourceAccessor NarMember * current = &root; for (const auto & i : path) { - if (current->stat.type != Type::tDirectory) return nullptr; + if (current->stat.type != Type::tDirectory) + return nullptr; auto child = current->children.find(std::string(i)); - if (child == current->children.end()) return nullptr; + if (child == current->children.end()) + return nullptr; current = &child->second; } return current; } - NarMember & get(const CanonPath & path) { + NarMember & get(const CanonPath & path) + { auto result = find(path); if (!result) throw Error("NAR file does not contain path '%1%'", path); @@ -226,7 +225,8 @@ struct NarAccessor : public SourceAccessor if (i.stat.type != Type::tRegular) throw Error("path '%1%' inside NAR file is not a regular file", path); - if (getNarBytes) return getNarBytes(*i.stat.narOffset, *i.stat.fileSize); + if (getNarBytes) + return getNarBytes(*i.stat.narOffset, *i.stat.fileSize); assert(nar); return std::string(*nar, *i.stat.narOffset, *i.stat.fileSize); @@ -251,13 +251,13 @@ ref makeNarAccessor(Source & source) return make_ref(source); } -ref makeLazyNarAccessor(const std::string & listing, - GetNarBytes getNarBytes) +ref makeLazyNarAccessor(const std::string & listing, GetNarBytes getNarBytes) { return make_ref(listing, getNarBytes); } using nlohmann::json; + json listNar(ref accessor, const CanonPath & path, bool recurse) { auto st = accessor->lstat(path); @@ -278,7 +278,7 @@ json listNar(ref accessor, const CanonPath & path, bool recurse) obj["type"] = "directory"; { obj["entries"] = json::object(); - json &res2 = obj["entries"]; + json & res2 = obj["entries"]; for (const auto & [name, type] : accessor->readDirectory(path)) { if (recurse) { res2[name] = listNar(accessor, path / name, true); @@ -301,4 +301,4 @@ json listNar(ref accessor, const CanonPath & path, bool recurse) return obj; } -} +} // namespace nix diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index 5d72ba8ae..0350c874a 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -79,9 +79,8 @@ public: struct State { SQLite db; - SQLiteStmt insertCache, queryCache, insertNAR, insertMissingNAR, - queryNAR, insertRealisation, insertMissingRealisation, - queryRealisation, purgeCache; + SQLiteStmt insertCache, queryCache, insertNAR, insertMissingNAR, queryNAR, insertRealisation, + insertMissingRealisation, queryRealisation, purgeCache; std::map caches; }; @@ -99,35 +98,42 @@ public: state->db.exec(schema); - state->insertCache.create(state->db, + state->insertCache.create( + state->db, "insert into BinaryCaches(url, timestamp, storeDir, wantMassQuery, priority) values (?1, ?2, ?3, ?4, ?5) on conflict (url) do update set timestamp = ?2, storeDir = ?3, wantMassQuery = ?4, priority = ?5 returning id;"); - state->queryCache.create(state->db, 
+ state->queryCache.create( + state->db, "select id, storeDir, wantMassQuery, priority from BinaryCaches where url = ? and timestamp > ?"); - state->insertNAR.create(state->db, + state->insertNAR.create( + state->db, "insert or replace into NARs(cache, hashPart, namePart, url, compression, fileHash, fileSize, narHash, " "narSize, refs, deriver, sigs, ca, timestamp, present) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 1)"); - state->insertMissingNAR.create(state->db, - "insert or replace into NARs(cache, hashPart, timestamp, present) values (?, ?, ?, 0)"); + state->insertMissingNAR.create( + state->db, "insert or replace into NARs(cache, hashPart, timestamp, present) values (?, ?, ?, 0)"); - state->queryNAR.create(state->db, + state->queryNAR.create( + state->db, "select present, namePart, url, compression, fileHash, fileSize, narHash, narSize, refs, deriver, sigs, ca from NARs where cache = ? and hashPart = ? and ((present = 0 and timestamp > ?) or (present = 1 and timestamp > ?))"); - state->insertRealisation.create(state->db, + state->insertRealisation.create( + state->db, R"( insert or replace into Realisations(cache, outputId, content, timestamp) values (?, ?, ?, ?) )"); - state->insertMissingRealisation.create(state->db, + state->insertMissingRealisation.create( + state->db, R"( insert or replace into Realisations(cache, outputId, timestamp) values (?, ?, ?) )"); - state->queryRealisation.create(state->db, + state->queryRealisation.create( + state->db, R"( select content from Realisations where cache = ? and outputId = ? and @@ -143,20 +149,21 @@ public: auto queryLastPurge_(queryLastPurge.use()); if (!queryLastPurge_.next() || queryLastPurge_.getInt(0) < now - purgeInterval) { - SQLiteStmt(state->db, + SQLiteStmt( + state->db, "delete from NARs where ((present = 0 and timestamp < ?) or (present = 1 and timestamp < ?))") .use() // Use a minimum TTL to prevent --refresh from // nuking the entire disk cache. 
- (now - std::max(settings.ttlNegativeNarInfoCache.get(), 3600U)) - (now - std::max(settings.ttlPositiveNarInfoCache.get(), 30 * 24 * 3600U)) + (now - std::max(settings.ttlNegativeNarInfoCache.get(), 3600U))( + now - std::max(settings.ttlPositiveNarInfoCache.get(), 30 * 24 * 3600U)) .exec(); debug("deleted %d entries from the NAR info disk cache", sqlite3_changes(state->db)); - SQLiteStmt(state->db, - "insert or replace into LastPurge(dummy, value) values ('', ?)") - .use()(now).exec(); + SQLiteStmt(state->db, "insert or replace into LastPurge(dummy, value) values ('', ?)") + .use()(now) + .exec(); } }); } @@ -164,7 +171,8 @@ public: Cache & getCache(State & state, const std::string & uri) { auto i = state.caches.find(uri); - if (i == state.caches.end()) unreachable(); + if (i == state.caches.end()) + unreachable(); return i->second; } @@ -177,7 +185,7 @@ private: auto queryCache(state.queryCache.use()(uri)(time(0) - cacheInfoTtl)); if (!queryCache.next()) return std::nullopt; - auto cache = Cache { + auto cache = Cache{ .id = (int) queryCache.getInt(0), .storeDir = queryCache.getStr(1), .wantMassQuery = queryCache.getInt(2) != 0, @@ -202,7 +210,7 @@ public: if (cache) return cache->id; - Cache ret { + Cache ret{ .id = -1, // set below .storeDir = storeDir, .wantMassQuery = wantMassQuery, @@ -210,8 +218,10 @@ public: }; { - auto r(state->insertCache.use()(uri)(time(0))(storeDir)(wantMassQuery)(priority)); - if (!r.next()) { unreachable(); } + auto r(state->insertCache.use()(uri)(time(0))(storeDir) (wantMassQuery) (priority)); + if (!r.next()) { + unreachable(); + } ret.id = (int) r.getInt(0); } @@ -229,94 +239,80 @@ public: auto cache(queryCacheRaw(*state, uri)); if (!cache) return std::nullopt; - return CacheInfo { - .id = cache->id, - .wantMassQuery = cache->wantMassQuery, - .priority = cache->priority - }; + return CacheInfo{.id = cache->id, .wantMassQuery = cache->wantMassQuery, .priority = cache->priority}; }); } - std::pair> lookupNarInfo( - const std::string & uri, const std::string & hashPart) override + std::pair> + lookupNarInfo(const std::string & uri, const std::string & hashPart) override { return retrySQLite>>( [&]() -> std::pair> { - auto state(_state.lock()); + auto state(_state.lock()); - auto & cache(getCache(*state, uri)); + auto & cache(getCache(*state, uri)); - auto now = time(0); + auto now = time(0); - auto queryNAR(state->queryNAR.use() - (cache.id) - (hashPart) - (now - settings.ttlNegativeNarInfoCache) - (now - settings.ttlPositiveNarInfoCache)); + auto queryNAR(state->queryNAR.use()(cache.id)(hashPart) (now - settings.ttlNegativeNarInfoCache)( + now - settings.ttlPositiveNarInfoCache)); - if (!queryNAR.next()) - return {oUnknown, 0}; + if (!queryNAR.next()) + return {oUnknown, 0}; - if (!queryNAR.getInt(0)) - return {oInvalid, 0}; + if (!queryNAR.getInt(0)) + return {oInvalid, 0}; - auto namePart = queryNAR.getStr(1); - auto narInfo = make_ref( - StorePath(hashPart + "-" + namePart), - Hash::parseAnyPrefixed(queryNAR.getStr(6))); - narInfo->url = queryNAR.getStr(2); - narInfo->compression = queryNAR.getStr(3); - if (!queryNAR.isNull(4)) - narInfo->fileHash = Hash::parseAnyPrefixed(queryNAR.getStr(4)); - narInfo->fileSize = queryNAR.getInt(5); - narInfo->narSize = queryNAR.getInt(7); - for (auto & r : tokenizeString(queryNAR.getStr(8), " ")) - narInfo->references.insert(StorePath(r)); - if (!queryNAR.isNull(9)) - narInfo->deriver = StorePath(queryNAR.getStr(9)); - for (auto & sig : tokenizeString(queryNAR.getStr(10), " ")) - narInfo->sigs.insert(sig); - 
narInfo->ca = ContentAddress::parseOpt(queryNAR.getStr(11)); + auto namePart = queryNAR.getStr(1); + auto narInfo = + make_ref(StorePath(hashPart + "-" + namePart), Hash::parseAnyPrefixed(queryNAR.getStr(6))); + narInfo->url = queryNAR.getStr(2); + narInfo->compression = queryNAR.getStr(3); + if (!queryNAR.isNull(4)) + narInfo->fileHash = Hash::parseAnyPrefixed(queryNAR.getStr(4)); + narInfo->fileSize = queryNAR.getInt(5); + narInfo->narSize = queryNAR.getInt(7); + for (auto & r : tokenizeString(queryNAR.getStr(8), " ")) + narInfo->references.insert(StorePath(r)); + if (!queryNAR.isNull(9)) + narInfo->deriver = StorePath(queryNAR.getStr(9)); + for (auto & sig : tokenizeString(queryNAR.getStr(10), " ")) + narInfo->sigs.insert(sig); + narInfo->ca = ContentAddress::parseOpt(queryNAR.getStr(11)); - return {oValid, narInfo}; - }); + return {oValid, narInfo}; + }); } - std::pair> lookupRealisation( - const std::string & uri, const DrvOutput & id) override + std::pair> + lookupRealisation(const std::string & uri, const DrvOutput & id) override { return retrySQLite>>( [&]() -> std::pair> { - auto state(_state.lock()); + auto state(_state.lock()); - auto & cache(getCache(*state, uri)); + auto & cache(getCache(*state, uri)); - auto now = time(0); + auto now = time(0); - auto queryRealisation(state->queryRealisation.use() - (cache.id) - (id.to_string()) - (now - settings.ttlNegativeNarInfoCache) - (now - settings.ttlPositiveNarInfoCache)); + auto queryRealisation(state->queryRealisation.use()(cache.id)(id.to_string())( + now - settings.ttlNegativeNarInfoCache)(now - settings.ttlPositiveNarInfoCache)); - if (!queryRealisation.next()) - return {oUnknown, 0}; + if (!queryRealisation.next()) + return {oUnknown, 0}; - if (queryRealisation.isNull(0)) - return {oInvalid, 0}; + if (queryRealisation.isNull(0)) + return {oInvalid, 0}; - auto realisation = - std::make_shared(Realisation::fromJSON( - nlohmann::json::parse(queryRealisation.getStr(0)), - "Local disk cache")); + auto realisation = std::make_shared( + Realisation::fromJSON(nlohmann::json::parse(queryRealisation.getStr(0)), "Local disk cache")); - return {oValid, realisation}; - }); + return {oValid, realisation}; + }); } void upsertNarInfo( - const std::string & uri, const std::string & hashPart, - std::shared_ptr info) override + const std::string & uri, const std::string & hashPart, std::shared_ptr info) override { retrySQLite([&]() { auto state(_state.lock()); @@ -327,63 +323,44 @@ public: auto narInfo = std::dynamic_pointer_cast(info); - //assert(hashPart == storePathToHash(info->path)); + // assert(hashPart == storePathToHash(info->path)); - state->insertNAR.use() - (cache.id) - (hashPart) - (std::string(info->path.name())) - (narInfo ? narInfo->url : "", narInfo != 0) - (narInfo ? narInfo->compression : "", narInfo != 0) - (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(HashFormat::Nix32, true) : "", narInfo && narInfo->fileHash) - (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize) - (info->narHash.to_string(HashFormat::Nix32, true)) - (info->narSize) - (concatStringsSep(" ", info->shortRefs())) - (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver) - (concatStringsSep(" ", info->sigs)) - (renderContentAddress(info->ca)) - (time(0)).exec(); + state->insertNAR + .use()(cache.id)(hashPart) (std::string(info->path.name()))( + narInfo ? narInfo->url : "", narInfo != 0)(narInfo ? narInfo->compression : "", narInfo != 0)( + narInfo && narInfo->fileHash ? 
narInfo->fileHash->to_string(HashFormat::Nix32, true) : "", + narInfo && narInfo->fileHash)( + narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)(info->narHash.to_string( + HashFormat::Nix32, true))(info->narSize)(concatStringsSep(" ", info->shortRefs()))( + info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)( + concatStringsSep(" ", info->sigs))(renderContentAddress(info->ca))(time(0)) + .exec(); } else { - state->insertMissingNAR.use() - (cache.id) - (hashPart) - (time(0)).exec(); + state->insertMissingNAR.use()(cache.id)(hashPart) (time(0)).exec(); } }); } - void upsertRealisation( - const std::string & uri, - const Realisation & realisation) override + void upsertRealisation(const std::string & uri, const Realisation & realisation) override { retrySQLite([&]() { auto state(_state.lock()); auto & cache(getCache(*state, uri)); - state->insertRealisation.use() - (cache.id) - (realisation.id.to_string()) - (realisation.toJSON().dump()) - (time(0)).exec(); + state->insertRealisation.use()(cache.id)(realisation.id.to_string())(realisation.toJSON().dump())(time(0)) + .exec(); }); - } - virtual void upsertAbsentRealisation( - const std::string & uri, - const DrvOutput & id) override + virtual void upsertAbsentRealisation(const std::string & uri, const DrvOutput & id) override { retrySQLite([&]() { auto state(_state.lock()); auto & cache(getCache(*state, uri)); - state->insertMissingRealisation.use() - (cache.id) - (id.to_string()) - (time(0)).exec(); + state->insertMissingRealisation.use()(cache.id)(id.to_string())(time(0)).exec(); }); } }; @@ -399,4 +376,4 @@ ref getTestNarInfoDiskCache(Path dbPath) return make_ref(dbPath); } -} +} // namespace nix diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc index ef7af6126..783ec7d34 100644 --- a/src/libstore/nar-info.cc +++ b/src/libstore/nar-info.cc @@ -12,7 +12,9 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & unsigned line = 1; auto corrupt = [&](const char * reason) { - return Error("NAR info file '%1%' is corrupt: %2%", whence, + return Error( + "NAR info file '%1%' is corrupt: %2%", + whence, std::string(reason) + (line > 0 ? 
" at line " + std::to_string(line) : "")); }; @@ -31,20 +33,21 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & while (pos < s.size()) { size_t colon = s.find(':', pos); - if (colon == s.npos) throw corrupt("expecting ':'"); + if (colon == s.npos) + throw corrupt("expecting ':'"); std::string name(s, pos, colon - pos); size_t eol = s.find('\n', colon + 2); - if (eol == s.npos) throw corrupt("expecting '\\n'"); + if (eol == s.npos) + throw corrupt("expecting '\\n'"); std::string value(s, colon + 2, eol - colon - 2); if (name == "StorePath") { path = store.parseStorePath(value); havePath = true; - } - else if (name == "URL") + } else if (name == "URL") url = value; else if (name == "Compression") compression = value; @@ -52,32 +55,31 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & fileHash = parseHashField(value); else if (name == "FileSize") { auto n = string2Int(value); - if (!n) throw corrupt("invalid FileSize"); + if (!n) + throw corrupt("invalid FileSize"); fileSize = *n; - } - else if (name == "NarHash") { + } else if (name == "NarHash") { narHash = parseHashField(value); haveNarHash = true; - } - else if (name == "NarSize") { + } else if (name == "NarSize") { auto n = string2Int(value); - if (!n) throw corrupt("invalid NarSize"); + if (!n) + throw corrupt("invalid NarSize"); narSize = *n; - } - else if (name == "References") { + } else if (name == "References") { auto refs = tokenizeString(value, " "); - if (!references.empty()) throw corrupt("extra References"); + if (!references.empty()) + throw corrupt("extra References"); for (auto & r : refs) references.insert(StorePath(r)); - } - else if (name == "Deriver") { + } else if (name == "Deriver") { if (value != "unknown-deriver") deriver = StorePath(value); - } - else if (name == "Sig") + } else if (name == "Sig") sigs.insert(value); else if (name == "CA") { - if (ca) throw corrupt("extra CA"); + if (ca) + throw corrupt("extra CA"); // FIXME: allow blank ca or require skipping field? ca = ContentAddress::parseOpt(value); } @@ -86,16 +88,17 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & line += 1; } - if (compression == "") compression = "bzip2"; + if (compression == "") + compression = "bzip2"; if (!havePath || !haveNarHash || url.empty() || narSize == 0) { line = 0; // don't include line information in the error throw corrupt( - !havePath ? "StorePath missing" : - !haveNarHash ? "NarHash missing" : - url.empty() ? "URL missing" : - narSize == 0 ? "NarSize missing or zero" - : "?"); + !havePath ? "StorePath missing" + : !haveNarHash ? "NarHash missing" + : url.empty() ? "URL missing" + : narSize == 0 ? 
"NarSize missing or zero" + : "?"); } } @@ -127,10 +130,7 @@ std::string NarInfo::to_string(const Store & store) const return res; } -nlohmann::json NarInfo::toJSON( - const Store & store, - bool includeImpureInfo, - HashFormat hashFormat) const +nlohmann::json NarInfo::toJSON(const Store & store, bool includeImpureInfo, HashFormat hashFormat) const { using nlohmann::json; @@ -150,19 +150,14 @@ nlohmann::json NarInfo::toJSON( return jsonObject; } -NarInfo NarInfo::fromJSON( - const Store & store, - const StorePath & path, - const nlohmann::json & json) +NarInfo NarInfo::fromJSON(const Store & store, const StorePath & path, const nlohmann::json & json) { using nlohmann::detail::value_t; - NarInfo res { - ValidPathInfo { - path, - UnkeyedValidPathInfo::fromJSON(store, json), - } - }; + NarInfo res{ValidPathInfo{ + path, + UnkeyedValidPathInfo::fromJSON(store, json), + }}; if (json.contains("url")) res.url = getString(valueAt(json, "url")); @@ -171,9 +166,7 @@ NarInfo NarInfo::fromJSON( res.compression = getString(valueAt(json, "compression")); if (json.contains("downloadHash")) - res.fileHash = Hash::parseAny( - getString(valueAt(json, "downloadHash")), - std::nullopt); + res.fileHash = Hash::parseAny(getString(valueAt(json, "downloadHash")), std::nullopt); if (json.contains("downloadSize")) res.fileSize = getUnsigned(valueAt(json, "downloadSize")); @@ -181,4 +174,4 @@ NarInfo NarInfo::fromJSON( return res; } -} +} // namespace nix diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index e47c0707c..8073ee41b 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -17,7 +17,6 @@ namespace nix { - static void makeWritable(const Path & path) { auto st = lstat(path); @@ -25,30 +24,35 @@ static void makeWritable(const Path & path) throw SysError("changing writability of '%1%'", path); } - struct MakeReadOnly { Path path; - MakeReadOnly(const PathView path) : path(path) { } + + MakeReadOnly(const PathView path) + : path(path) + { + } + ~MakeReadOnly() { try { /* This will make the path read-only. */ - if (path != "") canonicaliseTimestampAndPermissions(path); + if (path != "") + canonicaliseTimestampAndPermissions(path); } catch (...) { ignoreExceptionInDestructor(); } } }; - LocalStore::InodeHash LocalStore::loadInodeHash() { debug("loading hash inodes in memory"); InodeHash inodeHash; AutoCloseDir dir(opendir(linksDir.c_str())); - if (!dir) throw SysError("opening directory '%1%'", linksDir); + if (!dir) + throw SysError("opening directory '%1%'", linksDir); struct dirent * dirent; while (errno = 0, dirent = readdir(dir.get())) { /* sic */ @@ -56,20 +60,21 @@ LocalStore::InodeHash LocalStore::loadInodeHash() // We don't care if we hit non-hash files, anything goes inodeHash.insert(dirent->d_ino); } - if (errno) throw SysError("reading directory '%1%'", linksDir); + if (errno) + throw SysError("reading directory '%1%'", linksDir); printMsg(lvlTalkative, "loaded %1% hash inodes", inodeHash.size()); return inodeHash; } - Strings LocalStore::readDirectoryIgnoringInodes(const Path & path, const InodeHash & inodeHash) { Strings names; AutoCloseDir dir(opendir(path.c_str())); - if (!dir) throw SysError("opening directory '%1%'", path); + if (!dir) + throw SysError("opening directory '%1%'", path); struct dirent * dirent; while (errno = 0, dirent = readdir(dir.get())) { /* sic */ @@ -81,17 +86,18 @@ Strings LocalStore::readDirectoryIgnoringInodes(const Path & path, const InodeHa } std::string name = dirent->d_name; - if (name == "." 
|| name == "..") continue; + if (name == "." || name == "..") + continue; names.push_back(name); } - if (errno) throw SysError("reading directory '%1%'", path); + if (errno) + throw SysError("reading directory '%1%'", path); return names; } - -void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, - const Path & path, InodeHash & inodeHash, RepairFlag repair) +void LocalStore::optimisePath_( + Activity * act, OptimiseStats & stats, const Path & path, InodeHash & inodeHash, RepairFlag repair) { checkInterrupt(); @@ -104,8 +110,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, See https://github.com/NixOS/nix/issues/1443 and https://github.com/NixOS/nix/pull/2230 for more discussion. */ - if (std::regex_search(path, std::regex("\\.app/Contents/.+$"))) - { + if (std::regex_search(path, std::regex("\\.app/Contents/.+$"))) { debug("'%1%' is not allowed to be linked in macOS", path); return; } @@ -123,7 +128,8 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, #if CAN_LINK_SYMLINK && !S_ISLNK(st.st_mode) #endif - ) return; + ) + return; /* Sometimes SNAFUs can cause files in the Nix store to be modified, in particular when running programs as root under @@ -152,7 +158,9 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, Hash hash = ({ hashPath( {make_ref(), CanonPath(path)}, - FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256).first; + FileSerialisationMethod::NixArchive, + HashAlgorithm::SHA256) + .first; }); debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true)); @@ -162,17 +170,18 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, /* Maybe delete the link, if it has been corrupted. */ if (std::filesystem::exists(std::filesystem::symlink_status(linkPath))) { auto stLink = lstat(linkPath.string()); - if (st.st_size != stLink.st_size - || (repair && hash != ({ - hashPath( - PosixSourceAccessor::createAtRoot(linkPath), - FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256).first; - }))) - { + if (st.st_size != stLink.st_size || (repair && hash != ({ + hashPath( + PosixSourceAccessor::createAtRoot(linkPath), + FileSerialisationMethod::NixArchive, + HashAlgorithm::SHA256) + .first; + }))) { // XXX: Consider overwriting linkPath with our valid version. warn("removing corrupted link %s", linkPath); - warn("There may be more corrupted paths." - "\nYou should run `nix-store --verify --check-contents --repair` to fix them all"); + warn( + "There may be more corrupted paths." + "\nYou should run `nix-store --verify --check-contents --repair` to fix them all"); std::filesystem::remove(linkPath); } } @@ -197,7 +206,8 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, return; } - else throw; + else + throw; } } @@ -217,7 +227,8 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, permissions). */ const Path dirOfPath(dirOf(path)); bool mustToggle = dirOfPath != config->realStoreDir.get(); - if (mustToggle) makeWritable(dirOfPath); + if (mustToggle) + makeWritable(dirOfPath); /* When we're done, make the directory read-only again and reset its timestamp back to 0. 
*/ @@ -245,7 +256,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, std::filesystem::rename(tempLink, path); } catch (std::filesystem::filesystem_error & e) { std::filesystem::remove(tempLink); - printError("unable to unlink '%1%'", tempLink); + printError("unable to unlink '%1%'", tempLink); if (e.code() == std::errc::too_many_links) { /* Some filesystems generate too many links on the rename, rather than on the original link. (Probably it @@ -261,14 +272,16 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, stats.bytesFreed += st.st_size; if (act) - act->result(resFileLinked, st.st_size + act->result( + resFileLinked, + st.st_size #ifndef _WIN32 - , st.st_blocks + , + st.st_blocks #endif - ); + ); } - void LocalStore::optimiseStore(OptimiseStats & stats) { Activity act(*logger, actOptimiseStore); @@ -282,7 +295,8 @@ void LocalStore::optimiseStore(OptimiseStats & stats) for (auto & i : paths) { addTempRoot(i); - if (!isValidPath(i)) continue; /* path was GC'ed, probably */ + if (!isValidPath(i)) + continue; /* path was GC'ed, probably */ { Activity act(*logger, lvlTalkative, actUnknown, fmt("optimising path '%s'", printStorePath(i))); optimisePath_(&act, stats, config->realStoreDir + "/" + std::string(i.to_string()), inodeHash, NoRepair); @@ -298,9 +312,7 @@ void LocalStore::optimiseStore() optimiseStore(stats); - printInfo("%s freed by hard-linking %d files", - showBytes(stats.bytesFreed), - stats.filesLinked); + printInfo("%s freed by hard-linking %d files", showBytes(stats.bytesFreed), stats.filesLinked); } void LocalStore::optimisePath(const Path & path, RepairFlag repair) @@ -308,8 +320,8 @@ void LocalStore::optimisePath(const Path & path, RepairFlag repair) OptimiseStats stats; InodeHash inodeHash; - if (settings.autoOptimiseStore) optimisePath_(nullptr, stats, path, inodeHash, repair); + if (settings.autoOptimiseStore) + optimisePath_(nullptr, stats, path, inodeHash, repair); } - -} +} // namespace nix diff --git a/src/libstore/outputs-spec.cc b/src/libstore/outputs-spec.cc index 28fe45de9..7f73c7d35 100644 --- a/src/libstore/outputs-spec.cc +++ b/src/libstore/outputs-spec.cc @@ -11,39 +11,33 @@ namespace nix { bool OutputsSpec::contains(const std::string & outputName) const { - return std::visit(overloaded { - [&](const OutputsSpec::All &) { - return true; + return std::visit( + overloaded{ + [&](const OutputsSpec::All &) { return true; }, + [&](const OutputsSpec::Names & outputNames) { return outputNames.count(outputName) > 0; }, }, - [&](const OutputsSpec::Names & outputNames) { - return outputNames.count(outputName) > 0; - }, - }, raw); + raw); } -static std::string outputSpecRegexStr = - regex::either( - regex::group(R"(\*)"), - regex::group(regex::list(nameRegexStr))); +static std::string outputSpecRegexStr = regex::either(regex::group(R"(\*)"), regex::group(regex::list(nameRegexStr))); std::optional OutputsSpec::parseOpt(std::string_view s) { - static std::regex regex(std::string { outputSpecRegexStr }); + static std::regex regex(std::string{outputSpecRegexStr}); std::cmatch match; if (!std::regex_match(s.cbegin(), s.cend(), match, regex)) return std::nullopt; if (match[1].matched) - return { OutputsSpec::All {} }; + return {OutputsSpec::All{}}; if (match[2].matched) - return OutputsSpec::Names { tokenizeString({match[2].first, match[2].second}, ",") }; + return OutputsSpec::Names{tokenizeString({match[2].first, match[2].second}, ",")}; assert(false); } - OutputsSpec OutputsSpec::parse(std::string_view s) { std::optional spec = 
parseOpt(s); @@ -52,21 +46,19 @@ OutputsSpec OutputsSpec::parse(std::string_view s) return std::move(*spec); } - std::optional> ExtendedOutputsSpec::parseOpt(std::string_view s) { auto found = s.rfind('^'); if (found == std::string::npos) - return std::pair { s, ExtendedOutputsSpec::Default {} }; + return std::pair{s, ExtendedOutputsSpec::Default{}}; auto specOpt = OutputsSpec::parseOpt(s.substr(found + 1)); if (!specOpt) return std::nullopt; - return std::pair { s.substr(0, found), ExtendedOutputsSpec::Explicit { std::move(*specOpt) } }; + return std::pair{s.substr(0, found), ExtendedOutputsSpec::Explicit{std::move(*specOpt)}}; } - std::pair ExtendedOutputsSpec::parse(std::string_view s) { std::optional spec = parseOpt(s); @@ -75,79 +67,73 @@ std::pair ExtendedOutputsSpec::parse(std: return *spec; } - std::string OutputsSpec::to_string() const { - return std::visit(overloaded { - [&](const OutputsSpec::All &) -> std::string { - return "*"; + return std::visit( + overloaded{ + [&](const OutputsSpec::All &) -> std::string { return "*"; }, + [&](const OutputsSpec::Names & outputNames) -> std::string { return concatStringsSep(",", outputNames); }, }, - [&](const OutputsSpec::Names & outputNames) -> std::string { - return concatStringsSep(",", outputNames); - }, - }, raw); + raw); } - std::string ExtendedOutputsSpec::to_string() const { - return std::visit(overloaded { - [&](const ExtendedOutputsSpec::Default &) -> std::string { - return ""; + return std::visit( + overloaded{ + [&](const ExtendedOutputsSpec::Default &) -> std::string { return ""; }, + [&](const ExtendedOutputsSpec::Explicit & outputSpec) -> std::string { + return "^" + outputSpec.to_string(); + }, }, - [&](const ExtendedOutputsSpec::Explicit & outputSpec) -> std::string { - return "^" + outputSpec.to_string(); - }, - }, raw); + raw); } - OutputsSpec OutputsSpec::union_(const OutputsSpec & that) const { - return std::visit(overloaded { - [&](const OutputsSpec::All &) -> OutputsSpec { - return OutputsSpec::All { }; + return std::visit( + overloaded{ + [&](const OutputsSpec::All &) -> OutputsSpec { return OutputsSpec::All{}; }, + [&](const OutputsSpec::Names & theseNames) -> OutputsSpec { + return std::visit( + overloaded{ + [&](const OutputsSpec::All &) -> OutputsSpec { return OutputsSpec::All{}; }, + [&](const OutputsSpec::Names & thoseNames) -> OutputsSpec { + OutputsSpec::Names ret = theseNames; + ret.insert(thoseNames.begin(), thoseNames.end()); + return ret; + }, + }, + that.raw); + }, }, - [&](const OutputsSpec::Names & theseNames) -> OutputsSpec { - return std::visit(overloaded { - [&](const OutputsSpec::All &) -> OutputsSpec { - return OutputsSpec::All {}; - }, - [&](const OutputsSpec::Names & thoseNames) -> OutputsSpec { - OutputsSpec::Names ret = theseNames; - ret.insert(thoseNames.begin(), thoseNames.end()); - return ret; - }, - }, that.raw); - }, - }, raw); + raw); } - bool OutputsSpec::isSubsetOf(const OutputsSpec & that) const { - return std::visit(overloaded { - [&](const OutputsSpec::All &) { - return true; + return std::visit( + overloaded{ + [&](const OutputsSpec::All &) { return true; }, + [&](const OutputsSpec::Names & thoseNames) { + return std::visit( + overloaded{ + [&](const OutputsSpec::All &) { return false; }, + [&](const OutputsSpec::Names & theseNames) { + bool ret = true; + for (auto & o : theseNames) + if (thoseNames.count(o) == 0) + ret = false; + return ret; + }, + }, + raw); + }, }, - [&](const OutputsSpec::Names & thoseNames) { - return std::visit(overloaded { - [&](const OutputsSpec::All &) { 
- return false; - }, - [&](const OutputsSpec::Names & theseNames) { - bool ret = true; - for (auto & o : theseNames) - if (thoseNames.count(o) == 0) - ret = false; - return ret; - }, - }, raw); - }, - }, that.raw); + that.raw); } -} +} // namespace nix namespace nlohmann { @@ -159,44 +145,40 @@ OutputsSpec adl_serializer::from_json(const json & json) { auto names = json.get(); if (names == StringSet({"*"})) - return OutputsSpec::All {}; + return OutputsSpec::All{}; else - return OutputsSpec::Names { std::move(names) }; + return OutputsSpec::Names{std::move(names)}; } void adl_serializer::to_json(json & json, OutputsSpec t) { - std::visit(overloaded { - [&](const OutputsSpec::All &) { - json = std::vector({"*"}); + std::visit( + overloaded{ + [&](const OutputsSpec::All &) { json = std::vector({"*"}); }, + [&](const OutputsSpec::Names & names) { json = names; }, }, - [&](const OutputsSpec::Names & names) { - json = names; - }, - }, t.raw); + t.raw); } ExtendedOutputsSpec adl_serializer::from_json(const json & json) { if (json.is_null()) - return ExtendedOutputsSpec::Default {}; + return ExtendedOutputsSpec::Default{}; else { - return ExtendedOutputsSpec::Explicit { json.get() }; + return ExtendedOutputsSpec::Explicit{json.get()}; } } void adl_serializer::to_json(json & json, ExtendedOutputsSpec t) { - std::visit(overloaded { - [&](const ExtendedOutputsSpec::Default &) { - json = nullptr; + std::visit( + overloaded{ + [&](const ExtendedOutputsSpec::Default &) { json = nullptr; }, + [&](const ExtendedOutputsSpec::Explicit & e) { adl_serializer::to_json(json, e); }, }, - [&](const ExtendedOutputsSpec::Explicit & e) { - adl_serializer::to_json(json, e); - }, - }, t.raw); + t.raw); } #endif -} +} // namespace nlohmann diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc index d6453c6db..5c6deb87a 100644 --- a/src/libstore/parsed-derivations.cc +++ b/src/libstore/parsed-derivations.cc @@ -14,7 +14,7 @@ std::optional StructuredAttrs::tryParse(const StringPairs & env auto jsonAttr = env.find("__json"); if (jsonAttr != env.end()) { try { - return StructuredAttrs { + return StructuredAttrs{ .structuredAttrs = nlohmann::json::parse(jsonAttr->second), }; } catch (std::exception & e) { @@ -36,9 +36,7 @@ static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*"); * mechanism to allow this to evolve again and get back in sync, but for * now we must not change - not even extend - the behavior. 
*/ -static nlohmann::json pathInfoToJSON( - Store & store, - const StorePathSet & storePaths) +static nlohmann::json pathInfoToJSON(Store & store, const StorePathSet & storePaths) { using nlohmann::json; @@ -100,8 +98,7 @@ nlohmann::json StructuredAttrs::prepareStructuredAttrs( StorePathSet storePaths; for (auto & p : inputPaths) storePaths.insert(store.toStorePath(p).first); - json[key] = pathInfoToJSON(store, - store.exportReferences(storePaths, storePaths)); + json[key] = pathInfoToJSON(store, store.exportReferences(storePaths, storePaths)); } return json; @@ -133,7 +130,8 @@ std::string StructuredAttrs::writeShell(const nlohmann::json & json) for (auto & [key, value] : json.items()) { - if (!std::regex_match(key, shVarName)) continue; + if (!std::regex_match(key, shVarName)) + continue; auto s = handleSimpleType(value); if (s) @@ -145,8 +143,12 @@ std::string StructuredAttrs::writeShell(const nlohmann::json & json) for (auto & value2 : value) { auto s3 = handleSimpleType(value2); - if (!s3) { good = false; break; } - s2 += *s3; s2 += ' '; + if (!s3) { + good = false; + break; + } + s2 += *s3; + s2 += ' '; } if (good) @@ -159,7 +161,10 @@ std::string StructuredAttrs::writeShell(const nlohmann::json & json) for (auto & [key2, value2] : value.items()) { auto s3 = handleSimpleType(value2); - if (!s3) { good = false; break; } + if (!s3) { + good = false; + break; + } s2 += fmt("[%s]=%s ", escapeShellArgAlways(key2), *s3); } @@ -170,4 +175,4 @@ std::string StructuredAttrs::writeShell(const nlohmann::json & json) return jsonSh; } -} +} // namespace nix diff --git a/src/libstore/path-info.cc b/src/libstore/path-info.cc index 175146435..ad4123e8f 100644 --- a/src/libstore/path-info.cc +++ b/src/libstore/path-info.cc @@ -17,7 +17,7 @@ GENERATE_CMP_EXT( me->references, me->registrationTime, me->narSize, - //me->id, + // me->id, me->ultimate, me->sigs, me->ca); @@ -25,16 +25,12 @@ GENERATE_CMP_EXT( std::string ValidPathInfo::fingerprint(const Store & store) const { if (narSize == 0) - throw Error("cannot calculate fingerprint of path '%s' because its size is not known", - store.printStorePath(path)); - return - "1;" + store.printStorePath(path) + ";" - + narHash.to_string(HashFormat::Nix32, true) + ";" - + std::to_string(narSize) + ";" - + concatStringsSep(",", store.printStorePathSet(references)); + throw Error( + "cannot calculate fingerprint of path '%s' because its size is not known", store.printStorePath(path)); + return "1;" + store.printStorePath(path) + ";" + narHash.to_string(HashFormat::Nix32, true) + ";" + + std::to_string(narSize) + ";" + concatStringsSep(",", store.printStorePathSet(references)); } - void ValidPathInfo::sign(const Store & store, const Signer & signer) { sigs.insert(signer.signDetached(fingerprint(store))); @@ -43,46 +39,45 @@ void ValidPathInfo::sign(const Store & store, const Signer & signer) void ValidPathInfo::sign(const Store & store, const std::vector> & signers) { auto fingerprint = this->fingerprint(store); - for (auto & signer: signers) { + for (auto & signer : signers) { sigs.insert(signer->signDetached(fingerprint)); } } std::optional ValidPathInfo::contentAddressWithReferences() const { - if (! 
ca) + if (!ca) return std::nullopt; switch (ca->method.raw) { - case ContentAddressMethod::Raw::Text: - { - assert(references.count(path) == 0); - return TextInfo { - .hash = ca->hash, - .references = references, - }; - } + case ContentAddressMethod::Raw::Text: { + assert(references.count(path) == 0); + return TextInfo{ + .hash = ca->hash, + .references = references, + }; + } - case ContentAddressMethod::Raw::Flat: - case ContentAddressMethod::Raw::NixArchive: - case ContentAddressMethod::Raw::Git: - default: - { - auto refs = references; - bool hasSelfReference = false; - if (refs.count(path)) { - hasSelfReference = true; - refs.erase(path); - } - return FixedOutputInfo { - .method = ca->method.getFileIngestionMethod(), - .hash = ca->hash, - .references = { + case ContentAddressMethod::Raw::Flat: + case ContentAddressMethod::Raw::NixArchive: + case ContentAddressMethod::Raw::Git: + default: { + auto refs = references; + bool hasSelfReference = false; + if (refs.count(path)) { + hasSelfReference = true; + refs.erase(path); + } + return FixedOutputInfo{ + .method = ca->method.getFileIngestionMethod(), + .hash = ca->hash, + .references = + { .others = std::move(refs), .self = hasSelfReference, }, - }; - } + }; + } } } @@ -90,7 +85,7 @@ bool ValidPathInfo::isContentAddressed(const Store & store) const { auto fullCaOpt = contentAddressWithReferences(); - if (! fullCaOpt) + if (!fullCaOpt) return false; auto caPath = store.makeFixedOutputPathFromCA(path.name(), *fullCaOpt); @@ -103,10 +98,10 @@ bool ValidPathInfo::isContentAddressed(const Store & store) const return res; } - size_t ValidPathInfo::checkSignatures(const Store & store, const PublicKeys & publicKeys) const { - if (isContentAddressed(store)) return maxSigs; + if (isContentAddressed(store)) + return maxSigs; size_t good = 0; for (auto & sig : sigs) @@ -115,13 +110,11 @@ size_t ValidPathInfo::checkSignatures(const Store & store, const PublicKeys & pu return good; } - bool ValidPathInfo::checkSignature(const Store & store, const PublicKeys & publicKeys, const std::string & sig) const { return verifyDetached(fingerprint(store), sig, publicKeys); } - Strings ValidPathInfo::shortRefs() const { Strings refs; @@ -131,34 +124,27 @@ Strings ValidPathInfo::shortRefs() const } ValidPathInfo::ValidPathInfo( - const Store & store, - std::string_view name, - ContentAddressWithReferences && ca, - Hash narHash) - : UnkeyedValidPathInfo(narHash) - , path(store.makeFixedOutputPathFromCA(name, ca)) + const Store & store, std::string_view name, ContentAddressWithReferences && ca, Hash narHash) + : UnkeyedValidPathInfo(narHash) + , path(store.makeFixedOutputPathFromCA(name, ca)) { - this->ca = ContentAddress { + this->ca = ContentAddress{ .method = ca.getMethod(), .hash = ca.getHash(), }; - std::visit(overloaded { - [this](TextInfo && ti) { - this->references = std::move(ti.references); + std::visit( + overloaded{ + [this](TextInfo && ti) { this->references = std::move(ti.references); }, + [this](FixedOutputInfo && foi) { + this->references = std::move(foi.references.others); + if (foi.references.self) + this->references.insert(path); + }, }, - [this](FixedOutputInfo && foi) { - this->references = std::move(foi.references.others); - if (foi.references.self) - this->references.insert(path); - }, - }, std::move(ca).raw); + std::move(ca).raw); } - -nlohmann::json UnkeyedValidPathInfo::toJSON( - const Store & store, - bool includeImpureInfo, - HashFormat hashFormat) const +nlohmann::json UnkeyedValidPathInfo::toJSON(const Store & store, bool 
includeImpureInfo, HashFormat hashFormat) const { using nlohmann::json; @@ -173,12 +159,12 @@ nlohmann::json UnkeyedValidPathInfo::toJSON( jsonRefs.emplace_back(store.printStorePath(ref)); } - jsonObject["ca"] = ca ? (std::optional { renderContentAddress(*ca) }) : std::nullopt; + jsonObject["ca"] = ca ? (std::optional{renderContentAddress(*ca)}) : std::nullopt; if (includeImpureInfo) { - jsonObject["deriver"] = deriver ? (std::optional { store.printStorePath(*deriver) }) : std::nullopt; + jsonObject["deriver"] = deriver ? (std::optional{store.printStorePath(*deriver)}) : std::nullopt; - jsonObject["registrationTime"] = registrationTime ? (std::optional { registrationTime }) : std::nullopt; + jsonObject["registrationTime"] = registrationTime ? (std::optional{registrationTime}) : std::nullopt; jsonObject["ultimate"] = ultimate; @@ -190,11 +176,9 @@ nlohmann::json UnkeyedValidPathInfo::toJSON( return jsonObject; } -UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON( - const Store & store, - const nlohmann::json & _json) +UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const Store & store, const nlohmann::json & _json) { - UnkeyedValidPathInfo res { + UnkeyedValidPathInfo res{ Hash(Hash::dummy), }; @@ -205,8 +189,7 @@ UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON( try { auto references = getStringList(valueAt(json, "references")); for (auto & input : references) - res.references.insert(store.parseStorePath(static_cast -(input))); + res.references.insert(store.parseStorePath(static_cast(input))); } catch (Error & e) { e.addTrace({}, "while reading key 'references'"); throw; @@ -235,4 +218,4 @@ UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON( return res; } -} +} // namespace nix diff --git a/src/libstore/path-references.cc b/src/libstore/path-references.cc index c06647eb1..2c71f437f 100644 --- a/src/libstore/path-references.cc +++ b/src/libstore/path-references.cc @@ -7,14 +7,13 @@ #include #include - namespace nix { - PathRefScanSink::PathRefScanSink(StringSet && hashes, std::map && backMap) : RefScanSink(std::move(hashes)) , backMap(std::move(backMap)) -{ } +{ +} PathRefScanSink PathRefScanSink::fromPaths(const StorePathSet & refs) { @@ -44,24 +43,18 @@ StorePathSet PathRefScanSink::getResultPaths() return found; } - -std::pair scanForReferences( - const std::string & path, - const StorePathSet & refs) +std::pair scanForReferences(const std::string & path, const StorePathSet & refs) { - HashSink hashSink { HashAlgorithm::SHA256 }; + HashSink hashSink{HashAlgorithm::SHA256}; auto found = scanForReferences(hashSink, path, refs); auto hash = hashSink.finish(); return std::pair(found, hash); } -StorePathSet scanForReferences( - Sink & toTee, - const Path & path, - const StorePathSet & refs) +StorePathSet scanForReferences(Sink & toTee, const Path & path, const StorePathSet & refs) { PathRefScanSink refsSink = PathRefScanSink::fromPaths(refs); - TeeSink sink { refsSink, toTee }; + TeeSink sink{refsSink, toTee}; /* Look for the hashes in the NAR dump of the path. 
*/ dumpPath(path, sink); @@ -69,4 +62,4 @@ StorePathSet scanForReferences( return refsSink.getResultPaths(); } -} +} // namespace nix diff --git a/src/libstore/path-with-outputs.cc b/src/libstore/path-with-outputs.cc index f3fc534ef..4309ceac5 100644 --- a/src/libstore/path-with-outputs.cc +++ b/src/libstore/path-with-outputs.cc @@ -4,101 +4,96 @@ #include "nix/store/store-api.hh" #include "nix/util/strings.hh" - namespace nix { std::string StorePathWithOutputs::to_string(const StoreDirConfig & store) const { - return outputs.empty() - ? store.printStorePath(path) - : store.printStorePath(path) + "!" + concatStringsSep(",", outputs); + return outputs.empty() ? store.printStorePath(path) + : store.printStorePath(path) + "!" + concatStringsSep(",", outputs); } - DerivedPath StorePathWithOutputs::toDerivedPath() const { if (!outputs.empty()) { - return DerivedPath::Built { + return DerivedPath::Built{ .drvPath = makeConstantStorePathRef(path), - .outputs = OutputsSpec::Names { outputs }, + .outputs = OutputsSpec::Names{outputs}, }; } else if (path.isDerivation()) { assert(outputs.empty()); - return DerivedPath::Built { + return DerivedPath::Built{ .drvPath = makeConstantStorePathRef(path), - .outputs = OutputsSpec::All { }, + .outputs = OutputsSpec::All{}, }; } else { - return DerivedPath::Opaque { path }; + return DerivedPath::Opaque{path}; } } - std::vector toDerivedPaths(const std::vector ss) { std::vector reqs; reqs.reserve(ss.size()); - for (auto & s : ss) reqs.push_back(s.toDerivedPath()); + for (auto & s : ss) + reqs.push_back(s.toDerivedPath()); return reqs; } - StorePathWithOutputs::ParseResult StorePathWithOutputs::tryFromDerivedPath(const DerivedPath & p) { - return std::visit(overloaded { - [&](const DerivedPath::Opaque & bo) -> StorePathWithOutputs::ParseResult { - if (bo.path.isDerivation()) { - // drv path gets interpreted as "build", not "get drv file itself" - return bo.path; - } - return StorePathWithOutputs { bo.path }; + return std::visit( + overloaded{ + [&](const DerivedPath::Opaque & bo) -> StorePathWithOutputs::ParseResult { + if (bo.path.isDerivation()) { + // drv path gets interpreted as "build", not "get drv file itself" + return bo.path; + } + return StorePathWithOutputs{bo.path}; + }, + [&](const DerivedPath::Built & bfd) -> StorePathWithOutputs::ParseResult { + return std::visit( + overloaded{ + [&](const SingleDerivedPath::Opaque & bo) -> StorePathWithOutputs::ParseResult { + return StorePathWithOutputs{ + .path = bo.path, + // Use legacy encoding of wildcard as empty set + .outputs = std::visit( + overloaded{ + [&](const OutputsSpec::All &) -> StringSet { return {}; }, + [&](const OutputsSpec::Names & outputs) { + return static_cast(outputs); + }, + }, + bfd.outputs.raw), + }; + }, + [&](const SingleDerivedPath::Built &) -> StorePathWithOutputs::ParseResult { + return std::monostate{}; + }, + }, + bfd.drvPath->raw()); + }, }, - [&](const DerivedPath::Built & bfd) -> StorePathWithOutputs::ParseResult { - return std::visit(overloaded { - [&](const SingleDerivedPath::Opaque & bo) -> StorePathWithOutputs::ParseResult { - return StorePathWithOutputs { - .path = bo.path, - // Use legacy encoding of wildcard as empty set - .outputs = std::visit(overloaded { - [&](const OutputsSpec::All &) -> StringSet { - return {}; - }, - [&](const OutputsSpec::Names & outputs) { - return static_cast(outputs); - }, - }, bfd.outputs.raw), - }; - }, - [&](const SingleDerivedPath::Built &) -> StorePathWithOutputs::ParseResult { - return std::monostate {}; - }, - }, bfd.drvPath->raw()); 
- }, - }, p.raw()); + p.raw()); } - std::pair parsePathWithOutputs(std::string_view s) { size_t n = s.find("!"); - return n == s.npos - ? std::make_pair(s, StringSet()) - : std::make_pair(s.substr(0, n), - tokenizeString(s.substr(n + 1), ",")); + return n == s.npos ? std::make_pair(s, StringSet()) + : std::make_pair(s.substr(0, n), tokenizeString(s.substr(n + 1), ",")); } - StorePathWithOutputs parsePathWithOutputs(const StoreDirConfig & store, std::string_view pathWithOutputs) { auto [path, outputs] = parsePathWithOutputs(pathWithOutputs); - return StorePathWithOutputs { store.parseStorePath(path), std::move(outputs) }; + return StorePathWithOutputs{store.parseStorePath(path), std::move(outputs)}; } - StorePathWithOutputs followLinksToStorePathWithOutputs(const Store & store, std::string_view pathWithOutputs) { auto [path, outputs] = parsePathWithOutputs(pathWithOutputs); - return StorePathWithOutputs { store.followLinksToStorePath(path), std::move(outputs) }; + return StorePathWithOutputs{store.followLinksToStorePath(path), std::move(outputs)}; } -} +} // namespace nix diff --git a/src/libstore/path.cc b/src/libstore/path.cc index d989b1caa..3f7745288 100644 --- a/src/libstore/path.cc +++ b/src/libstore/path.cc @@ -14,19 +14,19 @@ void checkName(std::string_view name) if (name.size() == 1) throw BadStorePathName("name '%s' is not valid", name); if (name[1] == '-') - throw BadStorePathName("name '%s' is not valid: first dash-separated component must not be '%s'", name, "."); + throw BadStorePathName( + "name '%s' is not valid: first dash-separated component must not be '%s'", name, "."); if (name[1] == '.') { if (name.size() == 2) throw BadStorePathName("name '%s' is not valid", name); if (name[2] == '-') - throw BadStorePathName("name '%s' is not valid: first dash-separated component must not be '%s'", name, ".."); + throw BadStorePathName( + "name '%s' is not valid: first dash-separated component must not be '%s'", name, ".."); } } for (auto c : name) - if (!((c >= '0' && c <= '9') - || (c >= 'a' && c <= 'z') - || (c >= 'A' && c <= 'Z') - || c == '+' || c == '-' || c == '.' || c == '_' || c == '?' || c == '=')) + if (!((c >= '0' && c <= '9') || (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '+' || c == '-' + || c == '.' || c == '_' || c == '?' 
|| c == '=')) throw BadStorePathName("name '%s' contains illegal character '%s'", name, c); } @@ -45,8 +45,7 @@ StorePath::StorePath(std::string_view _baseName) if (baseName.size() < HashLen + 1) throw BadStorePath("'%s' is too short to be a valid store path", baseName); for (auto c : hashPart()) - if (c == 'e' || c == 'o' || c == 'u' || c == 't' - || !((c >= '0' && c <= '9') || (c >= 'a' && c <= 'z'))) + if (c == 'e' || c == 'o' || c == 'u' || c == 't' || !((c >= '0' && c <= '9') || (c >= 'a' && c <= 'z'))) throw BadStorePath("store path '%s' contains illegal base-32 character '%s'", baseName, c); checkPathName(baseName, name()); } @@ -111,7 +110,8 @@ bool MixStoreDirMethods::isStorePath(std::string_view path) const StorePathSet MixStoreDirMethods::parseStorePathSet(const PathSet & paths) const { StorePathSet res; - for (auto & i : paths) res.insert(parseStorePath(i)); + for (auto & i : paths) + res.insert(parseStorePath(i)); return res; } @@ -123,8 +123,9 @@ std::string MixStoreDirMethods::printStorePath(const StorePath & path) const PathSet MixStoreDirMethods::printStorePathSet(const StorePathSet & paths) const { PathSet res; - for (auto & i : paths) res.insert(printStorePath(i)); + for (auto & i : paths) + res.insert(printStorePath(i)); return res; } -} +} // namespace nix diff --git a/src/libstore/pathlocks.cc b/src/libstore/pathlocks.cc index 34acfb02d..068c65625 100644 --- a/src/libstore/pathlocks.cc +++ b/src/libstore/pathlocks.cc @@ -6,7 +6,6 @@ #include #include - namespace nix { PathLocks::PathLocks() @@ -14,14 +13,12 @@ PathLocks::PathLocks() { } - PathLocks::PathLocks(const PathSet & paths, const std::string & waitMsg) : deletePaths(false) { lockPaths(paths, waitMsg); } - PathLocks::~PathLocks() { try { @@ -31,11 +28,9 @@ PathLocks::~PathLocks() } } - void PathLocks::setDeletion(bool deletePaths) { this->deletePaths = deletePaths; } - -} +} // namespace nix diff --git a/src/libstore/posix-fs-canonicalise.cc b/src/libstore/posix-fs-canonicalise.cc index 792fe5c76..2484d51a6 100644 --- a/src/libstore/posix-fs-canonicalise.cc +++ b/src/libstore/posix-fs-canonicalise.cc @@ -8,14 +8,13 @@ #include "store-config-private.hh" #if NIX_SUPPORT_ACL -# include +# include #endif namespace nix { const time_t mtimeStore = 1; /* 1 second into the epoch */ - static void canonicaliseTimestampAndPermissions(const Path & path, const struct stat & st) { if (!S_ISLNK(st.st_mode)) { @@ -24,31 +23,25 @@ static void canonicaliseTimestampAndPermissions(const Path & path, const struct mode_t mode = st.st_mode & ~S_IFMT; if (mode != 0444 && mode != 0555) { - mode = (st.st_mode & S_IFMT) - | 0444 - | (st.st_mode & S_IXUSR ? 0111 : 0); + mode = (st.st_mode & S_IFMT) | 0444 | (st.st_mode & S_IXUSR ? 
0111 : 0); if (chmod(path.c_str(), mode) == -1) throw SysError("changing mode of '%1%' to %2$o", path, mode); } - } #ifndef _WIN32 // TODO implement if (st.st_mtime != mtimeStore) { struct stat st2 = st; - st2.st_mtime = mtimeStore, - setWriteTime(path, st2); + st2.st_mtime = mtimeStore, setWriteTime(path, st2); } #endif } - void canonicaliseTimestampAndPermissions(const Path & path) { canonicaliseTimestampAndPermissions(path, lstat(path)); } - static void canonicalisePathMetaData_( const Path & path, #ifndef _WIN32 @@ -87,12 +80,13 @@ static void canonicalisePathMetaData_( if ((eaSize = llistxattr(path.c_str(), eaBuf.data(), eaBuf.size())) < 0) throw SysError("querying extended attributes of '%s'", path); - for (auto & eaName: tokenizeString(std::string(eaBuf.data(), eaSize), std::string("\000", 1))) { - if (settings.ignoredAcls.get().count(eaName)) continue; + for (auto & eaName : tokenizeString(std::string(eaBuf.data(), eaSize), std::string("\000", 1))) { + if (settings.ignoredAcls.get().count(eaName)) + continue; if (lremovexattr(path.c_str(), eaName.c_str()) == -1) throw SysError("removing extended attribute '%s' from '%s'", eaName, path); } - } + } #endif #ifndef _WIN32 @@ -106,7 +100,9 @@ static void canonicalisePathMetaData_( if (S_ISDIR(st.st_mode) || !inodesSeen.count(Inode(st.st_dev, st.st_ino))) throw BuildError("invalid ownership on file '%1%'", path); mode_t mode = st.st_mode & ~S_IFMT; - assert(S_ISLNK(st.st_mode) || (st.st_uid == geteuid() && (mode == 0444 || mode == 0555) && st.st_mtime == mtimeStore)); + assert( + S_ISLNK(st.st_mode) + || (st.st_uid == geteuid() && (mode == 0444 || mode == 0555) && st.st_mtime == mtimeStore)); return; } #endif @@ -124,14 +120,12 @@ static void canonicalisePathMetaData_( store (since that directory is group-writable for the Nix build users group); we check for this case below. */ if (st.st_uid != geteuid()) { -#if HAVE_LCHOWN +# if HAVE_LCHOWN if (lchown(path.c_str(), geteuid(), getegid()) == -1) -#else - if (!S_ISLNK(st.st_mode) && - chown(path.c_str(), geteuid(), getegid()) == -1) -#endif - throw SysError("changing owner of '%1%' to %2%", - path, geteuid()); +# else + if (!S_ISLNK(st.st_mode) && chown(path.c_str(), geteuid(), getegid()) == -1) +# endif + throw SysError("changing owner of '%1%' to %2%", path, geteuid()); } #endif @@ -148,7 +142,6 @@ static void canonicalisePathMetaData_( } } - void canonicalisePathMetaData( const Path & path, #ifndef _WIN32 @@ -175,12 +168,13 @@ void canonicalisePathMetaData( #endif } - -void canonicalisePathMetaData(const Path & path +void canonicalisePathMetaData( + const Path & path #ifndef _WIN32 - , std::optional> uidRange + , + std::optional> uidRange #endif - ) +) { InodesSeen inodesSeen; canonicalisePathMetaData_( @@ -191,4 +185,4 @@ void canonicalisePathMetaData(const Path & path inodesSeen); } -} +} // namespace nix diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc index 09ef36705..2b679e2a3 100644 --- a/src/libstore/profiles.cc +++ b/src/libstore/profiles.cc @@ -10,27 +10,26 @@ #include #include - namespace nix { - /** * Parse a generation name of the format * `--link'. 
*/ static std::optional parseName(const std::string & profileName, const std::string & name) { - if (name.substr(0, profileName.size() + 1) != profileName + "-") return {}; + if (name.substr(0, profileName.size() + 1) != profileName + "-") + return {}; auto s = name.substr(profileName.size() + 1); auto p = s.find("-link"); - if (p == std::string::npos) return {}; + if (p == std::string::npos) + return {}; if (auto n = string2Int(s.substr(0, p))) return *n; else return {}; } - std::pair> findGenerations(Path profile) { Generations gens; @@ -42,28 +41,15 @@ std::pair> findGenerations(Path pro checkInterrupt(); if (auto n = parseName(profileName, i.path().filename().string())) { auto path = i.path().string(); - gens.push_back({ - .number = *n, - .path = path, - .creationTime = lstat(path).st_mtime - }); + gens.push_back({.number = *n, .path = path, .creationTime = lstat(path).st_mtime}); } } - gens.sort([](const Generation & a, const Generation & b) - { - return a.number < b.number; - }); + gens.sort([](const Generation & a, const Generation & b) { return a.number < b.number; }); - return { - gens, - pathExists(profile) - ? parseName(profileName, readLink(profile)) - : std::nullopt - }; + return {gens, pathExists(profile) ? parseName(profileName, readLink(profile)) : std::nullopt}; } - /** * Create a generation name that can be parsed by `parseName()`. */ @@ -72,7 +58,6 @@ static Path makeName(const Path & profile, GenerationNumber num) return fmt("%s-%s-link", profile, num); } - Path createGeneration(LocalFSStore & store, Path profile, StorePath outPath) { /* The new generation number should be higher than old the @@ -110,14 +95,12 @@ Path createGeneration(LocalFSStore & store, Path profile, StorePath outPath) return generation; } - static void removeFile(const Path & path) { if (remove(path.c_str()) == -1) throw SysError("cannot unlink '%1%'", path); } - void deleteGeneration(const Path & profile, GenerationNumber gen) { Path generation = makeName(profile, gen); @@ -143,7 +126,6 @@ static void deleteGeneration2(const Path & profile, GenerationNumber gen, bool d } } - void deleteGenerations(const Path & profile, const std::set & gensToDelete, bool dryRun) { PathLocks lock; @@ -155,7 +137,8 @@ void deleteGenerations(const Path & profile, const std::set & throw Error("cannot delete current version of profile %1%'", profile); for (auto & i : gens) { - if (!gensToDelete.count(i.number)) continue; + if (!gensToDelete.count(i.number)) + continue; deleteGeneration2(profile, i.number, dryRun); } } @@ -165,7 +148,8 @@ void deleteGenerations(const Path & profile, const std::set & */ static inline void iterDropUntil(Generations & gens, auto && i, auto && cond) { - for (; i != gens.rend() && !cond(*i); ++i); + for (; i != gens.rend() && !cond(*i); ++i) + ; } void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bool dryRun) @@ -185,7 +169,8 @@ void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bo iterDropUntil(gens, i, [&](auto & g) { return g.number == curGen; }); // Skip over `max` generations, preserving them - for (GenerationNumber keep = 0; i != gens.rend() && keep < max; ++i, ++keep); + for (GenerationNumber keep = 0; i != gens.rend() && keep < max; ++i, ++keep) + ; // Delete the rest for (; i != gens.rend(); ++i) @@ -204,7 +189,6 @@ void deleteOldGenerations(const Path & profile, bool dryRun) deleteGeneration2(profile, i.number, dryRun); } - void deleteGenerationsOlderThan(const Path & profile, time_t t, bool dryRun) { PathLocks lock; @@ -225,7 
+209,8 @@ void deleteGenerationsOlderThan(const Path & profile, time_t t, bool dryRun) We don't want delete this one yet because it existed at the requested point in time, and we want to be able to roll back to it. */ - if (i != gens.rend()) ++i; + if (i != gens.rend()) + ++i; // Delete all previous generations (unless current). for (; i != gens.rend(); ++i) { @@ -237,7 +222,6 @@ void deleteGenerationsOlderThan(const Path & profile, time_t t, bool dryRun) } } - time_t parseOlderThanTimeSpec(std::string_view timeSpec) { if (timeSpec.empty() || timeSpec[timeSpec.size() - 1] != 'd') @@ -253,20 +237,16 @@ time_t parseOlderThanTimeSpec(std::string_view timeSpec) return curTime - *days * 24 * 3600; } - void switchLink(Path link, Path target) { /* Hacky. */ - if (dirOf(target) == dirOf(link)) target = baseNameOf(target); + if (dirOf(target) == dirOf(link)) + target = baseNameOf(target); replaceSymlink(target, link); } - -void switchGeneration( - const Path & profile, - std::optional dstGen, - bool dryRun) +void switchGeneration(const Path & profile, std::optional dstGen, bool dryRun) { PathLocks lock; lockProfile(lock, profile); @@ -275,8 +255,7 @@ void switchGeneration( std::optional dst; for (auto & i : gens) - if ((!dstGen && i.number < curGen) || - (dstGen && i.number == *dstGen)) + if ((!dstGen && i.number < curGen) || (dstGen && i.number == *dstGen)) dst = i; if (!dst) { @@ -288,31 +267,26 @@ void switchGeneration( notice("switching profile from version %d to %d", curGen.value_or(0), dst->number); - if (dryRun) return; + if (dryRun) + return; switchLink(profile, dst->path); } - void lockProfile(PathLocks & lock, const Path & profile) { lock.lockPaths({profile}, fmt("waiting for lock on profile '%1%'", profile)); lock.setDeletion(true); } - std::string optimisticLockProfile(const Path & profile) { return pathExists(profile) ? readLink(profile) : ""; } - Path profilesDir() { - auto profileRoot = - isRootUser() - ? rootProfilesDir() - : createNixStateDir() + "/profiles"; + auto profileRoot = isRootUser() ? rootProfilesDir() : createNixStateDir() + "/profiles"; createDirs(profileRoot); return profileRoot; } @@ -322,7 +296,6 @@ Path rootProfilesDir() return settings.nixStateDir + "/profiles/per-user/root"; } - Path getDefaultProfile() { Path profileLink = settings.useXDGBaseDirectories ? createNixStateDir() + "/profile" : getHome() + "/.nix-profile"; @@ -355,4 +328,4 @@ Path rootChannelsDir() return rootProfilesDir() + "/channels"; } -} +} // namespace nix diff --git a/src/libstore/realisation.cc b/src/libstore/realisation.cc index 9a72422eb..8a6d99ffe 100644 --- a/src/libstore/realisation.cc +++ b/src/libstore/realisation.cc @@ -8,18 +8,20 @@ namespace nix { MakeError(InvalidDerivationOutputId, Error); -DrvOutput DrvOutput::parse(const std::string &strRep) { +DrvOutput DrvOutput::parse(const std::string & strRep) +{ size_t n = strRep.find("!"); if (n == strRep.npos) throw InvalidDerivationOutputId("Invalid derivation output id %s", strRep); return DrvOutput{ .drvHash = Hash::parseAnyPrefixed(strRep.substr(0, n)), - .outputName = strRep.substr(n+1), + .outputName = strRep.substr(n + 1), }; } -std::string DrvOutput::to_string() const { +std::string DrvOutput::to_string() const +{ return strHash() + "!" 
+ outputName; } @@ -32,23 +34,21 @@ std::set Realisation::closure(Store & store, const std::set & startOutputs, std::set & res) { - auto getDeps = [&](const Realisation& current) -> std::set { + auto getDeps = [&](const Realisation & current) -> std::set { std::set res; - for (auto& [currentDep, _] : current.dependentRealisations) { + for (auto & [currentDep, _] : current.dependentRealisations) { if (auto currentRealisation = store.queryRealisation(currentDep)) res.insert(*currentRealisation); else - throw Error( - "Unrealised derivation '%s'", currentDep.to_string()); + throw Error("Unrealised derivation '%s'", currentDep.to_string()); } return res; }; computeClosure( - startOutputs, res, - [&](const Realisation& current, - std::function>&)> - processEdges) { + startOutputs, + res, + [&](const Realisation & current, std::function> &)> processEdges) { std::promise> promise; try { auto res = getDeps(current); @@ -60,7 +60,8 @@ void Realisation::closure(Store & store, const std::set & startOutp }); } -nlohmann::json Realisation::toJSON() const { +nlohmann::json Realisation::toJSON() const +{ auto jsonDependentRealisations = nlohmann::json::object(); for (auto & [depId, depOutPath] : dependentRealisations) jsonDependentRealisations.emplace(depId.to_string(), depOutPath.to_string()); @@ -72,9 +73,8 @@ nlohmann::json Realisation::toJSON() const { }; } -Realisation Realisation::fromJSON( - const nlohmann::json& json, - const std::string& whence) { +Realisation Realisation::fromJSON(const nlohmann::json & json, const std::string & whence) +{ auto getOptionalField = [&](std::string fieldName) -> std::optional { auto fieldIterator = json.find(fieldName); if (fieldIterator == json.end()) @@ -85,16 +85,14 @@ Realisation Realisation::fromJSON( if (auto field = getOptionalField(fieldName)) return *field; else - throw Error( - "Drv output info file '%1%' is corrupt, missing field %2%", - whence, fieldName); + throw Error("Drv output info file '%1%' is corrupt, missing field %2%", whence, fieldName); }; StringSet signatures; if (auto signaturesIterator = json.find("signatures"); signaturesIterator != json.end()) signatures.insert(signaturesIterator->begin(), signaturesIterator->end()); - std::map dependentRealisations; + std::map dependentRealisations; if (auto jsonDependencies = json.find("dependentRealisations"); jsonDependencies != json.end()) for (auto & [jsonDepId, jsonDepOutPath] : jsonDependencies->get()) dependentRealisations.insert({DrvOutput::parse(jsonDepId), StorePath(jsonDepOutPath)}); @@ -114,7 +112,7 @@ std::string Realisation::fingerprint() const return serialized.dump(); } -void Realisation::sign(const Signer &signer) +void Realisation::sign(const Signer & signer) { signatures.insert(signer.signDetached(fingerprint())); } @@ -137,11 +135,10 @@ size_t Realisation::checkSignatures(const PublicKeys & publicKeys) const return good; } - -SingleDrvOutputs filterDrvOutputs(const OutputsSpec& wanted, SingleDrvOutputs&& outputs) +SingleDrvOutputs filterDrvOutputs(const OutputsSpec & wanted, SingleDrvOutputs && outputs) { SingleDrvOutputs ret = std::move(outputs); - for (auto it = ret.begin(); it != ret.end(); ) { + for (auto it = ret.begin(); it != ret.end();) { if (!wanted.contains(it->first)) it = ret.erase(it); else @@ -150,13 +147,14 @@ SingleDrvOutputs filterDrvOutputs(const OutputsSpec& wanted, SingleDrvOutputs&& return ret; } -StorePath RealisedPath::path() const { +StorePath RealisedPath::path() const +{ return std::visit([](auto && arg) { return arg.getPath(); }, raw); } bool 
Realisation::isCompatibleWith(const Realisation & other) const { - assert (id == other.id); + assert(id == other.id); if (outPath == other.outPath) { if (dependentRealisations.empty() != other.dependentRealisations.empty()) { warn( @@ -172,27 +170,24 @@ bool Realisation::isCompatibleWith(const Realisation & other) const return false; } -void RealisedPath::closure( - Store& store, - const RealisedPath::Set& startPaths, - RealisedPath::Set& ret) +void RealisedPath::closure(Store & store, const RealisedPath::Set & startPaths, RealisedPath::Set & ret) { // FIXME: This only builds the store-path closure, not the real realisation // closure StorePathSet initialStorePaths, pathsClosure; - for (auto& path : startPaths) + for (auto & path : startPaths) initialStorePaths.insert(path.path()); store.computeFSClosure(initialStorePaths, pathsClosure); ret.insert(startPaths.begin(), startPaths.end()); ret.insert(pathsClosure.begin(), pathsClosure.end()); } -void RealisedPath::closure(Store& store, RealisedPath::Set & ret) const +void RealisedPath::closure(Store & store, RealisedPath::Set & ret) const { RealisedPath::closure(store, {*this}, ret); } -RealisedPath::Set RealisedPath::closure(Store& store) const +RealisedPath::Set RealisedPath::closure(Store & store) const { RealisedPath::Set ret; closure(store, ret); diff --git a/src/libstore/remote-fs-accessor.cc b/src/libstore/remote-fs-accessor.cc index fdbe12fa9..12c810eca 100644 --- a/src/libstore/remote-fs-accessor.cc +++ b/src/libstore/remote-fs-accessor.cc @@ -58,7 +58,8 @@ std::pair, CanonPath> RemoteFSAccessor::fetch(const CanonPat throw InvalidPath("path '%1%' is not a valid store path", store->printStorePath(storePath)); auto i = nars.find(std::string(storePath.hashPart())); - if (i != nars.end()) return {i->second, restPath}; + if (i != nars.end()) + return {i->second, restPath}; std::string listing; Path cacheFile; @@ -68,36 +69,38 @@ std::pair, CanonPath> RemoteFSAccessor::fetch(const CanonPat try { listing = nix::readFile(makeCacheFile(storePath.hashPart(), "ls")); - auto narAccessor = makeLazyNarAccessor(listing, - [cacheFile](uint64_t offset, uint64_t length) { - - AutoCloseFD fd = toDescriptor(open(cacheFile.c_str(), O_RDONLY - #ifndef _WIN32 + auto narAccessor = makeLazyNarAccessor(listing, [cacheFile](uint64_t offset, uint64_t length) { + AutoCloseFD fd = toDescriptor(open( + cacheFile.c_str(), + O_RDONLY +#ifndef _WIN32 | O_CLOEXEC - #endif - )); - if (!fd) - throw SysError("opening NAR cache file '%s'", cacheFile); +#endif + )); + if (!fd) + throw SysError("opening NAR cache file '%s'", cacheFile); - if (lseek(fromDescriptorReadOnly(fd.get()), offset, SEEK_SET) != (off_t) offset) - throw SysError("seeking in '%s'", cacheFile); + if (lseek(fromDescriptorReadOnly(fd.get()), offset, SEEK_SET) != (off_t) offset) + throw SysError("seeking in '%s'", cacheFile); - std::string buf(length, 0); - readFull(fd.get(), buf.data(), length); + std::string buf(length, 0); + readFull(fd.get(), buf.data(), length); - return buf; - }); + return buf; + }); nars.emplace(storePath.hashPart(), narAccessor); return {narAccessor, restPath}; - } catch (SystemError &) { } + } catch (SystemError &) { + } try { auto narAccessor = makeNarAccessor(nix::readFile(cacheFile)); nars.emplace(storePath.hashPart(), narAccessor); return {narAccessor, restPath}; - } catch (SystemError &) { } + } catch (SystemError &) { + } } StringSink sink; @@ -129,4 +132,4 @@ std::string RemoteFSAccessor::readLink(const CanonPath & path) return res.first->readLink(res.second); } -} +} // 
namespace nix diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 1b8bad048..2b072980b 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -27,30 +27,29 @@ namespace nix { RemoteStore::RemoteStore(const Config & config) : Store{config} , config{config} - , connections(make_ref>( - std::max(1, config.maxConnections.get()), - [this]() { - auto conn = openConnectionWrapper(); - try { - initConnection(*conn); - } catch (...) { - failed = true; - throw; - } - return conn; - }, - [this](const ref & r) { - return - r->to.good() - && r->from.good() - && std::chrono::duration_cast( - std::chrono::steady_clock::now() - r->startTime).count() < this->config.maxConnectionAge; - } - )) + , connections( + make_ref>( + std::max(1, config.maxConnections.get()), + [this]() { + auto conn = openConnectionWrapper(); + try { + initConnection(*conn); + } catch (...) { + failed = true; + throw; + } + return conn; + }, + [this](const ref & r) { + return r->to.good() && r->from.good() + && std::chrono::duration_cast( + std::chrono::steady_clock::now() - r->startTime) + .count() + < this->config.maxConnectionAge; + })) { } - ref RemoteStore::openConnectionWrapper() { if (failed) @@ -63,7 +62,6 @@ ref RemoteStore::openConnectionWrapper() } } - void RemoteStore::initConnection(Connection & conn) { /* Send the magic greeting, check for the reply. */ @@ -73,9 +71,8 @@ void RemoteStore::initConnection(Connection & conn) StringSink saved; TeeSource tee(conn.from, saved); try { - auto [protoVersion, features] = WorkerProto::BasicClientConnection::handshake( - conn.to, tee, PROTOCOL_VERSION, - WorkerProto::allFeatures); + auto [protoVersion, features] = + WorkerProto::BasicClientConnection::handshake(conn.to, tee, PROTOCOL_VERSION, WorkerProto::allFeatures); conn.protoVersion = protoVersion; conn.features = features; } catch (SerialisationError & e) { @@ -95,31 +92,22 @@ void RemoteStore::initConnection(Connection & conn) debug("negotiated feature '%s'", feature); auto ex = conn.processStderrReturn(); - if (ex) std::rethrow_exception(ex); - } - catch (Error & e) { + if (ex) + std::rethrow_exception(ex); + } catch (Error & e) { throw Error("cannot open connection to remote store '%s': %s", getUri(), e.what()); } setOptions(conn); } - void RemoteStore::setOptions(Connection & conn) { - conn.to << WorkerProto::Op::SetOptions - << settings.keepFailed - << settings.keepGoing - << settings.tryFallback - << verbosity - << settings.maxBuildJobs - << settings.maxSilentTime - << true - << (settings.verboseBuild ? lvlError : lvlVomit) - << 0 // obsolete log type - << 0 /* obsolete print build trace */ - << settings.buildCores - << settings.useSubstitutes; + conn.to << WorkerProto::Op::SetOptions << settings.keepFailed << settings.keepGoing << settings.tryFallback + << verbosity << settings.maxBuildJobs << settings.maxSilentTime << true + << (settings.verboseBuild ? 
lvlError : lvlVomit) << 0 // obsolete log type + << 0 /* obsolete print build trace */ + << settings.buildCores << settings.useSubstitutes; if (GET_PROTOCOL_MINOR(conn.protoVersion) >= 12) { std::map overrides; @@ -141,10 +129,10 @@ void RemoteStore::setOptions(Connection & conn) } auto ex = conn.processStderrReturn(); - if (ex) std::rethrow_exception(ex); + if (ex) + std::rethrow_exception(ex); } - RemoteStore::ConnectionHandle::~ConnectionHandle() { if (!daemonException && std::uncaught_exceptions()) { @@ -158,7 +146,6 @@ void RemoteStore::ConnectionHandle::processStderr(Sink * sink, Source * source, handle->processStderr(&daemonException, sink, source, flush, block); } - RemoteStore::ConnectionHandle RemoteStore::getConnection() { return ConnectionHandle(connections->get()); @@ -177,21 +164,20 @@ bool RemoteStore::isValidPathUncached(const StorePath & path) return readInt(conn->from); } - StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute) { auto conn(getConnection()); if (GET_PROTOCOL_MINOR(conn->protoVersion) < 12) { StorePathSet res; for (auto & i : paths) - if (isValidPath(i)) res.insert(i); + if (isValidPath(i)) + res.insert(i); return res; } else { return conn->queryValidPaths(*this, &conn.daemonException, paths, maybeSubstitute); } } - StorePathSet RemoteStore::queryAllValidPaths() { auto conn(getConnection()); @@ -200,7 +186,6 @@ StorePathSet RemoteStore::queryAllValidPaths() return WorkerProto::Serialise::read(*this, *conn); } - StorePathSet RemoteStore::querySubstitutablePaths(const StorePathSet & paths) { auto conn(getConnection()); @@ -209,7 +194,8 @@ StorePathSet RemoteStore::querySubstitutablePaths(const StorePathSet & paths) for (auto & i : paths) { conn->to << WorkerProto::Op::HasSubstitutes << printStorePath(i); conn.processStderr(); - if (readInt(conn->from)) res.insert(i); + if (readInt(conn->from)) + res.insert(i); } return res; } else { @@ -220,10 +206,10 @@ StorePathSet RemoteStore::querySubstitutablePaths(const StorePathSet & paths) } } - void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, SubstitutablePathInfos & infos) { - if (pathsMap.empty()) return; + if (pathsMap.empty()) + return; auto conn(getConnection()); @@ -234,7 +220,8 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S conn->to << WorkerProto::Op::QuerySubstitutablePathInfo << printStorePath(i.first); conn.processStderr(); unsigned int reply = readInt(conn->from); - if (reply == 0) continue; + if (reply == 0) + continue; auto deriver = readString(conn->from); if (deriver != "") info.deriver = parseStorePath(deriver); @@ -265,30 +252,26 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S info.downloadSize = readLongLong(conn->from); info.narSize = readLongLong(conn->from); } - } } - -void RemoteStore::queryPathInfoUncached(const StorePath & path, - Callback> callback) noexcept +void RemoteStore::queryPathInfoUncached( + const StorePath & path, Callback> callback) noexcept { try { std::shared_ptr info; { auto conn(getConnection()); info = std::make_shared( - StorePath{path}, - conn->queryPathInfo(*this, &conn.daemonException, path)); - + StorePath{path}, conn->queryPathInfo(*this, &conn.daemonException, path)); } callback(std::move(info)); - } catch (...) { callback.rethrow(); } + } catch (...) 
{ + callback.rethrow(); + } } - -void RemoteStore::queryReferrers(const StorePath & path, - StorePathSet & referrers) +void RemoteStore::queryReferrers(const StorePath & path, StorePathSet & referrers) { auto conn(getConnection()); conn->to << WorkerProto::Op::QueryReferrers << printStorePath(path); @@ -297,7 +280,6 @@ void RemoteStore::queryReferrers(const StorePath & path, referrers.insert(i); } - StorePathSet RemoteStore::queryValidDerivers(const StorePath & path) { auto conn(getConnection()); @@ -306,7 +288,6 @@ StorePathSet RemoteStore::queryValidDerivers(const StorePath & path) return WorkerProto::Serialise::read(*this, *conn); } - StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path) { if (GET_PROTOCOL_MINOR(getProtocol()) >= 0x16) { @@ -318,8 +299,8 @@ StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path) return WorkerProto::Serialise::read(*this, *conn); } - -std::map> RemoteStore::queryPartialDerivationOutputMap(const StorePath & path, Store * evalStore_) +std::map> +RemoteStore::queryPartialDerivationOutputMap(const StorePath & path, Store * evalStore_) { if (GET_PROTOCOL_MINOR(getProtocol()) >= 0x16) { if (!evalStore_) { @@ -358,28 +339,25 @@ std::optional RemoteStore::queryPathFromHashPart(const std::string & conn->to << WorkerProto::Op::QueryPathFromHashPart << hashPart; conn.processStderr(); Path path = readString(conn->from); - if (path.empty()) return {}; + if (path.empty()) + return {}; return parseStorePath(path); } - ref RemoteStore::addCAToStore( - Source & dump, - std::string_view name, - ContentAddressMethod caMethod, - HashAlgorithm hashAlgo, - const StorePathSet & references, - RepairFlag repair) + Source & dump, + std::string_view name, + ContentAddressMethod caMethod, + HashAlgorithm hashAlgo, + const StorePathSet & references, + RepairFlag repair) { std::optional conn_(getConnection()); auto & conn = *conn_; if (GET_PROTOCOL_MINOR(conn->protoVersion) >= 25) { - conn->to - << WorkerProto::Op::AddToStore - << name - << caMethod.renderWithAlgo(hashAlgo); + conn->to << WorkerProto::Op::AddToStore << name << caMethod.renderWithAlgo(hashAlgo); WorkerProto::write(*this, *conn, references); conn->to << repair; @@ -387,66 +365,63 @@ ref RemoteStore::addCAToStore( connections->incCapacity(); { Finally cleanup([&]() { connections->decCapacity(); }); - conn.withFramedSink([&](Sink & sink) { - dump.drainInto(sink); - }); + conn.withFramedSink([&](Sink & sink) { dump.drainInto(sink); }); } - return make_ref( - WorkerProto::Serialise::read(*this, *conn)); - } - else { - if (repair) throw Error("repairing is not supported when building through the Nix daemon protocol < 1.25"); + return make_ref(WorkerProto::Serialise::read(*this, *conn)); + } else { + if (repair) + throw Error("repairing is not supported when building through the Nix daemon protocol < 1.25"); switch (caMethod.raw) { - case ContentAddressMethod::Raw::Text: - { - if (hashAlgo != HashAlgorithm::SHA256) - throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given", - name, printHashAlgo(hashAlgo)); - std::string s = dump.drain(); - conn->to << WorkerProto::Op::AddTextToStore << name << s; - WorkerProto::write(*this, *conn, references); - conn.processStderr(); - break; - } - case ContentAddressMethod::Raw::Flat: - case ContentAddressMethod::Raw::NixArchive: - case ContentAddressMethod::Raw::Git: - default: - { - auto fim = caMethod.getFileIngestionMethod(); - conn->to - << WorkerProto::Op::AddToStore - << name - << ((hashAlgo 
== HashAlgorithm::SHA256 && fim == FileIngestionMethod::NixArchive) ? 0 : 1) /* backwards compatibility hack */ - << (fim == FileIngestionMethod::NixArchive ? 1 : 0) - << printHashAlgo(hashAlgo); + case ContentAddressMethod::Raw::Text: { + if (hashAlgo != HashAlgorithm::SHA256) + throw UnimplementedError( + "When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given", + name, + printHashAlgo(hashAlgo)); + std::string s = dump.drain(); + conn->to << WorkerProto::Op::AddTextToStore << name << s; + WorkerProto::write(*this, *conn, references); + conn.processStderr(); + break; + } + case ContentAddressMethod::Raw::Flat: + case ContentAddressMethod::Raw::NixArchive: + case ContentAddressMethod::Raw::Git: + default: { + auto fim = caMethod.getFileIngestionMethod(); + conn->to << WorkerProto::Op::AddToStore << name + << ((hashAlgo == HashAlgorithm::SHA256 && fim == FileIngestionMethod::NixArchive) + ? 0 + : 1) /* backwards compatibility hack */ + << (fim == FileIngestionMethod::NixArchive ? 1 : 0) << printHashAlgo(hashAlgo); - try { - conn->to.written = 0; - connections->incCapacity(); - { - Finally cleanup([&]() { connections->decCapacity(); }); - if (fim == FileIngestionMethod::NixArchive) { - dump.drainInto(conn->to); - } else { - std::string contents = dump.drain(); - dumpString(contents, conn->to); - } + try { + conn->to.written = 0; + connections->incCapacity(); + { + Finally cleanup([&]() { connections->decCapacity(); }); + if (fim == FileIngestionMethod::NixArchive) { + dump.drainInto(conn->to); + } else { + std::string contents = dump.drain(); + dumpString(contents, conn->to); } - conn.processStderr(); - } catch (SysError & e) { - /* Daemon closed while we were sending the path. Probably OOM - or I/O error. */ - if (e.errNo == EPIPE) - try { - conn.processStderr(); - } catch (EndOfFile & e) { } - throw; } - break; + conn.processStderr(); + } catch (SysError & e) { + /* Daemon closed while we were sending the path. Probably OOM + or I/O error. */ + if (e.errNo == EPIPE) + try { + conn.processStderr(); + } catch (EndOfFile & e) { + } + throw; } + break; + } } auto path = parseStorePath(readString(conn->from)); // Release our connection to prevent a deadlock in queryPathInfo(). @@ -455,7 +430,6 @@ ref RemoteStore::addCAToStore( } } - StorePath RemoteStore::addToStoreFromDump( Source & dump, std::string_view name, @@ -485,9 +459,7 @@ StorePath RemoteStore::addToStoreFromDump( return addCAToStore(dump, name, hashMethod, hashAlgo, references, repair)->path; } - -void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, - RepairFlag repair, CheckSigsFlag checkSigs) +void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs) { auto conn(getConnection()); @@ -496,33 +468,25 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, sink << 1 // == path follows ; copyNAR(source, sink); - sink - << exportMagic - << printStorePath(info.path); + sink << exportMagic << printStorePath(info.path); WorkerProto::write(*this, *conn, info.references); - sink - << (info.deriver ? printStorePath(*info.deriver) : "") - << 0 // == no legacy signature - << 0 // == no path follows + sink << (info.deriver ? 
printStorePath(*info.deriver) : "") << 0 // == no legacy signature + << 0 // == no path follows ; }); conn->importPaths(*this, &conn.daemonException, *source2); } else { - conn->to << WorkerProto::Op::AddToStoreNar - << printStorePath(info.path) + conn->to << WorkerProto::Op::AddToStoreNar << printStorePath(info.path) << (info.deriver ? printStorePath(*info.deriver) : "") << info.narHash.to_string(HashFormat::Base16, false); WorkerProto::write(*this, *conn, info.references); - conn->to << info.registrationTime << info.narSize - << info.ultimate << info.sigs << renderContentAddress(info.ca) + conn->to << info.registrationTime << info.narSize << info.ultimate << info.sigs << renderContentAddress(info.ca) << repair << !checkSigs; if (GET_PROTOCOL_MINOR(conn->protoVersion) >= 23) { - conn.withFramedSink([&](Sink & sink) { - copyNAR(source, sink); - }); + conn.withFramedSink([&](Sink & sink) { copyNAR(source, sink); }); } else if (GET_PROTOCOL_MINOR(conn->protoVersion) >= 21) { conn.processStderr(0, &source); } else { @@ -532,12 +496,8 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, } } - void RemoteStore::addMultipleToStore( - PathsSource && pathsToCopy, - Activity & act, - RepairFlag repair, - CheckSigsFlag checkSigs) + PathsSource && pathsToCopy, Activity & act, RepairFlag repair, CheckSigsFlag checkSigs) { // `addMultipleToStore` is single threaded size_t bytesExpected = 0; @@ -555,12 +515,13 @@ void RemoteStore::addMultipleToStore( act.progress(nrTotal - pathsToCopy.size(), nrTotal, size_t(1), size_t(0)); auto & [pathInfo, pathSource] = pathsToCopy.back(); - WorkerProto::Serialise::write(*this, - WorkerProto::WriteConn { - .to = sink, - .version = 16, - }, - pathInfo); + WorkerProto::Serialise::write( + *this, + WorkerProto::WriteConn{ + .to = sink, + .version = 16, + }, + pathInfo); pathSource->drainInto(sink); pathsToCopy.pop_back(); } @@ -569,25 +530,16 @@ void RemoteStore::addMultipleToStore( addMultipleToStore(*source, repair, checkSigs); } -void RemoteStore::addMultipleToStore( - Source & source, - RepairFlag repair, - CheckSigsFlag checkSigs) +void RemoteStore::addMultipleToStore(Source & source, RepairFlag repair, CheckSigsFlag checkSigs) { if (GET_PROTOCOL_MINOR(getConnection()->protoVersion) >= 32) { auto conn(getConnection()); - conn->to - << WorkerProto::Op::AddMultipleToStore - << repair - << !checkSigs; - conn.withFramedSink([&](Sink & sink) { - source.drainInto(sink); - }); + conn->to << WorkerProto::Op::AddMultipleToStore << repair << !checkSigs; + conn.withFramedSink([&](Sink & sink) { source.drainInto(sink); }); } else Store::addMultipleToStore(source, repair, checkSigs); } - void RemoteStore::registerDrvOutput(const Realisation & info) { auto conn(getConnection()); @@ -601,8 +553,8 @@ void RemoteStore::registerDrvOutput(const Realisation & info) conn.processStderr(); } -void RemoteStore::queryRealisationUncached(const DrvOutput & id, - Callback> callback) noexcept +void RemoteStore::queryRealisationUncached( + const DrvOutput & id, Callback> callback) noexcept { try { auto conn(getConnection()); @@ -618,14 +570,12 @@ void RemoteStore::queryRealisationUncached(const DrvOutput & id, auto real = [&]() -> std::shared_ptr { if (GET_PROTOCOL_MINOR(conn->protoVersion) < 31) { - auto outPaths = WorkerProto::Serialise>::read( - *this, *conn); + auto outPaths = WorkerProto::Serialise>::read(*this, *conn); if (outPaths.empty()) return nullptr; - return std::make_shared(Realisation { .id = id, .outPath = *outPaths.begin() }); + return 
std::make_shared(Realisation{.id = id, .outPath = *outPaths.begin()}); } else { - auto realisations = WorkerProto::Serialise>::read( - *this, *conn); + auto realisations = WorkerProto::Serialise>::read(*this, *conn); if (realisations.empty()) return nullptr; return std::make_shared(*realisations.begin()); @@ -633,32 +583,33 @@ void RemoteStore::queryRealisationUncached(const DrvOutput & id, }(); callback(std::shared_ptr(real)); - } catch (...) { return callback.rethrow(); } + } catch (...) { + return callback.rethrow(); + } } -void RemoteStore::copyDrvsFromEvalStore( - const std::vector & paths, - std::shared_ptr evalStore) +void RemoteStore::copyDrvsFromEvalStore(const std::vector & paths, std::shared_ptr evalStore) { if (evalStore && evalStore.get() != this) { /* The remote doesn't have a way to access evalStore, so copy the .drvs. */ RealisedPath::Set drvPaths2; for (const auto & i : paths) { - std::visit(overloaded { - [&](const DerivedPath::Opaque & bp) { - // Do nothing, path is hopefully there already + std::visit( + overloaded{ + [&](const DerivedPath::Opaque & bp) { + // Do nothing, path is hopefully there already + }, + [&](const DerivedPath::Built & bp) { drvPaths2.insert(bp.drvPath->getBaseStorePath()); }, }, - [&](const DerivedPath::Built & bp) { - drvPaths2.insert(bp.drvPath->getBaseStorePath()); - }, - }, i.raw()); + i.raw()); } copyClosure(*evalStore, *this, drvPaths2); } } -void RemoteStore::buildPaths(const std::vector & drvPaths, BuildMode buildMode, std::shared_ptr evalStore) +void RemoteStore::buildPaths( + const std::vector & drvPaths, BuildMode buildMode, std::shared_ptr evalStore) { copyDrvsFromEvalStore(drvPaths, evalStore); @@ -678,9 +629,7 @@ void RemoteStore::buildPaths(const std::vector & drvPaths, BuildMod } std::vector RemoteStore::buildPathsWithResults( - const std::vector & paths, - BuildMode buildMode, - std::shared_ptr evalStore) + const std::vector & paths, BuildMode buildMode, std::shared_ptr evalStore) { copyDrvsFromEvalStore(paths, evalStore); @@ -705,20 +654,19 @@ std::vector RemoteStore::buildPathsWithResults( for (auto & path : paths) { std::visit( - overloaded { + overloaded{ [&](const DerivedPath::Opaque & bo) { - results.push_back(KeyedBuildResult { - { - .status = BuildResult::Substituted, - }, - /* .path = */ bo, - }); + results.push_back( + KeyedBuildResult{ + { + .status = BuildResult::Substituted, + }, + /* .path = */ bo, + }); }, [&](const DerivedPath::Built & bfd) { - KeyedBuildResult res { - { - .status = BuildResult::Built - }, + KeyedBuildResult res{ + {.status = BuildResult::Built}, /* .path = */ bfd, }; @@ -732,18 +680,18 @@ std::vector RemoteStore::buildPathsWithResults( if (!outputHash) throw Error( "the derivation '%s' doesn't have an output named '%s'", - printStorePath(drvPath), output); - auto outputId = DrvOutput{ *outputHash, output }; + printStorePath(drvPath), + output); + auto outputId = DrvOutput{*outputHash, output}; if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { - auto realisation = - queryRealisation(outputId); + auto realisation = queryRealisation(outputId); if (!realisation) throw MissingRealisation(outputId); res.builtOutputs.emplace(output, *realisation); } else { res.builtOutputs.emplace( output, - Realisation { + Realisation{ .id = outputId, .outPath = outputPath, }); @@ -751,8 +699,7 @@ std::vector RemoteStore::buildPathsWithResults( } results.push_back(res); - } - }, + }}, path.raw()); } @@ -760,9 +707,7 @@ std::vector RemoteStore::buildPathsWithResults( } } - -BuildResult 
RemoteStore::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, - BuildMode buildMode) +BuildResult RemoteStore::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode) { auto conn(getConnection()); conn->putBuildDerivationRequest(*this, &conn.daemonException, drvPath, drv, buildMode); @@ -770,7 +715,6 @@ BuildResult RemoteStore::buildDerivation(const StorePath & drvPath, const BasicD return WorkerProto::Serialise::read(*this, *conn); } - void RemoteStore::ensurePath(const StorePath & path) { auto conn(getConnection()); @@ -779,14 +723,12 @@ void RemoteStore::ensurePath(const StorePath & path) readInt(conn->from); } - void RemoteStore::addTempRoot(const StorePath & path) { auto conn(getConnection()); conn->addTempRoot(*this, &conn.daemonException, path); } - Roots RemoteStore::findRoots(bool censor) { auto conn(getConnection()); @@ -802,18 +744,16 @@ Roots RemoteStore::findRoots(bool censor) return result; } - void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results) { auto conn(getConnection()); - conn->to - << WorkerProto::Op::CollectGarbage << options.action; + conn->to << WorkerProto::Op::CollectGarbage << options.action; WorkerProto::write(*this, *conn, options.pathsToDelete); conn->to << options.ignoreLiveness - << options.maxFreed - /* removed options */ - << 0 << 0 << 0; + << options.maxFreed + /* removed options */ + << 0 << 0 << 0; conn.processStderr(); @@ -827,7 +767,6 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results) } } - void RemoteStore::optimiseStore() { auto conn(getConnection()); @@ -836,7 +775,6 @@ void RemoteStore::optimiseStore() readInt(conn->from); } - bool RemoteStore::verifyStore(bool checkContents, RepairFlag repair) { auto conn(getConnection()); @@ -845,7 +783,6 @@ bool RemoteStore::verifyStore(bool checkContents, RepairFlag repair) return readInt(conn->from); } - void RemoteStore::addSignatures(const StorePath & storePath, const StringSet & sigs) { auto conn(getConnection()); @@ -854,7 +791,6 @@ void RemoteStore::addSignatures(const StorePath & storePath, const StringSet & s readInt(conn->from); } - MissingPaths RemoteStore::queryMissing(const std::vector & targets) { { @@ -874,36 +810,30 @@ MissingPaths RemoteStore::queryMissing(const std::vector & targets) return res; } - fallback: +fallback: return Store::queryMissing(targets); } - void RemoteStore::addBuildLog(const StorePath & drvPath, std::string_view log) { auto conn(getConnection()); conn->to << WorkerProto::Op::AddBuildLog << drvPath.to_string(); StringSource source(log); - conn.withFramedSink([&](Sink & sink) { - source.drainInto(sink); - }); + conn.withFramedSink([&](Sink & sink) { source.drainInto(sink); }); readInt(conn->from); } - std::optional RemoteStore::getVersion() { auto conn(getConnection()); return conn->daemonNixVersion; } - void RemoteStore::connect() { auto conn(getConnection()); } - unsigned int RemoteStore::getProtocol() { auto conn(connections->get()); @@ -924,9 +854,7 @@ void RemoteStore::flushBadConnections() void RemoteStore::narFromPath(const StorePath & path, Sink & sink) { auto conn(getConnection()); - conn->narFromPath(*this, &conn.daemonException, path, [&](Source & source) { - copyNAR(conn->from, sink); - }); + conn->narFromPath(*this, &conn.daemonException, path, [&](Source & source) { copyNAR(conn->from, sink); }); } ref RemoteStore::getFSAccessor(bool requireValidPath) @@ -951,4 +879,4 @@ void RemoteStore::ConnectionHandle::withFramedSink(std::function & targ 
return res; } -} +} // namespace nix diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 9bb47a010..0df7e482a 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -2,33 +2,33 @@ #if NIX_WITH_S3_SUPPORT -#include +# include -#include "nix/store/s3.hh" -#include "nix/store/nar-info.hh" -#include "nix/store/nar-info-disk-cache.hh" -#include "nix/store/globals.hh" -#include "nix/util/compression.hh" -#include "nix/store/filetransfer.hh" -#include "nix/util/signals.hh" -#include "nix/store/store-registration.hh" +# include "nix/store/s3.hh" +# include "nix/store/nar-info.hh" +# include "nix/store/nar-info-disk-cache.hh" +# include "nix/store/globals.hh" +# include "nix/util/compression.hh" +# include "nix/store/filetransfer.hh" +# include "nix/util/signals.hh" +# include "nix/store/store-registration.hh" -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include +# include +# include +# include +# include +# include +# include +# include +# include +# include +# include +# include +# include +# include +# include +# include +# include using namespace Aws::Transfer; @@ -40,8 +40,10 @@ struct S3Error : public Error Aws::String exceptionName; template - S3Error(Aws::S3::S3Errors err, Aws::String exceptionName, const Args & ... args) - : Error(args...), err(err), exceptionName(exceptionName) { }; + S3Error(Aws::S3::S3Errors err, Aws::String exceptionName, const Args &... args) + : Error(args...) + , err(err) + , exceptionName(exceptionName){}; }; /* Helper: given an Outcome, return R in case of success, or @@ -53,11 +55,7 @@ R && checkAws(std::string_view s, Aws::Utils::Outcome && outcome) throw S3Error( outcome.GetError().GetErrorType(), outcome.GetError().GetExceptionName(), - fmt( - "%s: %s (request id: %s)", - s, - outcome.GetError().GetMessage(), - outcome.GetError().GetRequestId())); + fmt("%s: %s (request id: %s)", s, outcome.GetError().GetMessage(), outcome.GetError().GetRequestId())); return outcome.GetResultWithOwnership(); } @@ -70,9 +68,9 @@ class AwsLogger : public Aws::Utils::Logging::FormattedLogSystem debug("AWS: %s", chomp(statement)); } -#if !(AWS_SDK_VERSION_MAJOR <= 1 && AWS_SDK_VERSION_MINOR <= 7 && AWS_SDK_VERSION_PATCH <= 115) +# if !(AWS_SDK_VERSION_MAJOR <= 1 && AWS_SDK_VERSION_MINOR <= 7 && AWS_SDK_VERSION_PATCH <= 115) void Flush() override {} -#endif +# endif }; /* Retrieve the credentials from the list of AWS default providers, with the addition of the STS creds provider. This @@ -110,9 +108,7 @@ static void initAWS() if (verbosity >= lvlDebug) { options.loggingOptions.logLevel = - verbosity == lvlDebug - ? Aws::Utils::Logging::LogLevel::Debug - : Aws::Utils::Logging::LogLevel::Trace; + verbosity == lvlDebug ? 
Aws::Utils::Logging::LogLevel::Debug : Aws::Utils::Logging::LogLevel::Trace; options.loggingOptions.logger_create_fn = [options]() { return std::make_shared(options.loggingOptions.logLevel); }; @@ -123,32 +119,31 @@ static void initAWS() } S3Helper::S3Helper( - const std::string & profile, - const std::string & region, - const std::string & scheme, - const std::string & endpoint) + const std::string & profile, const std::string & region, const std::string & scheme, const std::string & endpoint) : config(makeConfig(region, scheme, endpoint)) - , client(make_ref( - std::make_shared(profile), - *config, -#if AWS_SDK_VERSION_MAJOR == 1 && AWS_SDK_VERSION_MINOR < 3 - false, -#else - Aws::Client::AWSAuthV4Signer::PayloadSigningPolicy::Never, -#endif - endpoint.empty())) + , client( + make_ref( + std::make_shared(profile), + *config, +# if AWS_SDK_VERSION_MAJOR == 1 && AWS_SDK_VERSION_MINOR < 3 + false, +# else + Aws::Client::AWSAuthV4Signer::PayloadSigningPolicy::Never, +# endif + endpoint.empty())) { } /* Log AWS retries. */ class RetryStrategy : public Aws::Client::DefaultRetryStrategy { - bool ShouldRetry(const Aws::Client::AWSError& error, long attemptedRetries) const override + bool ShouldRetry(const Aws::Client::AWSError & error, long attemptedRetries) const override { checkInterrupt(); auto retry = Aws::Client::DefaultRetryStrategy::ShouldRetry(error, attemptedRetries); if (retry) - printError("AWS error '%s' (%s; request id: %s), will retry in %d ms", + printError( + "AWS error '%s' (%s; request id: %s), will retry in %d ms", error.GetExceptionName(), error.GetMessage(), error.GetRequestId(), @@ -157,10 +152,8 @@ class RetryStrategy : public Aws::Client::DefaultRetryStrategy } }; -ref S3Helper::makeConfig( - const std::string & region, - const std::string & scheme, - const std::string & endpoint) +ref +S3Helper::makeConfig(const std::string & region, const std::string & scheme, const std::string & endpoint) { initAWS(); auto res = make_ref(); @@ -179,38 +172,30 @@ ref S3Helper::makeConfig( return res; } -S3Helper::FileTransferResult S3Helper::getObject( - const std::string & bucketName, const std::string & key) +S3Helper::FileTransferResult S3Helper::getObject(const std::string & bucketName, const std::string & key) { std::string uri = "s3://" + bucketName + "/" + key; - Activity act(*logger, lvlTalkative, actFileTransfer, - fmt("downloading '%s'", uri), - Logger::Fields{uri}, getCurActivity()); + Activity act( + *logger, lvlTalkative, actFileTransfer, fmt("downloading '%s'", uri), Logger::Fields{uri}, getCurActivity()); - auto request = - Aws::S3::Model::GetObjectRequest() - .WithBucket(bucketName) - .WithKey(key); + auto request = Aws::S3::Model::GetObjectRequest().WithBucket(bucketName).WithKey(key); - request.SetResponseStreamFactory([&]() { - return Aws::New("STRINGSTREAM"); - }); + request.SetResponseStreamFactory([&]() { return Aws::New("STRINGSTREAM"); }); size_t bytesDone = 0; size_t bytesExpected = 0; - request.SetDataReceivedEventHandler([&](const Aws::Http::HttpRequest * req, Aws::Http::HttpResponse * resp, long long l) { - if (!bytesExpected && resp->HasHeader("Content-Length")) { - if (auto length = string2Int(resp->GetHeader("Content-Length"))) { - bytesExpected = *length; + request.SetDataReceivedEventHandler( + [&](const Aws::Http::HttpRequest * req, Aws::Http::HttpResponse * resp, long long l) { + if (!bytesExpected && resp->HasHeader("Content-Length")) { + if (auto length = string2Int(resp->GetHeader("Content-Length"))) { + bytesExpected = *length; + } } - } - 
bytesDone += l; - act.progress(bytesDone, bytesExpected); - }); + bytesDone += l; + act.progress(bytesDone, bytesExpected); + }); - request.SetContinueRequestHandler([](const Aws::Http::HttpRequest*) { - return !isInterrupted(); - }); + request.SetContinueRequestHandler([](const Aws::Http::HttpRequest *) { return !isInterrupted(); }); FileTransferResult res; @@ -218,23 +203,20 @@ S3Helper::FileTransferResult S3Helper::getObject( try { - auto result = checkAws(fmt("AWS error fetching '%s'", key), - client->GetObject(request)); + auto result = checkAws(fmt("AWS error fetching '%s'", key), client->GetObject(request)); act.progress(result.GetContentLength(), result.GetContentLength()); - res.data = decompress(result.GetContentEncoding(), - dynamic_cast(result.GetBody()).str()); + res.data = decompress(result.GetContentEncoding(), dynamic_cast(result.GetBody()).str()); } catch (S3Error & e) { - if ((e.err != Aws::S3::S3Errors::NO_SUCH_KEY) && - (e.err != Aws::S3::S3Errors::ACCESS_DENIED) && + if ((e.err != Aws::S3::S3Errors::NO_SUCH_KEY) && (e.err != Aws::S3::S3Errors::ACCESS_DENIED) && // Expired tokens are not really an error, more of a caching problem. Should be treated same as 403. // - // AWS unwilling to provide a specific error type for the situation (https://github.com/aws/aws-sdk-cpp/issues/1843) - // so use this hack - (e.exceptionName != "ExpiredToken") - ) throw; + // AWS unwilling to provide a specific error type for the situation + // (https://github.com/aws/aws-sdk-cpp/issues/1843) so use this hack + (e.exceptionName != "ExpiredToken")) + throw; } auto now2 = std::chrono::steady_clock::now(); @@ -244,11 +226,8 @@ S3Helper::FileTransferResult S3Helper::getObject( return res; } - S3BinaryCacheStoreConfig::S3BinaryCacheStoreConfig( - std::string_view uriScheme, - std::string_view bucketName, - const Params & params) + std::string_view uriScheme, std::string_view bucketName, const Params & params) : StoreConfig(params) , BinaryCacheStoreConfig(params) , bucketName(bucketName) @@ -262,20 +241,19 @@ S3BinaryCacheStoreConfig::S3BinaryCacheStoreConfig( throw UsageError("`%s` store requires a bucket name in its Store URI", uriScheme); } - S3BinaryCacheStore::S3BinaryCacheStore(ref config) : BinaryCacheStore(*config) , config{config} -{ } +{ +} std::string S3BinaryCacheStoreConfig::doc() { return - #include "s3-binary-cache-store.md" - ; +# include "s3-binary-cache-store.md" + ; } - struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore { Stats stats; @@ -303,8 +281,7 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore config->priority.setDefault(cacheInfo->priority); } else { BinaryCacheStore::init(); - diskCache->createCache( - getUri(), config->storeDir, config->wantMassQuery, config->priority); + diskCache->createCache(getUri(), config->storeDir, config->wantMassQuery, config->priority); } } @@ -332,17 +309,15 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore stats.head++; auto res = s3Helper.client->HeadObject( - Aws::S3::Model::HeadObjectRequest() - .WithBucket(config->bucketName) - .WithKey(path)); + Aws::S3::Model::HeadObjectRequest().WithBucket(config->bucketName).WithKey(path)); if (!res.IsSuccess()) { auto & error = res.GetError(); if (error.GetErrorType() == Aws::S3::S3Errors::RESOURCE_NOT_FOUND || error.GetErrorType() == Aws::S3::S3Errors::NO_SUCH_KEY // Expired tokens are not really an error, more of a caching problem. Should be treated same as 403. 
- // AWS unwilling to provide a specific error type for the situation (https://github.com/aws/aws-sdk-cpp/issues/1843) - // so use this hack + // AWS unwilling to provide a specific error type for the situation + // (https://github.com/aws/aws-sdk-cpp/issues/1843) so use this hack || (error.GetErrorType() == Aws::S3::S3Errors::UNKNOWN && error.GetExceptionName() == "ExpiredToken") // If bucket listing is disabled, 404s turn into 403s || error.GetErrorType() == Aws::S3::S3Errors::ACCESS_DENIED) @@ -373,29 +348,31 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore cv.wait(lk); } - AsyncContext(const Activity & act) : act(act) {} + AsyncContext(const Activity & act) + : act(act) + { + } }; - void uploadFile(const std::string & path, + void uploadFile( + const std::string & path, std::shared_ptr> istream, const std::string & mimeType, const std::string & contentEncoding) { std::string uri = "s3://" + config->bucketName + "/" + path; - Activity act(*logger, lvlTalkative, actFileTransfer, - fmt("uploading '%s'", uri), - Logger::Fields{uri}, getCurActivity()); + Activity act( + *logger, lvlTalkative, actFileTransfer, fmt("uploading '%s'", uri), Logger::Fields{uri}, getCurActivity()); istream->seekg(0, istream->end); auto size = istream->tellg(); istream->seekg(0, istream->beg); auto maxThreads = std::thread::hardware_concurrency(); - static std::shared_ptr - executor = std::make_shared(maxThreads); + static std::shared_ptr executor = + std::make_shared(maxThreads); - std::call_once(transferManagerCreated, [&]() - { + std::call_once(transferManagerCreated, [&]() { if (config->multipartUpload) { TransferManagerConfiguration transferConfig(executor.get()); @@ -404,8 +381,7 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore transferConfig.uploadProgressCallback = [](const TransferManager * transferManager, - const std::shared_ptr & transferHandle) - { + const std::shared_ptr & transferHandle) { auto context = std::dynamic_pointer_cast(transferHandle->GetContext()); size_t bytesDone = transferHandle->GetBytesTransferred(); size_t bytesTotal = transferHandle->GetBytesTotalSize(); @@ -418,8 +394,7 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore }; transferConfig.transferStatusUpdatedCallback = [](const TransferManager * transferManager, - const std::shared_ptr & transferHandle) - { + const std::shared_ptr & transferHandle) { auto context = std::dynamic_pointer_cast(transferHandle->GetContext()); context->notify(); }; @@ -438,11 +413,13 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore throw Error("setting a content encoding is not supported with S3 multi-part uploads"); auto context = std::make_shared(act); - std::shared_ptr transferHandle = - transferManager->UploadFile( - istream, bucketName, path, mimeType, - Aws::Map(), - context /*, contentEncoding */); + std::shared_ptr transferHandle = transferManager->UploadFile( + istream, + bucketName, + path, + mimeType, + Aws::Map(), + context /*, contentEncoding */); TransferStatus status = transferHandle->GetStatus(); while (status == TransferStatus::IN_PROGRESS || status == TransferStatus::NOT_STARTED) { @@ -457,20 +434,19 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore act.progress(transferHandle->GetBytesTransferred(), transferHandle->GetBytesTotalSize()); if (status == TransferStatus::FAILED) - throw Error("AWS error: failed to upload 's3://%s/%s': %s", - bucketName, path, transferHandle->GetLastError().GetMessage()); + throw Error( + "AWS error: failed to upload 's3://%s/%s': %s", + 
bucketName, + path, + transferHandle->GetLastError().GetMessage()); if (status != TransferStatus::COMPLETED) - throw Error("AWS error: transfer status of 's3://%s/%s' in unexpected state", - bucketName, path); + throw Error("AWS error: transfer status of 's3://%s/%s' in unexpected state", bucketName, path); } else { act.progress(0, size); - auto request = - Aws::S3::Model::PutObjectRequest() - .WithBucket(bucketName) - .WithKey(path); + auto request = Aws::S3::Model::PutObjectRequest().WithBucket(bucketName).WithKey(path); size_t bytesSent = 0; request.SetDataSentEventHandler([&](const Aws::Http::HttpRequest * req, long long l) { @@ -478,9 +454,7 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore act.progress(bytesSent, size); }); - request.SetContinueRequestHandler([](const Aws::Http::HttpRequest*) { - return !isInterrupted(); - }); + request.SetContinueRequestHandler([](const Aws::Http::HttpRequest *) { return !isInterrupted(); }); request.SetContentType(mimeType); @@ -489,32 +463,28 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore request.SetBody(istream); - auto result = checkAws(fmt("AWS error uploading '%s'", path), - s3Helper.client->PutObject(request)); + auto result = checkAws(fmt("AWS error uploading '%s'", path), s3Helper.client->PutObject(request)); act.progress(size, size); } auto now2 = std::chrono::steady_clock::now(); - auto duration = - std::chrono::duration_cast(now2 - now1) - .count(); + auto duration = std::chrono::duration_cast(now2 - now1).count(); - printInfo("uploaded 's3://%s/%s' (%d bytes) in %d ms", - bucketName, path, size, duration); + printInfo("uploaded 's3://%s/%s' (%d bytes) in %d ms", bucketName, path, size, duration); stats.putTimeMs += duration; stats.putBytes += std::max(size, (decltype(size)) 0); stats.put++; } - void upsertFile(const std::string & path, + void upsertFile( + const std::string & path, std::shared_ptr> istream, const std::string & mimeType) override { - auto compress = [&](std::string compression) - { + auto compress = [&](std::string compression) { auto compressed = nix::compress(compression, StreamToSourceAdapter(istream).drain()); return std::make_shared(std::move(compressed)); }; @@ -540,8 +510,12 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore stats.getTimeMs += res.durationMs; if (res.data) { - printTalkative("downloaded 's3://%s/%s' (%d bytes) in %d ms", - config->bucketName, path, res.data->size(), res.durationMs); + printTalkative( + "downloaded 's3://%s/%s' (%d bytes) in %d ms", + config->bucketName, + path, + res.data->size(), + res.durationMs); sink(*res.data); } else @@ -558,21 +532,19 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore do { debug("listing bucket 's3://%s' from key '%s'...", bucketName, marker); - auto res = checkAws(fmt("AWS error listing bucket '%s'", bucketName), + auto res = checkAws( + fmt("AWS error listing bucket '%s'", bucketName), s3Helper.client->ListObjects( - Aws::S3::Model::ListObjectsRequest() - .WithBucket(bucketName) - .WithDelimiter("/") - .WithMarker(marker))); + Aws::S3::Model::ListObjectsRequest().WithBucket(bucketName).WithDelimiter("/").WithMarker(marker))); auto & contents = res.GetContents(); - debug("got %d keys, next marker '%s'", - contents.size(), res.GetNextMarker()); + debug("got %d keys, next marker '%s'", contents.size(), res.GetNextMarker()); for (const auto & object : contents) { auto & key = object.GetKey(); - if (key.size() != 40 || !hasSuffix(key, ".narinfo")) continue; + if (key.size() != 40 || !hasSuffix(key, ".narinfo")) 
+ continue; paths.insert(parseStorePath(storeDir + "/" + key.substr(0, key.size() - 8) + "-" + MissingName)); } @@ -595,16 +567,15 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore ref S3BinaryCacheStoreImpl::Config::openStore() const { - auto store = make_ref(ref{ - // FIXME we shouldn't actually need a mutable config - std::const_pointer_cast(shared_from_this()) - }); + auto store = + make_ref(ref{// FIXME we shouldn't actually need a mutable config + std::const_pointer_cast(shared_from_this())}); store->init(); return store; } static RegisterStoreImplementation regS3BinaryCacheStore; -} +} // namespace nix #endif diff --git a/src/libstore/serve-protocol-connection.cc b/src/libstore/serve-protocol-connection.cc index 276086f6f..908994f4e 100644 --- a/src/libstore/serve-protocol-connection.cc +++ b/src/libstore/serve-protocol-connection.cc @@ -103,4 +103,4 @@ void ServeProto::BasicClientConnection::importPaths(const StoreDirConfig & store throw Error("remote machine failed to import closure"); } -} +} // namespace nix diff --git a/src/libstore/serve-protocol.cc b/src/libstore/serve-protocol.cc index 520c37951..7cf5e6997 100644 --- a/src/libstore/serve-protocol.cc +++ b/src/libstore/serve-protocol.cc @@ -20,33 +20,22 @@ BuildResult ServeProto::Serialise::read(const StoreDirConfig & stor conn.from >> status.errorMsg; if (GET_PROTOCOL_MINOR(conn.version) >= 3) - conn.from - >> status.timesBuilt - >> status.isNonDeterministic - >> status.startTime - >> status.stopTime; + conn.from >> status.timesBuilt >> status.isNonDeterministic >> status.startTime >> status.stopTime; if (GET_PROTOCOL_MINOR(conn.version) >= 6) { auto builtOutputs = ServeProto::Serialise::read(store, conn); for (auto && [output, realisation] : builtOutputs) - status.builtOutputs.insert_or_assign( - std::move(output.outputName), - std::move(realisation)); + status.builtOutputs.insert_or_assign(std::move(output.outputName), std::move(realisation)); } return status; } -void ServeProto::Serialise::write(const StoreDirConfig & store, ServeProto::WriteConn conn, const BuildResult & status) +void ServeProto::Serialise::write( + const StoreDirConfig & store, ServeProto::WriteConn conn, const BuildResult & status) { - conn.to - << status.status - << status.errorMsg; + conn.to << status.status << status.errorMsg; if (GET_PROTOCOL_MINOR(conn.version) >= 3) - conn.to - << status.timesBuilt - << status.isNonDeterministic - << status.startTime - << status.stopTime; + conn.to << status.timesBuilt << status.isNonDeterministic << status.startTime << status.stopTime; if (GET_PROTOCOL_MINOR(conn.version) >= 6) { DrvOutputs builtOutputs; for (auto & [output, realisation] : status.builtOutputs) @@ -55,12 +44,11 @@ void ServeProto::Serialise::write(const StoreDirConfig & store, Ser } } - UnkeyedValidPathInfo ServeProto::Serialise::read(const StoreDirConfig & store, ReadConn conn) { /* Hash should be set below unless very old `nix-store --serve`. Caller should assert that it did set it. */ - UnkeyedValidPathInfo info { Hash::dummy }; + UnkeyedValidPathInfo info{Hash::dummy}; auto deriver = readString(conn.from); if (deriver != "") @@ -81,25 +69,21 @@ UnkeyedValidPathInfo ServeProto::Serialise::read(const Sto return info; } -void ServeProto::Serialise::write(const StoreDirConfig & store, WriteConn conn, const UnkeyedValidPathInfo & info) +void ServeProto::Serialise::write( + const StoreDirConfig & store, WriteConn conn, const UnkeyedValidPathInfo & info) { - conn.to - << (info.deriver ? 
store.printStorePath(*info.deriver) : ""); + conn.to << (info.deriver ? store.printStorePath(*info.deriver) : ""); ServeProto::write(store, conn, info.references); // !!! Maybe we want compression? - conn.to - << info.narSize // downloadSize, lie a little - << info.narSize; + conn.to << info.narSize // downloadSize, lie a little + << info.narSize; if (GET_PROTOCOL_MINOR(conn.version) >= 4) - conn.to - << info.narHash.to_string(HashFormat::Nix32, true) - << renderContentAddress(info.ca) - << info.sigs; + conn.to << info.narHash.to_string(HashFormat::Nix32, true) << renderContentAddress(info.ca) << info.sigs; } - -ServeProto::BuildOptions ServeProto::Serialise::read(const StoreDirConfig & store, ReadConn conn) +ServeProto::BuildOptions +ServeProto::Serialise::read(const StoreDirConfig & store, ReadConn conn) { BuildOptions options; options.maxSilentTime = readInt(conn.from); @@ -116,22 +100,18 @@ ServeProto::BuildOptions ServeProto::Serialise::read(c return options; } -void ServeProto::Serialise::write(const StoreDirConfig & store, WriteConn conn, const ServeProto::BuildOptions & options) +void ServeProto::Serialise::write( + const StoreDirConfig & store, WriteConn conn, const ServeProto::BuildOptions & options) { - conn.to - << options.maxSilentTime - << options.buildTimeout; + conn.to << options.maxSilentTime << options.buildTimeout; if (GET_PROTOCOL_MINOR(conn.version) >= 2) - conn.to - << options.maxLogSize; + conn.to << options.maxLogSize; if (GET_PROTOCOL_MINOR(conn.version) >= 3) - conn.to - << options.nrRepeats - << options.enforceDeterminism; + conn.to << options.nrRepeats << options.enforceDeterminism; if (GET_PROTOCOL_MINOR(conn.version) >= 7) { conn.to << ((int) options.keepFailed); } } -} +} // namespace nix diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc index c3fb1f413..dd9f10422 100644 --- a/src/libstore/sqlite.cc +++ b/src/libstore/sqlite.cc @@ -11,11 +11,18 @@ namespace nix { -SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, HintFmt && hf) - : Error(""), path(path), errMsg(errMsg), errNo(errNo), extendedErrNo(extendedErrNo), offset(offset) +SQLiteError::SQLiteError( + const char * path, const char * errMsg, int errNo, int extendedErrNo, int offset, HintFmt && hf) + : Error("") + , path(path) + , errMsg(errMsg) + , errNo(errNo) + , extendedErrNo(extendedErrNo) + , offset(offset) { auto offsetStr = (offset == -1) ? "" : "at offset " + std::to_string(offset) + ": "; - err.msg = HintFmt("%s: %s%s, %s (in '%s')", + err.msg = HintFmt( + "%s: %s%s, %s (in '%s')", Uncolored(hf.str()), offsetStr, sqlite3_errstr(extendedErrNo), @@ -35,9 +42,7 @@ SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int ex if (err == SQLITE_BUSY || err == SQLITE_PROTOCOL) { auto exp = SQLiteBusy(path, errMsg, err, exterr, offset, std::move(hf)); exp.err.msg = HintFmt( - err == SQLITE_PROTOCOL - ? "SQLite database '%s' is busy (SQLITE_PROTOCOL)" - : "SQLite database '%s' is busy", + err == SQLITE_PROTOCOL ? "SQLite database '%s' is busy (SQLITE_PROTOCOL)" : "SQLite database '%s' is busy", path ? path : "(in-memory)"); throw exp; } else @@ -58,10 +63,11 @@ SQLite::SQLite(const Path & path, SQLiteOpenMode mode) // useSQLiteWAL also indicates what virtual file system we need. Using // `unix-dotfile` is needed on NFS file systems and on Windows' Subsystem // for Linux (WSL) where useSQLiteWAL should be false by default. - const char *vfs = settings.useSQLiteWAL ? 
0 : "unix-dotfile"; + const char * vfs = settings.useSQLiteWAL ? 0 : "unix-dotfile"; bool immutable = mode == SQLiteOpenMode::Immutable; int flags = immutable ? SQLITE_OPEN_READONLY : SQLITE_OPEN_READWRITE; - if (mode == SQLiteOpenMode::Normal) flags |= SQLITE_OPEN_CREATE; + if (mode == SQLiteOpenMode::Normal) + flags |= SQLITE_OPEN_CREATE; auto uri = "file:" + percentEncode(path) + "?immutable=" + (immutable ? "1" : "0"); int ret = sqlite3_open_v2(uri.c_str(), &db, SQLITE_OPEN_URI | flags, vfs); if (ret != SQLITE_OK) { @@ -143,7 +149,7 @@ SQLiteStmt::Use::~Use() sqlite3_reset(stmt); } -SQLiteStmt::Use & SQLiteStmt::Use::operator () (std::string_view value, bool notNull) +SQLiteStmt::Use & SQLiteStmt::Use::operator()(std::string_view value, bool notNull) { if (notNull) { if (sqlite3_bind_text(stmt, curArg++, value.data(), -1, SQLITE_TRANSIENT) != SQLITE_OK) @@ -153,7 +159,7 @@ SQLiteStmt::Use & SQLiteStmt::Use::operator () (std::string_view value, bool not return *this; } -SQLiteStmt::Use & SQLiteStmt::Use::operator () (const unsigned char * data, size_t len, bool notNull) +SQLiteStmt::Use & SQLiteStmt::Use::operator()(const unsigned char * data, size_t len, bool notNull) { if (notNull) { if (sqlite3_bind_blob(stmt, curArg++, data, len, SQLITE_TRANSIENT) != SQLITE_OK) @@ -163,7 +169,7 @@ SQLiteStmt::Use & SQLiteStmt::Use::operator () (const unsigned char * data, size return *this; } -SQLiteStmt::Use & SQLiteStmt::Use::operator () (int64_t value, bool notNull) +SQLiteStmt::Use & SQLiteStmt::Use::operator()(int64_t value, bool notNull) { if (notNull) { if (sqlite3_bind_int64(stmt, curArg++, value) != SQLITE_OK) @@ -249,16 +255,14 @@ void handleSQLiteBusy(const SQLiteBusy & e, time_t & nextWarning) time_t now = time(0); if (now > nextWarning) { nextWarning = now + 10; - logWarning({ - .msg = e.info().msg - }); + logWarning({.msg = e.info().msg}); } /* Sleep for a while since retrying the transaction right away is likely to fail again. */ checkInterrupt(); /* <= 0.1s */ - std::this_thread::sleep_for(std::chrono::milliseconds { rand() % 100 }); + std::this_thread::sleep_for(std::chrono::milliseconds{rand() % 100}); } -} +} // namespace nix diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index 6992ae774..875a4fea5 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -11,25 +11,20 @@ namespace nix { -SSHStoreConfig::SSHStoreConfig( - std::string_view scheme, - std::string_view authority, - const Params & params) +SSHStoreConfig::SSHStoreConfig(std::string_view scheme, std::string_view authority, const Params & params) : Store::Config{params} , RemoteStore::Config{params} , CommonSSHStoreConfig{scheme, authority, params} { } - std::string SSHStoreConfig::doc() { return - #include "ssh-store.md" - ; +#include "ssh-store.md" + ; } - struct SSHStore : virtual RemoteStore { using Config = SSHStoreConfig; @@ -41,8 +36,8 @@ struct SSHStore : virtual RemoteStore , RemoteStore{*config} , config{config} , master(config->createSSHMaster( - // Use SSH master only if using more than 1 connection. - connections->capacity() > 1)) + // Use SSH master only if using more than 1 connection. 
+ connections->capacity() > 1)) { } @@ -53,7 +48,9 @@ struct SSHStore : virtual RemoteStore // FIXME extend daemon protocol, move implementation to RemoteStore std::optional getBuildLogExact(const StorePath & path) override - { unsupported("getBuildLogExact"); } + { + unsupported("getBuildLogExact"); + } protected: @@ -75,8 +72,7 @@ protected: SSHMaster master; - void setOptions(RemoteStore::Connection & conn) override - { + void setOptions(RemoteStore::Connection & conn) override { /* TODO Add a way to explicitly ask for some options to be forwarded. One option: A way to query the daemon for its settings, and then a series of params to SSHStore like @@ -86,7 +82,6 @@ protected: }; }; - MountedSSHStoreConfig::MountedSSHStoreConfig(StringMap params) : StoreConfig(params) , RemoteStoreConfig(params) @@ -108,11 +103,10 @@ MountedSSHStoreConfig::MountedSSHStoreConfig(std::string_view scheme, std::strin std::string MountedSSHStoreConfig::doc() { return - #include "mounted-ssh-store.md" - ; +#include "mounted-ssh-store.md" + ; } - /** * The mounted ssh store assumes that filesystems on the remote host are * shared with the local host. This means that the remote nix store is @@ -183,18 +177,16 @@ struct MountedSSHStore : virtual SSHStore, virtual LocalFSStore } }; - -ref SSHStore::Config::openStore() const { +ref SSHStore::Config::openStore() const +{ return make_ref(ref{shared_from_this()}); } -ref MountedSSHStore::Config::openStore() const { - return make_ref(ref{ - std::dynamic_pointer_cast(shared_from_this()) - }); +ref MountedSSHStore::Config::openStore() const +{ + return make_ref(ref{std::dynamic_pointer_cast(shared_from_this())}); } - ref SSHStore::openConnection() { auto conn = make_ref(); @@ -204,8 +196,7 @@ ref SSHStore::openConnection() command.push_back("--store"); command.push_back(config->remoteStore.get()); } - command.insert(command.end(), - extraRemoteProgramArgs.begin(), extraRemoteProgramArgs.end()); + command.insert(command.end(), extraRemoteProgramArgs.begin(), extraRemoteProgramArgs.end()); conn->sshConn = master.startCommand(std::move(command)); conn->to = FdSink(conn->sshConn->in.get()); conn->from = FdSource(conn->sshConn->out.get()); @@ -215,4 +206,4 @@ ref SSHStore::openConnection() static RegisterStoreImplementation regSSHStore; static RegisterStoreImplementation regMountedSSHStore; -} +} // namespace nix diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index c8fec5244..e53c4b336 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -21,7 +21,9 @@ SSHMaster::SSHMaster( std::string_view host, std::string_view keyFile, std::string_view sshPublicHostKey, - bool useMaster, bool compress, Descriptor logFD) + bool useMaster, + bool compress, + Descriptor logFD) : host(host) , fakeSSH(host == "localhost") , keyFile(keyFile) @@ -72,11 +74,12 @@ void SSHMaster::addCommonSSHOpts(Strings & args) args.push_back("-oLocalCommand=echo started"); } -bool SSHMaster::isMasterRunning() { +bool SSHMaster::isMasterRunning() +{ Strings args = {"-O", "check", host}; addCommonSSHOpts(args); - auto res = runProgram(RunOptions {.program = "ssh", .args = args, .mergeStderrToStdout = true}); + auto res = runProgram(RunOptions{.program = "ssh", .args = args, .mergeStderrToStdout = true}); return res.first == 0; } @@ -101,8 +104,7 @@ Strings createSSHEnv() return r; } -std::unique_ptr SSHMaster::startCommand( - Strings && command, Strings && extraSshArgs) +std::unique_ptr SSHMaster::startCommand(Strings && command, Strings && extraSshArgs) { #ifdef _WIN32 // TODO re-enable on 
Windows, once we can start processes. throw UnimplementedError("cannot yet SSH on windows because spawning processes is not yet implemented"); @@ -122,40 +124,41 @@ std::unique_ptr SSHMaster::startCommand( loggerSuspension = std::make_unique(logger->suspend()); } - conn->sshPid = startProcess([&]() { - restoreProcessContext(); + conn->sshPid = startProcess( + [&]() { + restoreProcessContext(); - close(in.writeSide.get()); - close(out.readSide.get()); + close(in.writeSide.get()); + close(out.readSide.get()); - if (dup2(in.readSide.get(), STDIN_FILENO) == -1) - throw SysError("duping over stdin"); - if (dup2(out.writeSide.get(), STDOUT_FILENO) == -1) - throw SysError("duping over stdout"); - if (logFD != -1 && dup2(logFD, STDERR_FILENO) == -1) - throw SysError("duping over stderr"); + if (dup2(in.readSide.get(), STDIN_FILENO) == -1) + throw SysError("duping over stdin"); + if (dup2(out.writeSide.get(), STDOUT_FILENO) == -1) + throw SysError("duping over stdout"); + if (logFD != -1 && dup2(logFD, STDERR_FILENO) == -1) + throw SysError("duping over stderr"); - Strings args; + Strings args; - if (!fakeSSH) { - args = { "ssh", host.c_str(), "-x" }; - addCommonSSHOpts(args); - if (socketPath != "") - args.insert(args.end(), {"-S", socketPath}); - if (verbosity >= lvlChatty) - args.push_back("-v"); - args.splice(args.end(), std::move(extraSshArgs)); - args.push_back("--"); - } + if (!fakeSSH) { + args = {"ssh", host.c_str(), "-x"}; + addCommonSSHOpts(args); + if (socketPath != "") + args.insert(args.end(), {"-S", socketPath}); + if (verbosity >= lvlChatty) + args.push_back("-v"); + args.splice(args.end(), std::move(extraSshArgs)); + args.push_back("--"); + } - args.splice(args.end(), std::move(command)); - auto env = createSSHEnv(); - nix::execvpe(args.begin()->c_str(), stringsToCharPtrs(args).data(), stringsToCharPtrs(env).data()); - - // could not exec ssh/bash - throw SysError("unable to execute '%s'", args.front()); - }, options); + args.splice(args.end(), std::move(command)); + auto env = createSSHEnv(); + nix::execvpe(args.begin()->c_str(), stringsToCharPtrs(args).data(), stringsToCharPtrs(env).data()); + // could not exec ssh/bash + throw SysError("unable to execute '%s'", args.front()); + }, + options); in.readSide = INVALID_DESCRIPTOR; out.writeSide = INVALID_DESCRIPTOR; @@ -166,7 +169,8 @@ std::unique_ptr SSHMaster::startCommand( std::string reply; try { reply = readLine(out.readSide.get()); - } catch (EndOfFile & e) { } + } catch (EndOfFile & e) { + } if (reply != "started") { printTalkative("SSH stdout first line: %s", reply); @@ -185,11 +189,13 @@ std::unique_ptr SSHMaster::startCommand( Path SSHMaster::startMaster() { - if (!useMaster) return ""; + if (!useMaster) + return ""; auto state(state_.lock()); - if (state->sshMaster != INVALID_DESCRIPTOR) return state->socketPath; + if (state->sshMaster != INVALID_DESCRIPTOR) + return state->socketPath; state->socketPath = (Path) *state->tmpDir + "/ssh.sock"; @@ -204,30 +210,33 @@ Path SSHMaster::startMaster() if (isMasterRunning()) return state->socketPath; - state->sshMaster = startProcess([&]() { - restoreProcessContext(); + state->sshMaster = startProcess( + [&]() { + restoreProcessContext(); - close(out.readSide.get()); + close(out.readSide.get()); - if (dup2(out.writeSide.get(), STDOUT_FILENO) == -1) - throw SysError("duping over stdout"); + if (dup2(out.writeSide.get(), STDOUT_FILENO) == -1) + throw SysError("duping over stdout"); - Strings args = { "ssh", host.c_str(), "-M", "-N", "-S", state->socketPath }; - if (verbosity >= 
lvlChatty) - args.push_back("-v"); - addCommonSSHOpts(args); - auto env = createSSHEnv(); - nix::execvpe(args.begin()->c_str(), stringsToCharPtrs(args).data(), stringsToCharPtrs(env).data()); + Strings args = {"ssh", host.c_str(), "-M", "-N", "-S", state->socketPath}; + if (verbosity >= lvlChatty) + args.push_back("-v"); + addCommonSSHOpts(args); + auto env = createSSHEnv(); + nix::execvpe(args.begin()->c_str(), stringsToCharPtrs(args).data(), stringsToCharPtrs(env).data()); - throw SysError("unable to execute '%s'", args.front()); - }, options); + throw SysError("unable to execute '%s'", args.front()); + }, + options); out.writeSide = INVALID_DESCRIPTOR; std::string reply; try { reply = readLine(out.readSide.get()); - } catch (EndOfFile & e) { } + } catch (EndOfFile & e) { + } if (reply != "started") { printTalkative("SSH master stdout first line: %s", reply); @@ -254,4 +263,4 @@ void SSHMaster::Connection::trySetBufferSize(size_t size) #endif } -} +} // namespace nix diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 9aeab1d1f..b80d82b99 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -29,13 +29,11 @@ using json = nlohmann::json; namespace nix { - bool MixStoreDirMethods::isInStore(PathView path) const { return isInDir(path, storeDir); } - std::pair MixStoreDirMethods::toStorePath(PathView path) const { if (!isInStore(path)) @@ -47,12 +45,12 @@ std::pair MixStoreDirMethods::toStorePath(PathView path) const return {parseStorePath(path.substr(0, slash)), (Path) path.substr(slash)}; } - Path Store::followLinksToStore(std::string_view _path) const { Path path = absPath(std::string(_path)); while (!isInStore(path)) { - if (!std::filesystem::is_symlink(path)) break; + if (!std::filesystem::is_symlink(path)) + break; auto target = readLink(path); path = absPath(target, dirOf(path)); } @@ -61,13 +59,11 @@ Path Store::followLinksToStore(std::string_view _path) const return path; } - StorePath Store::followLinksToStorePath(std::string_view path) const { return toStorePath(followLinksToStore(path)).first; } - /* The exact specification of store paths is in `protocols/store-path.md` in the Nix manual. These few functions implement that specification. @@ -77,49 +73,38 @@ also update the user-visible behavior, please update the specification to match. 
*/ - -StorePath MixStoreDirMethods::makeStorePath(std::string_view type, - std::string_view hash, std::string_view name) const +StorePath MixStoreDirMethods::makeStorePath(std::string_view type, std::string_view hash, std::string_view name) const { /* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */ - auto s = std::string(type) + ":" + std::string(hash) - + ":" + storeDir + ":" + std::string(name); + auto s = std::string(type) + ":" + std::string(hash) + ":" + storeDir + ":" + std::string(name); auto h = compressHash(hashString(HashAlgorithm::SHA256, s), 20); return StorePath(h, name); } - -StorePath MixStoreDirMethods::makeStorePath(std::string_view type, - const Hash & hash, std::string_view name) const +StorePath MixStoreDirMethods::makeStorePath(std::string_view type, const Hash & hash, std::string_view name) const { return makeStorePath(type, hash.to_string(HashFormat::Base16, true), name); } - -StorePath MixStoreDirMethods::makeOutputPath(std::string_view id, - const Hash & hash, std::string_view name) const +StorePath MixStoreDirMethods::makeOutputPath(std::string_view id, const Hash & hash, std::string_view name) const { - return makeStorePath("output:" + std::string { id }, hash, outputPathName(name, id)); + return makeStorePath("output:" + std::string{id}, hash, outputPathName(name, id)); } - /* Stuff the references (if any) into the type. This is a bit hacky, but we can't put them in, say, (per the grammar above) since that would be ambiguous. */ -static std::string makeType( - const MixStoreDirMethods & store, - std::string && type, - const StoreReferences & references) +static std::string makeType(const MixStoreDirMethods & store, std::string && type, const StoreReferences & references) { for (auto & i : references.others) { type += ":"; type += store.printStorePath(i); } - if (references.self) type += ":self"; + if (references.self) + type += ":self"; return std::move(type); } - StorePath MixStoreDirMethods::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const { if (info.method == FileIngestionMethod::Git && info.hash.algo != HashAlgorithm::SHA1) @@ -129,40 +114,41 @@ StorePath MixStoreDirMethods::makeFixedOutputPath(std::string_view name, const F return makeStorePath(makeType(*this, "source", info.references), info.hash, name); } else { if (!info.references.empty()) { - throw Error("fixed output derivation '%s' is not allowed to refer to other store paths.\nYou may need to use the 'unsafeDiscardReferences' derivation attribute, see the manual for more details.", + throw Error( + "fixed output derivation '%s' is not allowed to refer to other store paths.\nYou may need to use the 'unsafeDiscardReferences' derivation attribute, see the manual for more details.", name); } // make a unique digest based on the parameters for creating this store object - auto payload = "fixed:out:" - + makeFileIngestionPrefix(info.method) - + info.hash.to_string(HashFormat::Base16, true) + ":"; + auto payload = + "fixed:out:" + makeFileIngestionPrefix(info.method) + info.hash.to_string(HashFormat::Base16, true) + ":"; auto digest = hashString(HashAlgorithm::SHA256, payload); return makeStorePath("output:out", digest, name); } } - -StorePath MixStoreDirMethods::makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const +StorePath +MixStoreDirMethods::makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const { // New template - return std::visit(overloaded { - [&](const TextInfo & ti) { - 
assert(ti.hash.algo == HashAlgorithm::SHA256); - return makeStorePath( - makeType(*this, "text", StoreReferences { - .others = ti.references, - .self = false, - }), - ti.hash, - name); - }, - [&](const FixedOutputInfo & foi) { - return makeFixedOutputPath(name, foi); - } - }, ca.raw); + return std::visit( + overloaded{ + [&](const TextInfo & ti) { + assert(ti.hash.algo == HashAlgorithm::SHA256); + return makeStorePath( + makeType( + *this, + "text", + StoreReferences{ + .others = ti.references, + .self = false, + }), + ti.hash, + name); + }, + [&](const FixedOutputInfo & foi) { return makeFixedOutputPath(name, foi); }}, + ca.raw); } - std::pair MixStoreDirMethods::computeStorePath( std::string_view name, const SourcePath & path, @@ -188,7 +174,6 @@ std::pair MixStoreDirMethods::computeStorePath( }; } - StorePath Store::addToStore( std::string_view name, const SourcePath & path, @@ -223,11 +208,7 @@ StorePath Store::addToStore( return storePath.value(); } -void Store::addMultipleToStore( - PathsSource && pathsToCopy, - Activity & act, - RepairFlag repair, - CheckSigsFlag checkSigs) +void Store::addMultipleToStore(PathsSource && pathsToCopy, Activity & act, RepairFlag repair, CheckSigsFlag checkSigs) { std::atomic nrDone{0}; std::atomic nrFailed{0}; @@ -247,15 +228,12 @@ void Store::addMultipleToStore( act.setExpected(actCopyPath, bytesExpected); - auto showProgress = [&, nrTotal = pathsToCopy.size()]() { - act.progress(nrDone, nrTotal, nrRunning, nrFailed); - }; + auto showProgress = [&, nrTotal = pathsToCopy.size()]() { act.progress(nrDone, nrTotal, nrRunning, nrFailed); }; processGraph( storePathsToAdd, [&](const StorePath & path) { - auto & [info, _] = *infosMap.at(path); if (isValidPath(info.path)) { @@ -301,17 +279,15 @@ void Store::addMultipleToStore( }); } -void Store::addMultipleToStore( - Source & source, - RepairFlag repair, - CheckSigsFlag checkSigs) +void Store::addMultipleToStore(Source & source, RepairFlag repair, CheckSigsFlag checkSigs) { auto expected = readNum(source); for (uint64_t i = 0; i < expected; ++i) { // FIXME we should not be using the worker protocol here, let // alone the worker protocol with a hard-coded version! - auto info = WorkerProto::Serialise::read(*this, - WorkerProto::ReadConn { + auto info = WorkerProto::Serialise::read( + *this, + WorkerProto::ReadConn{ .from = source, .version = 16, }); @@ -320,7 +296,6 @@ void Store::addMultipleToStore( } } - /* The aim of this function is to compute in one pass the correct ValidPathInfo for the files that we are trying to add to the store. To accomplish that in one @@ -347,38 +322,37 @@ digraph graphname { ValidPathInfo Store::addToStoreSlow( std::string_view name, const SourcePath & srcPath, - ContentAddressMethod method, HashAlgorithm hashAlgo, + ContentAddressMethod method, + HashAlgorithm hashAlgo, const StorePathSet & references, std::optional expectedCAHash) { - HashSink narHashSink { HashAlgorithm::SHA256 }; - HashSink caHashSink { hashAlgo }; + HashSink narHashSink{HashAlgorithm::SHA256}; + HashSink caHashSink{hashAlgo}; /* Note that fileSink and unusualHashTee must be mutually exclusive, since they both write to caHashSink. Note that that requisite is currently true because the former is only used in the flat case. 
*/ - RegularFileSink fileSink { caHashSink }; - TeeSink unusualHashTee { narHashSink, caHashSink }; + RegularFileSink fileSink{caHashSink}; + TeeSink unusualHashTee{narHashSink, caHashSink}; auto & narSink = method == ContentAddressMethod::Raw::NixArchive && hashAlgo != HashAlgorithm::SHA256 - ? static_cast(unusualHashTee) - : narHashSink; + ? static_cast(unusualHashTee) + : narHashSink; /* Functionally, this means that fileSource will yield the content of srcPath. The fact that we use scratchpadSink as a temporary buffer here is an implementation detail. */ - auto fileSource = sinkToSource([&](Sink & scratchpadSink) { - srcPath.dumpPath(scratchpadSink); - }); + auto fileSource = sinkToSource([&](Sink & scratchpadSink) { srcPath.dumpPath(scratchpadSink); }); /* tapped provides the same data as fileSource, but we also write all the information to narSink. */ - TeeSource tapped { *fileSource, narSink }; + TeeSource tapped{*fileSource, narSink}; NullFileSystemObjectSink blank; auto & parseSink = method.getFileIngestionMethod() == FileIngestionMethod::Flat - ? (FileSystemObjectSink &) fileSink - : (FileSystemObjectSink &) blank; // for recursive or git we do recursive + ? (FileSystemObjectSink &) fileSink + : (FileSystemObjectSink &) blank; // for recursive or git we do recursive /* The information that flows from tapped (besides being replicated in narSink), is now put in parseSink. */ @@ -388,16 +362,14 @@ ValidPathInfo Store::addToStoreSlow( finish. */ auto [narHash, narSize] = narHashSink.finish(); - auto hash = method == ContentAddressMethod::Raw::NixArchive && hashAlgo == HashAlgorithm::SHA256 - ? narHash - : method == ContentAddressMethod::Raw::Git - ? git::dumpHash(hashAlgo, srcPath).hash - : caHashSink.finish().first; + auto hash = method == ContentAddressMethod::Raw::NixArchive && hashAlgo == HashAlgorithm::SHA256 ? narHash + : method == ContentAddressMethod::Raw::Git ? git::dumpHash(hashAlgo, srcPath).hash + : caHashSink.finish().first; if (expectedCAHash && expectedCAHash != hash) throw Error("hash mismatch for '%s'", srcPath); - ValidPathInfo info { + ValidPathInfo info{ *this, name, ContentAddressWithReferences::fromParts( @@ -412,9 +384,7 @@ ValidPathInfo Store::addToStoreSlow( info.narSize = narSize; if (!isValidPath(info.path)) { - auto source = sinkToSource([&](Sink & scratchpadSink) { - srcPath.dumpPath(scratchpadSink); - }); + auto source = sinkToSource([&](Sink & scratchpadSink) { srcPath.dumpPath(scratchpadSink); }); addToStore(info, *source); } @@ -442,7 +412,6 @@ Store::Store(const Store::Config & config) assertLibStoreInitialized(); } - std::string Store::getUri() { return ""; @@ -450,9 +419,8 @@ std::string Store::getUri() bool Store::PathInfoCacheValue::isKnownNow() { - std::chrono::duration ttl = didExist() - ? std::chrono::seconds(settings.ttlPositiveNarInfoCache) - : std::chrono::seconds(settings.ttlNegativeNarInfoCache); + std::chrono::duration ttl = didExist() ? std::chrono::seconds(settings.ttlPositiveNarInfoCache) + : std::chrono::seconds(settings.ttlNegativeNarInfoCache); return std::chrono::steady_clock::now() < time_point + ttl; } @@ -467,9 +435,8 @@ std::map> Store::queryStaticPartialDerivat return outputs; } -std::map> Store::queryPartialDerivationOutputMap( - const StorePath & path, - Store * evalStore_) +std::map> +Store::queryPartialDerivationOutputMap(const StorePath & path, Store * evalStore_) { auto & evalStore = evalStore_ ? 
*evalStore_ : *this; @@ -495,7 +462,8 @@ std::map> Store::queryPartialDerivationOut return outputs; } -OutputPathMap Store::queryDerivationOutputMap(const StorePath & path, Store * evalStore) { +OutputPathMap Store::queryDerivationOutputMap(const StorePath & path, Store * evalStore) +{ auto resp = queryPartialDerivationOutputMap(path, evalStore); OutputPathMap result; for (auto & [outName, optOutPath] : resp) { @@ -510,16 +478,16 @@ StorePathSet Store::queryDerivationOutputs(const StorePath & path) { auto outputMap = this->queryDerivationOutputMap(path); StorePathSet outputPaths; - for (auto & i: outputMap) { + for (auto & i : outputMap) { outputPaths.emplace(std::move(i.second)); } return outputPaths; } - void Store::querySubstitutablePathInfos(const StorePathCAMap & paths, SubstitutablePathInfos & infos) { - if (!settings.useSubstitutes) return; + if (!settings.useSubstitutes) + return; for (auto & sub : getDefaultSubstituters()) { for (auto & path : paths) { if (infos.count(path.first)) @@ -531,13 +499,17 @@ void Store::querySubstitutablePathInfos(const StorePathCAMap & paths, Substituta // Recompute store path so that we can use a different store root. if (path.second) { subPath = makeFixedOutputPathFromCA( - path.first.name(), - ContentAddressWithReferences::withoutRefs(*path.second)); + path.first.name(), ContentAddressWithReferences::withoutRefs(*path.second)); if (sub->storeDir == storeDir) assert(subPath == path.first); if (subPath != path.first) - debug("replaced path '%s' with '%s' for substituter '%s'", printStorePath(path.first), sub->printStorePath(subPath), sub->getUri()); - } else if (sub->storeDir != storeDir) continue; + debug( + "replaced path '%s' with '%s' for substituter '%s'", + printStorePath(path.first), + sub->printStorePath(subPath), + sub->getUri()); + } else if (sub->storeDir != storeDir) + continue; debug("checking substituter '%s' for path '%s'", sub->getUri(), sub->printStorePath(subPath)); try { @@ -546,14 +518,15 @@ void Store::querySubstitutablePathInfos(const StorePathCAMap & paths, Substituta if (sub->storeDir != storeDir && !(info->isContentAddressed(*sub) && info->references.empty())) continue; - auto narInfo = std::dynamic_pointer_cast( - std::shared_ptr(info)); - infos.insert_or_assign(path.first, SubstitutablePathInfo{ - .deriver = info->deriver, - .references = info->references, - .downloadSize = narInfo ? narInfo->fileSize : 0, - .narSize = info->narSize, - }); + auto narInfo = std::dynamic_pointer_cast(std::shared_ptr(info)); + infos.insert_or_assign( + path.first, + SubstitutablePathInfo{ + .deriver = info->deriver, + .references = info->references, + .downloadSize = narInfo ? narInfo->fileSize : 0, + .narSize = info->narSize, + }); } catch (InvalidPath &) { } catch (SubstituterDisabled &) { } catch (Error & e) { @@ -566,7 +539,6 @@ void Store::querySubstitutablePathInfos(const StorePathCAMap & paths, Substituta } } - bool Store::isValidPath(const StorePath & storePath) { { @@ -583,8 +555,10 @@ bool Store::isValidPath(const StorePath & storePath) if (res.first != NarInfoDiskCache::oUnknown) { stats.narInfoReadAverted++; auto state_(state.lock()); - state_->pathInfoCache.upsert(storePath.to_string(), - res.first == NarInfoDiskCache::oInvalid ? PathInfoCacheValue{} : PathInfoCacheValue { .value = res.second }); + state_->pathInfoCache.upsert( + storePath.to_string(), + res.first == NarInfoDiskCache::oInvalid ? 
PathInfoCacheValue{} + : PathInfoCacheValue{.value = res.second}); return res.first == NarInfoDiskCache::oValid; } } @@ -598,7 +572,6 @@ bool Store::isValidPath(const StorePath & storePath) return valid; } - /* Default implementation for stores that only implement queryPathInfoUncached(). */ bool Store::isValidPathUncached(const StorePath & path) @@ -611,32 +584,27 @@ bool Store::isValidPathUncached(const StorePath & path) } } - ref Store::queryPathInfo(const StorePath & storePath) { std::promise> promise; - queryPathInfo(storePath, - {[&](std::future> result) { - try { - promise.set_value(result.get()); - } catch (...) { - promise.set_exception(std::current_exception()); - } - }}); + queryPathInfo(storePath, {[&](std::future> result) { + try { + promise.set_value(result.get()); + } catch (...) { + promise.set_exception(std::current_exception()); + } + }}); return promise.get_future().get(); } - static bool goodStorePath(const StorePath & expected, const StorePath & actual) { - return - expected.hashPart() == actual.hashPart() - && (expected.name() == Store::MissingName || expected.name() == actual.name()); + return expected.hashPart() == actual.hashPart() + && (expected.name() == Store::MissingName || expected.name() == actual.name()); } - std::optional> Store::queryPathInfoFromClientCache(const StorePath & storePath) { auto hashPart = std::string(storePath.hashPart()); @@ -658,10 +626,11 @@ std::optional> Store::queryPathInfoFromClie stats.narInfoReadAverted++; { auto state_(state.lock()); - state_->pathInfoCache.upsert(storePath.to_string(), - res.first == NarInfoDiskCache::oInvalid ? PathInfoCacheValue{} : PathInfoCacheValue{ .value = res.second }); - if (res.first == NarInfoDiskCache::oInvalid || - !goodStorePath(storePath, res.second->path)) + state_->pathInfoCache.upsert( + storePath.to_string(), + res.first == NarInfoDiskCache::oInvalid ? PathInfoCacheValue{} + : PathInfoCacheValue{.value = res.second}); + if (res.first == NarInfoDiskCache::oInvalid || !goodStorePath(storePath, res.second->path)) return std::make_optional(nullptr); } assert(res.second); @@ -672,9 +641,7 @@ std::optional> Store::queryPathInfoFromClie return std::nullopt; } - -void Store::queryPathInfo(const StorePath & storePath, - Callback> callback) noexcept +void Store::queryPathInfo(const StorePath & storePath, Callback> callback) noexcept { auto hashPart = std::string(storePath.hashPart()); @@ -687,13 +654,14 @@ void Store::queryPathInfo(const StorePath & storePath, else throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); } - } catch (...) { return callback.rethrow(); } + } catch (...) { + return callback.rethrow(); + } auto callbackPtr = std::make_shared(std::move(callback)); - queryPathInfoUncached(storePath, - {[this, storePath, hashPart, callbackPtr](std::future> fut) { - + queryPathInfoUncached( + storePath, {[this, storePath, hashPart, callbackPtr](std::future> fut) { try { auto info = fut.get(); @@ -702,7 +670,7 @@ void Store::queryPathInfo(const StorePath & storePath, { auto state_(state.lock()); - state_->pathInfoCache.upsert(storePath.to_string(), PathInfoCacheValue { .value = info }); + state_->pathInfoCache.upsert(storePath.to_string(), PathInfoCacheValue{.value = info}); } if (!info || !goodStorePath(storePath, info->path)) { @@ -711,27 +679,25 @@ void Store::queryPathInfo(const StorePath & storePath, } (*callbackPtr)(ref(info)); - } catch (...) { callbackPtr->rethrow(); } + } catch (...) 
{ + callbackPtr->rethrow(); + } }}); } -void Store::queryRealisation(const DrvOutput & id, - Callback> callback) noexcept +void Store::queryRealisation(const DrvOutput & id, Callback> callback) noexcept { try { if (diskCache) { - auto [cacheOutcome, maybeCachedRealisation] - = diskCache->lookupRealisation(getUri(), id); + auto [cacheOutcome, maybeCachedRealisation] = diskCache->lookupRealisation(getUri(), id); switch (cacheOutcome) { case NarInfoDiskCache::oValid: debug("Returning a cached realisation for %s", id.to_string()); callback(maybeCachedRealisation); return; case NarInfoDiskCache::oInvalid: - debug( - "Returning a cached missing realisation for %s", - id.to_string()); + debug("Returning a cached missing realisation for %s", id.to_string()); callback(nullptr); return; case NarInfoDiskCache::oUnknown: @@ -742,29 +708,25 @@ void Store::queryRealisation(const DrvOutput & id, return callback.rethrow(); } - auto callbackPtr - = std::make_shared(std::move(callback)); + auto callbackPtr = std::make_shared(std::move(callback)); - queryRealisationUncached( - id, - { [this, id, callbackPtr]( - std::future> fut) { - try { - auto info = fut.get(); + queryRealisationUncached(id, {[this, id, callbackPtr](std::future> fut) { + try { + auto info = fut.get(); - if (diskCache) { - if (info) - diskCache->upsertRealisation(getUri(), *info); - else - diskCache->upsertAbsentRealisation(getUri(), id); - } + if (diskCache) { + if (info) + diskCache->upsertRealisation(getUri(), *info); + else + diskCache->upsertAbsentRealisation(getUri(), id); + } - (*callbackPtr)(std::shared_ptr(info)); + (*callbackPtr)(std::shared_ptr(info)); - } catch (...) { - callbackPtr->rethrow(); - } - } }); + } catch (...) { + callbackPtr->rethrow(); + } + }}); } std::shared_ptr Store::queryRealisation(const DrvOutput & id) @@ -772,14 +734,13 @@ std::shared_ptr Store::queryRealisation(const DrvOutput & id) using RealPtr = std::shared_ptr; std::promise promise; - queryRealisation(id, - {[&](std::future result) { - try { - promise.set_value(result.get()); - } catch (...) { - promise.set_exception(std::current_exception()); - } - }}); + queryRealisation(id, {[&](std::future result) { + try { + promise.set_value(result.get()); + } catch (...) { + promise.set_exception(std::current_exception()); + } + }}); return promise.get_future().get(); } @@ -795,14 +756,14 @@ void Store::substitutePaths(const StorePathSet & paths) if (!missing.willSubstitute.empty()) try { std::vector subs; - for (auto & p : missing.willSubstitute) subs.emplace_back(DerivedPath::Opaque{p}); + for (auto & p : missing.willSubstitute) + subs.emplace_back(DerivedPath::Opaque{p}); buildPaths(subs); } catch (Error & e) { logWarning(e.info()); } } - StorePathSet Store::queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute) { struct State @@ -820,29 +781,29 @@ StorePathSet Store::queryValidPaths(const StorePathSet & paths, SubstituteFlag m auto doQuery = [&](const StorePath & path) { checkInterrupt(); queryPathInfo(path, {[path, &state_, &wakeup](std::future> fut) { - bool exists = false; - std::exception_ptr newExc{}; + bool exists = false; + std::exception_ptr newExc{}; - try { - auto info = fut.get(); - exists = true; - } catch (InvalidPath &) { - } catch (...) { - newExc = std::current_exception(); - } + try { + auto info = fut.get(); + exists = true; + } catch (InvalidPath &) { + } catch (...) 
{ + newExc = std::current_exception(); + } - auto state(state_.lock()); + auto state(state_.lock()); - if (exists) - state->valid.insert(path); + if (exists) + state->valid.insert(path); - if (newExc) - state->exc = newExc; + if (newExc) + state->exc = newExc; - assert(state->left); - if (!--state->left) - wakeup.notify_one(); - }}); + assert(state->left); + if (!--state->left) + wakeup.notify_one(); + }}); }; for (auto & path : paths) @@ -853,19 +814,18 @@ StorePathSet Store::queryValidPaths(const StorePathSet & paths, SubstituteFlag m while (true) { auto state(state_.lock()); if (!state->left) { - if (state->exc) std::rethrow_exception(state->exc); + if (state->exc) + std::rethrow_exception(state->exc); return std::move(state->valid); } state.wait(wakeup); } } - /* Return a string accepted by decodeValidPathInfo() that registers the specified paths as valid. Note: it's the responsibility of the caller to provide a closure. */ -std::string Store::makeValidityRegistration(const StorePathSet & paths, - bool showDerivers, bool showHash) +std::string Store::makeValidityRegistration(const StorePathSet & paths, bool showDerivers, bool showHash) { std::string s = ""; @@ -891,14 +851,15 @@ std::string Store::makeValidityRegistration(const StorePathSet & paths, return s; } - StorePathSet Store::exportReferences(const StorePathSet & storePaths, const StorePathSet & inputPaths) { StorePathSet paths; for (auto & storePath : storePaths) { if (!inputPaths.count(storePath)) - throw BuildError("cannot export references of path '%s' because it is not in the input closure of the derivation", printStorePath(storePath)); + throw BuildError( + "cannot export references of path '%s' because it is not in the input closure of the derivation", + printStorePath(storePath)); computeFSClosure({storePath}, paths); } @@ -927,7 +888,6 @@ StorePathSet Store::exportReferences(const StorePathSet & storePaths, const Stor return paths; } - const Store::Stats & Store::getStats() { { @@ -937,26 +897,16 @@ const Store::Stats & Store::getStats() return stats; } - -static std::string makeCopyPathMessage( - std::string_view srcUri, - std::string_view dstUri, - std::string_view storePath) +static std::string makeCopyPathMessage(std::string_view srcUri, std::string_view dstUri, std::string_view storePath) { - return srcUri == "local" || srcUri == "daemon" - ? fmt("copying path '%s' to '%s'", storePath, dstUri) - : dstUri == "local" || dstUri == "daemon" - ? fmt("copying path '%s' from '%s'", storePath, srcUri) - : fmt("copying path '%s' from '%s' to '%s'", storePath, srcUri, dstUri); + return srcUri == "local" || srcUri == "daemon" ? fmt("copying path '%s' to '%s'", storePath, dstUri) + : dstUri == "local" || dstUri == "daemon" + ? fmt("copying path '%s' from '%s'", storePath, srcUri) + : fmt("copying path '%s' from '%s' to '%s'", storePath, srcUri, dstUri); } - void copyStorePath( - Store & srcStore, - Store & dstStore, - const StorePath & storePath, - RepairFlag repair, - CheckSigsFlag checkSigs) + Store & srcStore, Store & dstStore, const StorePath & storePath, RepairFlag repair, CheckSigsFlag checkSigs) { /* Bail out early (before starting a download from srcStore) if dstStore already has this path. 
*/ @@ -966,9 +916,8 @@ void copyStorePath( auto srcUri = srcStore.getUri(); auto dstUri = dstStore.getUri(); auto storePathS = srcStore.printStorePath(storePath); - Activity act(*logger, lvlInfo, actCopyPath, - makeCopyPathMessage(srcUri, dstUri, storePathS), - {storePathS, srcUri, dstUri}); + Activity act( + *logger, lvlInfo, actCopyPath, makeCopyPathMessage(srcUri, dstUri, storePathS), {storePathS, srcUri, dstUri}); PushActivity pact(act.id); auto info = srcStore.queryPathInfo(storePath); @@ -978,9 +927,8 @@ void copyStorePath( // recompute store path on the chance dstStore does it differently if (info->ca && info->references.empty()) { auto info2 = make_ref(*info); - info2->path = dstStore.makeFixedOutputPathFromCA( - info->path.name(), - info->contentAddressWithReferences().value()); + info2->path = + dstStore.makeFixedOutputPathFromCA(info->path.name(), info->contentAddressWithReferences().value()); if (dstStore.storeDir == srcStore.storeDir) assert(info->path == info2->path); info = info2; @@ -992,21 +940,23 @@ void copyStorePath( info = info2; } - auto source = sinkToSource([&](Sink & sink) { - LambdaSink progressSink([&](std::string_view data) { - total += data.size(); - act.progress(total, info->narSize); + auto source = sinkToSource( + [&](Sink & sink) { + LambdaSink progressSink([&](std::string_view data) { + total += data.size(); + act.progress(total, info->narSize); + }); + TeeSink tee{sink, progressSink}; + srcStore.narFromPath(storePath, tee); + }, + [&]() { + throw EndOfFile( + "NAR for '%s' fetched from '%s' is incomplete", srcStore.printStorePath(storePath), srcStore.getUri()); }); - TeeSink tee { sink, progressSink }; - srcStore.narFromPath(storePath, tee); - }, [&]() { - throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", srcStore.printStorePath(storePath), srcStore.getUri()); - }); dstStore.addToStore(*info, *source, repair, checkSigs); } - std::map copyPaths( Store & srcStore, Store & dstStore, @@ -1038,14 +988,13 @@ std::map copyPaths( throw Error( "incomplete realisation closure: '%s' is a " "dependency of '%s' but isn't registered", - drvOutput.to_string(), current.id.to_string()); + drvOutput.to_string(), + current.id.to_string()); children.insert(*currentChild); } return children; }, - [&](const Realisation& current) -> void { - dstStore.registerDrvOutput(current, checkSigs); - }); + [&](const Realisation & current) -> void { dstStore.registerDrvOutput(current, checkSigs); }); } catch (MissingExperimentalFeature & e) { // Don't fail if the remote doesn't support CA derivations is it might // not be within our control to change that, and we might still want @@ -1071,7 +1020,8 @@ std::map copyPaths( StorePathSet missing; for (auto & path : storePaths) - if (!valid.count(path)) missing.insert(path); + if (!valid.count(path)) + missing.insert(path); Activity act(*logger, lvlInfo, actCopyPaths, fmt("copying %d paths", missing.size())); @@ -1091,15 +1041,15 @@ std::map copyPaths( auto storePathForDst = storePathForSrc; if (currentPathInfo.ca && currentPathInfo.references.empty()) { storePathForDst = dstStore.makeFixedOutputPathFromCA( - currentPathInfo.path.name(), - currentPathInfo.contentAddressWithReferences().value()); + currentPathInfo.path.name(), currentPathInfo.contentAddressWithReferences().value()); if (dstStore.storeDir == srcStore.storeDir) assert(storePathForDst == storePathForSrc); if (storePathForDst != storePathForSrc) - debug("replaced path '%s' to '%s' for substituter '%s'", - srcStore.printStorePath(storePathForSrc), - 
dstStore.printStorePath(storePathForDst), - dstStore.getUri()); + debug( + "replaced path '%s' to '%s' for substituter '%s'", + srcStore.printStorePath(storePathForSrc), + dstStore.printStorePath(storePathForDst), + dstStore.getUri()); } return storePathForDst; }; @@ -1120,7 +1070,10 @@ std::map copyPaths( auto srcUri = srcStore.getUri(); auto dstUri = dstStore.getUri(); auto storePathS = srcStore.printStorePath(missingPath); - Activity act(*logger, lvlInfo, actCopyPath, + Activity act( + *logger, + lvlInfo, + actCopyPath, makeCopyPathMessage(srcUri, dstUri, storePathS), {storePathS, srcUri, dstUri}); PushActivity pact(act.id); @@ -1129,7 +1082,7 @@ std::map copyPaths( total += data.size(); act.progress(total, narSize); }); - TeeSink tee { sink, progressSink }; + TeeSink tee{sink, progressSink}; srcStore.narFromPath(missingPath, tee); }); @@ -1149,7 +1102,8 @@ void copyClosure( CheckSigsFlag checkSigs, SubstituteFlag substitute) { - if (&srcStore == &dstStore) return; + if (&srcStore == &dstStore) + return; RealisedPath::Set closure; RealisedPath::closure(srcStore, paths, closure); @@ -1165,62 +1119,68 @@ void copyClosure( CheckSigsFlag checkSigs, SubstituteFlag substitute) { - if (&srcStore == &dstStore) return; + if (&srcStore == &dstStore) + return; StorePathSet closure; srcStore.computeFSClosure(storePaths, closure); copyPaths(srcStore, dstStore, closure, repair, checkSigs, substitute); } -std::optional decodeValidPathInfo(const Store & store, std::istream & str, std::optional hashGiven) +std::optional +decodeValidPathInfo(const Store & store, std::istream & str, std::optional hashGiven) { std::string path; getline(str, path); - if (str.eof()) { return {}; } + if (str.eof()) { + return {}; + } if (!hashGiven) { std::string s; getline(str, s); auto narHash = Hash::parseAny(s, HashAlgorithm::SHA256); getline(str, s); auto narSize = string2Int(s); - if (!narSize) throw Error("number expected"); - hashGiven = { narHash, *narSize }; + if (!narSize) + throw Error("number expected"); + hashGiven = {narHash, *narSize}; } ValidPathInfo info(store.parseStorePath(path), hashGiven->first); info.narSize = hashGiven->second; std::string deriver; getline(str, deriver); - if (deriver != "") info.deriver = store.parseStorePath(deriver); + if (deriver != "") + info.deriver = store.parseStorePath(deriver); std::string s; getline(str, s); auto n = string2Int(s); - if (!n) throw Error("number expected"); + if (!n) + throw Error("number expected"); while ((*n)--) { getline(str, s); info.references.insert(store.parseStorePath(s)); } - if (!str || str.eof()) throw Error("missing input"); + if (!str || str.eof()) + throw Error("missing input"); return std::optional(std::move(info)); } - std::string MixStoreDirMethods::showPaths(const StorePathSet & paths) const { std::string s; for (auto & i : paths) { - if (s.size() != 0) s += ", "; + if (s.size() != 0) + s += ", "; s += "'" + printStorePath(i) + "'"; } return s; } - std::string showPaths(const PathSet & paths) { return concatStringsSep(", ", quoteStrings(paths)); } - Derivation Store::derivationFromPath(const StorePath & drvPath) { ensurePath(drvPath); @@ -1231,9 +1191,8 @@ static Derivation readDerivationCommon(Store & store, const StorePath & drvPath, { auto accessor = store.getFSAccessor(requireValidPath); try { - return parseDerivation(store, - accessor->readFile(CanonPath(drvPath.to_string())), - Derivation::nameFromPath(drvPath)); + return parseDerivation( + store, accessor->readFile(CanonPath(drvPath.to_string())), Derivation::nameFromPath(drvPath)); 
} catch (FormatError & e) { throw Error("error parsing derivation '%s': %s", store.printStorePath(drvPath), e.msg()); } @@ -1245,7 +1204,8 @@ std::optional Store::getBuildDerivationPath(const StorePath & path) if (!path.isDerivation()) { try { auto info = queryPathInfo(path); - if (!info->deriver) return std::nullopt; + if (!info->deriver) + return std::nullopt; return *info->deriver; } catch (InvalidPath &) { return std::nullopt; @@ -1268,11 +1228,14 @@ std::optional Store::getBuildDerivationPath(const StorePath & path) } Derivation Store::readDerivation(const StorePath & drvPath) -{ return readDerivationCommon(*this, drvPath, true); } +{ + return readDerivationCommon(*this, drvPath, true); +} Derivation Store::readInvalidDerivation(const StorePath & drvPath) -{ return readDerivationCommon(*this, drvPath, false); } - +{ + return readDerivationCommon(*this, drvPath, false); +} void Store::signPathInfo(ValidPathInfo & info) { @@ -1287,7 +1250,6 @@ void Store::signPathInfo(ValidPathInfo & info) } } - void Store::signRealisation(Realisation & realisation) { // FIXME: keep secret keys in memory. @@ -1301,4 +1263,4 @@ void Store::signRealisation(Realisation & realisation) } } -} +} // namespace nix diff --git a/src/libstore/store-dir-config.cc b/src/libstore/store-dir-config.cc index ec65013ef..069c484ba 100644 --- a/src/libstore/store-dir-config.cc +++ b/src/libstore/store-dir-config.cc @@ -10,4 +10,4 @@ StoreDirConfig::StoreDirConfig(const Params & params) { } -} +} // namespace nix diff --git a/src/libstore/store-reference.cc b/src/libstore/store-reference.cc index cb4e2cfb8..99edefeba 100644 --- a/src/libstore/store-reference.cc +++ b/src/libstore/store-reference.cc @@ -113,4 +113,4 @@ std::pair splitUriAndParams(const std::stri return {uri, params}; } -} +} // namespace nix diff --git a/src/libstore/store-registration.cc b/src/libstore/store-registration.cc index 6362ac036..fd8d67437 100644 --- a/src/libstore/store-registration.cc +++ b/src/libstore/store-registration.cc @@ -102,4 +102,4 @@ Implementations::Map & Implementations::registered() return registered; } -} +} // namespace nix diff --git a/src/libstore/uds-remote-store.cc b/src/libstore/uds-remote-store.cc index c979b5e47..f8b3d834d 100644 --- a/src/libstore/uds-remote-store.cc +++ b/src/libstore/uds-remote-store.cc @@ -9,19 +9,17 @@ #include #ifdef _WIN32 -# include -# include +# include +# include #else -# include -# include +# include +# include #endif namespace nix { UDSRemoteStoreConfig::UDSRemoteStoreConfig( - std::string_view scheme, - std::string_view authority, - const StoreReference::Params & params) + std::string_view scheme, std::string_view authority, const StoreReference::Params & params) : Store::Config{params} , LocalFSStore::Config{params} , RemoteStore::Config{params} @@ -32,15 +30,13 @@ UDSRemoteStoreConfig::UDSRemoteStoreConfig( } } - std::string UDSRemoteStoreConfig::doc() { return - #include "uds-remote-store.md" +#include "uds-remote-store.md" ; } - // A bit gross that we now pass empty string but this is knowing that // empty string will later default to the same nixDaemonSocketFile. Why // don't we just wire it all through? I believe there are cases where it @@ -50,7 +46,6 @@ UDSRemoteStoreConfig::UDSRemoteStoreConfig(const Params & params) { } - UDSRemoteStore::UDSRemoteStore(ref config) : Store{*config} , LocalFSStore{*config} @@ -59,25 +54,22 @@ UDSRemoteStore::UDSRemoteStore(ref config) { } - std::string UDSRemoteStore::getUri() { return config->path == settings.nixDaemonSocketFile - ? 
// FIXME: Not clear why we return daemon here and not default - // to settings.nixDaemonSocketFile - // - // unix:// with no path also works. Change what we return? - "daemon" - : std::string(*Config::uriSchemes().begin()) + "://" + config->path; + ? // FIXME: Not clear why we return daemon here and not default + // to settings.nixDaemonSocketFile + // + // unix:// with no path also works. Change what we return? + "daemon" + : std::string(*Config::uriSchemes().begin()) + "://" + config->path; } - void UDSRemoteStore::Connection::closeWrite() { shutdown(toSocket(fd.get()), SHUT_WR); } - ref UDSRemoteStore::openConnection() { auto conn = make_ref(); @@ -93,7 +85,6 @@ ref UDSRemoteStore::openConnection() return conn; } - void UDSRemoteStore::addIndirectRoot(const Path & path) { auto conn(getConnection()); @@ -102,12 +93,11 @@ void UDSRemoteStore::addIndirectRoot(const Path & path) readInt(conn->from); } - -ref UDSRemoteStore::Config::openStore() const { +ref UDSRemoteStore::Config::openStore() const +{ return make_ref(ref{shared_from_this()}); } - static RegisterStoreImplementation regUDSRemoteStore; -} +} // namespace nix diff --git a/src/libstore/unix/build/child.cc b/src/libstore/unix/build/child.cc index a21fddf51..3a704e6ed 100644 --- a/src/libstore/unix/build/child.cc +++ b/src/libstore/unix/build/child.cc @@ -34,4 +34,4 @@ void commonChildInit() close(fdDevNull); } -} +} // namespace nix diff --git a/src/libstore/unix/build/darwin-derivation-builder.cc b/src/libstore/unix/build/darwin-derivation-builder.cc index 3985498c1..d25325126 100644 --- a/src/libstore/unix/build/darwin-derivation-builder.cc +++ b/src/libstore/unix/build/darwin-derivation-builder.cc @@ -206,6 +206,6 @@ struct DarwinDerivationBuilder : DerivationBuilderImpl } }; -} +} // namespace nix #endif diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index cf6c0a5b1..d598e51d9 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -29,7 +29,7 @@ #include "store-config-private.hh" #if HAVE_STATVFS -# include +# include #endif #include @@ -68,14 +68,13 @@ protected: public: DerivationBuilderImpl( - Store & store, - std::unique_ptr miscMethods, - DerivationBuilderParams params) + Store & store, std::unique_ptr miscMethods, DerivationBuilderParams params) : DerivationBuilderParams{std::move(params)} , store{store} , miscMethods{std::move(miscMethods)} , derivationType{drv.type()} - { } + { + } protected: @@ -110,13 +109,18 @@ protected: /** * Stuff we need to pass to initChild(). */ - struct ChrootPath { + struct ChrootPath + { Path source; bool optional; + ChrootPath(Path source = "", bool optional = false) - : source(source), optional(optional) - { } + : source(source) + , optional(optional) + { + } }; + typedef std::map PathsInChroot; // maps target path to source path typedef StringMap Environment; @@ -171,6 +175,7 @@ protected: { return inputPaths.count(path) || addedPaths.count(path); } + bool isAllowed(const DrvOutput & id) override { return addedDrvOutputs.count(id); @@ -319,9 +324,7 @@ protected: /** * Create a file in `tmpDir` owned by the builder. */ - void writeBuilderFile( - const std::string & name, - std::string_view contents); + void writeBuilderFile(const std::string & name, std::string_view contents); /** * Run the builder's process. @@ -332,9 +335,7 @@ protected: * Move the current process into the chroot, if any. Called early * by runChild(). 
*/ - virtual void enterChroot() - { - } + virtual void enterChroot() {} /** * Change the current process's uid/gid to the build user, if @@ -395,27 +396,22 @@ private: }; void handleDiffHook( - uid_t uid, uid_t gid, - const Path & tryA, const Path & tryB, - const Path & drvPath, const Path & tmpDir) + uid_t uid, uid_t gid, const Path & tryA, const Path & tryB, const Path & drvPath, const Path & tmpDir) { auto & diffHookOpt = settings.diffHook.get(); if (diffHookOpt && settings.runDiffHook) { auto & diffHook = *diffHookOpt; try { - auto diffRes = runProgram(RunOptions { - .program = diffHook, - .lookupPath = true, - .args = {tryA, tryB, drvPath, tmpDir}, - .uid = uid, - .gid = gid, - .chdir = "/" - }); + auto diffRes = runProgram( + RunOptions{ + .program = diffHook, + .lookupPath = true, + .args = {tryA, tryB, drvPath, tmpDir}, + .uid = uid, + .gid = gid, + .chdir = "/"}); if (!statusOk(diffRes.first)) - throw ExecError(diffRes.first, - "diff-hook program '%1%' %2%", - diffHook, - statusToString(diffRes.first)); + throw ExecError(diffRes.first, "diff-hook program '%1%' %2%", diffHook, statusToString(diffRes.first)); if (diffRes.second != "") printError(chomp(diffRes.second)); @@ -430,7 +426,6 @@ void handleDiffHook( const Path DerivationBuilderImpl::homeDir = "/homeless-shelter"; - static LocalStore & getLocalStore(Store & store) { auto p = dynamic_cast(&store); @@ -438,7 +433,6 @@ static LocalStore & getLocalStore(Store & store) return *p; } - void DerivationBuilderImpl::killSandbox(bool getStats) { if (buildUser) { @@ -448,7 +442,6 @@ void DerivationBuilderImpl::killSandbox(bool getStats) } } - bool DerivationBuilderImpl::prepareBuild() { if (useBuildUsers()) { @@ -462,11 +455,10 @@ bool DerivationBuilderImpl::prepareBuild() return true; } - std::variant, SingleDrvOutputs> DerivationBuilderImpl::unprepareBuild() { // FIXME: get rid of this, rely on RAII. - Finally releaseBuildUser([&](){ + Finally releaseBuildUser([&]() { /* Release the build user at the end of this function. We don't do it right away because we don't want another build grabbing this uid and then messing around with our output. */ @@ -504,7 +496,8 @@ std::variant, SingleDrvOutputs> Derivation stopDaemon(); if (buildResult.cpuUser && buildResult.cpuSystem) { - debug("builder for '%s' terminated with status %d, user CPU %.3fs, system CPU %.3fs", + debug( + "builder for '%s' terminated with status %d, user CPU %.3fs, system CPU %.3fs", store.printStorePath(drvPath), status, ((double) buildResult.cpuUser->count()) / 1000000, @@ -522,11 +515,11 @@ std::variant, SingleDrvOutputs> Derivation cleanupBuild(); - auto msg = fmt( - "Cannot build '%s'.\n" - "Reason: " ANSI_RED "builder %s" ANSI_NORMAL ".", - Magenta(store.printStorePath(drvPath)), - statusToString(status)); + auto msg = + fmt("Cannot build '%s'.\n" + "Reason: " ANSI_RED "builder %s" ANSI_NORMAL ".", + Magenta(store.printStorePath(drvPath)), + statusToString(status)); msg += showKnownOutputs(store, drv); @@ -545,12 +538,7 @@ std::variant, SingleDrvOutputs> Derivation StorePathSet outputPaths; for (auto & [_, output] : builtOutputs) outputPaths.insert(output.outPath); - runPostBuildHook( - store, - *logger, - drvPath, - outputPaths - ); + runPostBuildHook(store, *logger, drvPath, outputPaths); /* Delete unused redirected outputs (when doing hash rewriting). */ for (auto & i : redirectedOutputs) @@ -561,11 +549,10 @@ std::variant, SingleDrvOutputs> Derivation return std::move(builtOutputs); } catch (BuildError & e) { - BuildResult::Status st = - dynamic_cast(&e) ? 
BuildResult::NotDeterministic : - statusOk(status) ? BuildResult::OutputRejected : - !derivationType.isSandboxed() || diskFull ? BuildResult::TransientFailure : - BuildResult::PermanentFailure; + BuildResult::Status st = dynamic_cast(&e) ? BuildResult::NotDeterministic + : statusOk(status) ? BuildResult::OutputRejected + : !derivationType.isSandboxed() || diskFull ? BuildResult::TransientFailure + : BuildResult::PermanentFailure; return std::pair{std::move(st), std::move(e)}; } @@ -582,7 +569,6 @@ static void chmod_(const Path & path, mode_t mode) throw SysError("setting permissions on '%s'", path); } - /* Move/rename path 'src' to 'dst'. Temporarily make 'src' writable if it's a directory and we're not root (to be able to update the directory's parent link ".."). */ @@ -601,7 +587,6 @@ static void movePath(const Path & src, const Path & dst) chmod_(dst, st.st_mode); } - static void replaceValidPath(const Path & storePath, const Path & tmpPath) { /* We can't atomically replace storePath (the original) with @@ -651,11 +636,10 @@ bool DerivationBuilderImpl::decideWhetherDiskFull() auto & localStore = getLocalStore(store); uint64_t required = 8ULL * 1024 * 1024; // FIXME: make configurable struct statvfs st; - if (statvfs(localStore.config->realStoreDir.get().c_str(), &st) == 0 && - (uint64_t) st.f_bavail * st.f_bsize < required) + if (statvfs(localStore.config->realStoreDir.get().c_str(), &st) == 0 + && (uint64_t) st.f_bavail * st.f_bsize < required) diskFull = true; - if (statvfs(tmpDir.c_str(), &st) == 0 && - (uint64_t) st.f_bavail * st.f_bsize < required) + if (statvfs(tmpDir.c_str(), &st) == 0 && (uint64_t) st.f_bavail * st.f_bsize < required) diskFull = true; } #endif @@ -704,7 +688,8 @@ static bool checkNotWorldWritable(std::filesystem::path path) auto st = lstat(path); if (st.st_mode & S_IWOTH) return false; - if (path == path.parent_path()) break; + if (path == path.parent_path()) + break; path = path.parent_path(); } return true; @@ -719,20 +704,24 @@ void DerivationBuilderImpl::startBuilder() /* Right platform? 
*/ if (!drvOptions.canBuildLocally(store, drv)) { - auto msg = fmt( - "Cannot build '%s'.\n" - "Reason: " ANSI_RED "required system or feature not available" ANSI_NORMAL "\n" - "Required system: '%s' with features {%s}\n" - "Current system: '%s' with features {%s}", - Magenta(store.printStorePath(drvPath)), - Magenta(drv.platform), - concatStringsSep(", ", drvOptions.getRequiredSystemFeatures(drv)), - Magenta(settings.thisSystem), - concatStringsSep(", ", store.config.systemFeatures)); + auto msg = + fmt("Cannot build '%s'.\n" + "Reason: " ANSI_RED "required system or feature not available" ANSI_NORMAL + "\n" + "Required system: '%s' with features {%s}\n" + "Current system: '%s' with features {%s}", + Magenta(store.printStorePath(drvPath)), + Magenta(drv.platform), + concatStringsSep(", ", drvOptions.getRequiredSystemFeatures(drv)), + Magenta(settings.thisSystem), + concatStringsSep(", ", store.config.systemFeatures)); - // since aarch64-darwin has Rosetta 2, this user can actually run x86_64-darwin on their hardware - we should tell them to run the command to install Darwin 2 + // since aarch64-darwin has Rosetta 2, this user can actually run x86_64-darwin on their hardware - we should + // tell them to run the command to install Darwin 2 if (drv.platform == "x86_64-darwin" && settings.thisSystem == "aarch64-darwin") - msg += fmt("\nNote: run `%s` to run programs for x86_64-darwin", Magenta("/usr/sbin/softwareupdate --install-rosetta && launchctl stop org.nixos.nix-daemon")); + msg += + fmt("\nNote: run `%s` to run programs for x86_64-darwin", + Magenta("/usr/sbin/softwareupdate --install-rosetta && launchctl stop org.nixos.nix-daemon")); throw BuildError(msg); } @@ -742,7 +731,8 @@ void DerivationBuilderImpl::startBuilder() createDirs(buildDir); if (buildUser && !checkNotWorldWritable(buildDir)) - throw Error("Path %s or a parent directory is world-writable or a symlink. That's not allowed for security.", buildDir); + throw Error( + "Path %s or a parent directory is world-writable or a symlink. That's not allowed for security.", buildDir); /* Create a temporary directory where the build will take place. */ @@ -770,22 +760,20 @@ void DerivationBuilderImpl::startBuilder() corresponding to the valid outputs, and rewrite the contents of the new outputs to replace the dummy strings with the actual hashes. */ - auto scratchPath = - !status.known - ? makeFallbackPath(outputName) - : !needsHashRewrite() - /* Can always use original path in sandbox */ - ? status.known->path - : !status.known->isPresent() - /* If path doesn't yet exist can just use it */ - ? status.known->path - : buildMode != bmRepair && !status.known->isValid() - /* If we aren't repairing we'll delete a corrupted path, so we - can use original path */ - ? status.known->path - : /* If we are repairing or the path is totally valid, we'll need - to use a temporary path */ - makeFallbackPath(status.known->path); + auto scratchPath = !status.known ? makeFallbackPath(outputName) + : !needsHashRewrite() + /* Can always use original path in sandbox */ + ? status.known->path + : !status.known->isPresent() + /* If path doesn't yet exist can just use it */ + ? status.known->path + : buildMode != bmRepair && !status.known->isValid() + /* If we aren't repairing we'll delete a corrupted path, so we + can use original path */ + ? 
status.known->path + : /* If we are repairing or the path is totally valid, we'll need + to use a temporary path */ + makeFallbackPath(status.known->path); scratchOutputs.insert_or_assign(outputName, scratchPath); /* Substitute output placeholders with the scratch output paths. @@ -793,20 +781,22 @@ void DerivationBuilderImpl::startBuilder() inputRewrites[hashPlaceholder(outputName)] = store.printStorePath(scratchPath); /* Additional tasks if we know the final path a priori. */ - if (!status.known) continue; + if (!status.known) + continue; auto fixedFinalPath = status.known->path; /* Additional tasks if the final and scratch are both known and differ. */ - if (fixedFinalPath == scratchPath) continue; + if (fixedFinalPath == scratchPath) + continue; /* Ensure scratch path is ours to use. */ deletePath(store.printStorePath(scratchPath)); /* Rewrite and unrewrite paths */ { - std::string h1 { fixedFinalPath.hashPart() }; - std::string h2 { scratchPath.hashPart() }; + std::string h1{fixedFinalPath.hashPart()}; + std::string h2{scratchPath.hashPart()}; inputRewrites[h1] = h2; } @@ -828,16 +818,17 @@ void DerivationBuilderImpl::startBuilder() storePathSet.insert(store.toStorePath(storePathS).first); } /* Write closure info to . */ - writeFile(tmpDir + "/" + fileName, - store.makeValidityRegistration( - store.exportReferences(storePathSet, inputPaths), false, false)); + writeFile( + tmpDir + "/" + fileName, + store.makeValidityRegistration(store.exportReferences(storePathSet, inputPaths), false, false)); } } prepareSandbox(); if (needsHashRewrite() && pathExists(homeDir)) - throw Error("home directory '%1%' exists; please remove it to assure purity of builds without sandboxing", homeDir); + throw Error( + "home directory '%1%' exists; please remove it to assure purity of builds without sandboxing", homeDir); /* Fire up a Nix daemon to process recursive Nix calls from the builder. */ @@ -896,7 +887,8 @@ DerivationBuilderImpl::PathsInChroot DerivationBuilderImpl::getPathsInSandbox() /* Allow a user-configurable set of directories from the host file system. */ for (auto i : settings.sandboxPaths.get()) { - if (i.empty()) continue; + if (i.empty()) + continue; bool optional = false; if (i[i.size() - 1] == '?') { optional = true; @@ -908,8 +900,7 @@ DerivationBuilderImpl::PathsInChroot DerivationBuilderImpl::getPathsInSandbox() else pathsInChroot[i.substr(0, p)] = {i.substr(p + 1), optional}; } - if (hasPrefix(store.storeDir, tmpDirInSandbox())) - { + if (hasPrefix(store.storeDir, tmpDirInSandbox())) { throw Error("`sandbox-build-dir` must not contain the storeDir"); } pathsInChroot[tmpDirInSandbox()] = tmpDir; @@ -950,8 +941,10 @@ DerivationBuilderImpl::PathsInChroot DerivationBuilderImpl::getPathsInSandbox() } } if (!found) - throw Error("derivation '%s' requested impure path '%s', but it was not in allowed-impure-host-deps", - store.printStorePath(drvPath), i); + throw Error( + "derivation '%s' requested impure path '%s', but it was not in allowed-impure-host-deps", + store.printStorePath(drvPath), + i); /* Allow files in drvOptions.impureHostDeps to be missing; e.g. 
macOS 11+ has no /usr/lib/libSystem*.dylib */ @@ -960,16 +953,13 @@ DerivationBuilderImpl::PathsInChroot DerivationBuilderImpl::getPathsInSandbox() if (settings.preBuildHook != "") { printMsg(lvlChatty, "executing pre-build hook '%1%'", settings.preBuildHook); - enum BuildHookState { - stBegin, - stExtraChrootDirs - }; + + enum BuildHookState { stBegin, stExtraChrootDirs }; + auto state = stBegin; auto lines = runProgram(settings.preBuildHook, false, getPreBuildHookArgs()); auto lastPos = std::string::size_type{0}; - for (auto nlPos = lines.find('\n'); nlPos != std::string::npos; - nlPos = lines.find('\n', lastPos)) - { + for (auto nlPos = lines.find('\n'); nlPos != std::string::npos; nlPos = lines.find('\n', lastPos)) { auto line = lines.substr(lastPos, nlPos - lastPos); lastPos = nlPos + 1; if (state == stBegin) { @@ -1040,14 +1030,17 @@ void DerivationBuilderImpl::processSandboxSetupMessages() return readLine(builderOut.get()); } catch (Error & e) { auto status = pid.wait(); - e.addTrace({}, "while waiting for the build environment for '%s' to initialize (%s, previous messages: %s)", + e.addTrace( + {}, + "while waiting for the build environment for '%s' to initialize (%s, previous messages: %s)", store.printStorePath(drvPath), statusToString(status), concatStringsSep("|", msgs)); throw; } }(); - if (msg.substr(0, 1) == "\2") break; + if (msg.substr(0, 1) == "\2") + break; if (msg.substr(0, 1) == "\1") { FdSource source(builderOut.get()); auto ex = readError(source); @@ -1118,7 +1111,8 @@ void DerivationBuilderImpl::initEnv() derivation, tell the builder, so that for instance `fetchurl' can skip checking the output. On older Nixes, this environment variable won't be set, so `fetchurl' will do the check. */ - if (derivationType.isFixed()) env["NIX_OUTPUT_CHECKED"] = "1"; + if (derivationType.isFixed()) + env["NIX_OUTPUT_CHECKED"] = "1"; /* *Only* if this is a fixed-output derivation, propagate the values of the environment variables specified in the @@ -1134,7 +1128,7 @@ void DerivationBuilderImpl::initEnv() if (!impureEnv.empty()) experimentalFeatureSettings.require(Xp::ConfigurableImpureEnv); - for (auto & i : drvOptions.impureEnvVars){ + for (auto & i : drvOptions.impureEnvVars) { auto envVar = impureEnv.find(i); if (envVar != impureEnv.end()) { env[i] = envVar->second; @@ -1153,15 +1147,10 @@ void DerivationBuilderImpl::initEnv() env["TERM"] = "xterm-256color"; } - void DerivationBuilderImpl::writeStructuredAttrs() { if (parsedDrv) { - auto json = parsedDrv->prepareStructuredAttrs( - store, - drvOptions, - inputPaths, - drv.outputs); + auto json = parsedDrv->prepareStructuredAttrs(store, drvOptions, inputPaths, drv.outputs); nlohmann::json rewritten; for (auto & [i, v] : json["outputs"].get()) { /* The placeholder must have a rewrite, so we use it to cover both the @@ -1180,13 +1169,12 @@ void DerivationBuilderImpl::writeStructuredAttrs() } } - void DerivationBuilderImpl::startDaemon() { experimentalFeatureSettings.require(Xp::RecursiveNix); auto store = makeRestrictedStore( - [&]{ + [&] { auto config = make_ref(*getLocalStore(this->store).config); config->pathInfoCacheSize = 0; config->stateDir = "/no-such-path"; @@ -1207,18 +1195,18 @@ void DerivationBuilderImpl::startDaemon() chownToBuilder(socketPath); daemonThread = std::thread([this, store]() { - while (true) { /* Accept a connection. 
*/ struct sockaddr_un remoteAddr; socklen_t remoteAddrLen = sizeof(remoteAddr); - AutoCloseFD remote = accept(daemonSocket.get(), - (struct sockaddr *) &remoteAddr, &remoteAddrLen); + AutoCloseFD remote = accept(daemonSocket.get(), (struct sockaddr *) &remoteAddr, &remoteAddrLen); if (!remote) { - if (errno == EINTR || errno == EAGAIN) continue; - if (errno == EINVAL || errno == ECONNABORTED) break; + if (errno == EINTR || errno == EAGAIN) + continue; + if (errno == EINVAL || errno == ECONNABORTED) + break; throw SysError("accepting connection"); } @@ -1229,10 +1217,7 @@ void DerivationBuilderImpl::startDaemon() auto workerThread = std::thread([store, remote{std::move(remote)}]() { try { daemon::processConnection( - store, - FdSource(remote.get()), - FdSink(remote.get()), - NotTrusted, daemon::Recursive); + store, FdSource(remote.get()), FdSink(remote.get()), NotTrusted, daemon::Recursive); debug("terminated daemon connection"); } catch (const Interrupted &) { debug("interrupted daemon connection"); @@ -1248,7 +1233,6 @@ void DerivationBuilderImpl::startDaemon() }); } - void DerivationBuilderImpl::stopDaemon() { if (daemonSocket && shutdown(daemonSocket.get(), SHUT_RDWR) == -1) { @@ -1281,34 +1265,35 @@ void DerivationBuilderImpl::stopDaemon() daemonSocket.close(); } - void DerivationBuilderImpl::addDependency(const StorePath & path) { - if (isAllowed(path)) return; + if (isAllowed(path)) + return; addedPaths.insert(path); } void DerivationBuilderImpl::chownToBuilder(const Path & path) { - if (!buildUser) return; + if (!buildUser) + return; if (chown(path.c_str(), buildUser->getUID(), buildUser->getGID()) == -1) throw SysError("cannot change ownership of '%1%'", path); } void DerivationBuilderImpl::chownToBuilder(int fd, const Path & path) { - if (!buildUser) return; + if (!buildUser) + return; if (fchown(fd, buildUser->getUID(), buildUser->getGID()) == -1) throw SysError("cannot change ownership of file '%1%'", path); } -void DerivationBuilderImpl::writeBuilderFile( - const std::string & name, - std::string_view contents) +void DerivationBuilderImpl::writeBuilderFile(const std::string & name, std::string_view contents) { auto path = std::filesystem::path(tmpDir) / name; - AutoCloseFD fd{openat(tmpDirFd.get(), name.c_str(), O_WRONLY | O_TRUNC | O_CREAT | O_CLOEXEC | O_EXCL | O_NOFOLLOW, 0666)}; + AutoCloseFD fd{ + openat(tmpDirFd.get(), name.c_str(), O_WRONLY | O_TRUNC | O_CREAT | O_CLOEXEC | O_EXCL | O_NOFOLLOW, 0666)}; if (!fd) throw SysError("creating file %s", path); writeFile(fd, path, contents); @@ -1335,13 +1320,15 @@ void DerivationBuilderImpl::runChild() }; if (drv.isBuiltin() && drv.builder == "builtin:fetchurl") { - try { - ctx.netrcData = readFile(settings.netrcFile); - } catch (SystemError &) { } + try { + ctx.netrcData = readFile(settings.netrcFile); + } catch (SystemError &) { + } - try { - ctx.caFileData = readFile(settings.caFile); - } catch (SystemError &) { } + try { + ctx.caFileData = readFile(settings.caFile); + } catch (SystemError &) { + } } enterChroot(); @@ -1353,7 +1340,7 @@ void DerivationBuilderImpl::runChild() unix::closeExtraFDs(); /* Disable core dumps by default. */ - struct rlimit limit = { 0, RLIM_INFINITY }; + struct rlimit limit = {0, RLIM_INFINITY}; setrlimit(RLIMIT_CORE, &limit); // FIXME: set other limits to deterministic values? 
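For context, the core-dump handling in the hunk above amounts to lowering the soft RLIMIT_CORE limit to zero in the child process before the builder is exec'ed. A minimal standalone sketch of that pattern follows; it is illustrative only, and the helper name demoDisableCoreDumps is made up here rather than taken from this patch.

#include <sys/resource.h>
#include <cstdio>

// Lower the soft core-file size limit to zero so the process cannot dump
// core, while leaving the hard limit unlimited (as the patch does with
// {0, RLIM_INFINITY}).
static void demoDisableCoreDumps()
{
    struct rlimit limit = {0, RLIM_INFINITY};
    if (setrlimit(RLIMIT_CORE, &limit) == -1)
        perror("setrlimit(RLIMIT_CORE)");
}

int main()
{
    demoDisableCoreDumps();
    return 0;
}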
@@ -1371,8 +1358,7 @@ void DerivationBuilderImpl::runChild() logger = makeJSONLogger(getStandardError()); for (auto & e : drv.outputs) - ctx.outputs.insert_or_assign(e.first, - store.printStorePath(scratchOutputs.at(e.first))); + ctx.outputs.insert_or_assign(e.first, store.printStorePath(scratchOutputs.at(e.first))); std::string builtinName = drv.builder.substr(8); assert(RegisterBuiltinBuilder::builtinBuilders); @@ -1424,14 +1410,10 @@ void DerivationBuilderImpl::setUser() if (setgroups(gids.size(), gids.data()) == -1) throw SysError("cannot set supplementary groups of build user"); - if (setgid(buildUser->getGID()) == -1 || - getgid() != buildUser->getGID() || - getegid() != buildUser->getGID()) + if (setgid(buildUser->getGID()) == -1 || getgid() != buildUser->getGID() || getegid() != buildUser->getGID()) throw SysError("setgid failed"); - if (setuid(buildUser->getUID()) == -1 || - getuid() != buildUser->getUID() || - geteuid() != buildUser->getUID()) + if (setuid(buildUser->getUID()) == -1 || getuid() != buildUser->getUID() || geteuid() != buildUser->getUID()) throw SysError("setuid failed"); } } @@ -1458,9 +1440,12 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() output paths, and any paths that have been built via recursive Nix calls. */ StorePathSet referenceablePaths; - for (auto & p : inputPaths) referenceablePaths.insert(p); - for (auto & i : scratchOutputs) referenceablePaths.insert(i.second); - for (auto & p : addedPaths) referenceablePaths.insert(p); + for (auto & p : inputPaths) + referenceablePaths.insert(p); + for (auto & i : scratchOutputs) + referenceablePaths.insert(i.second); + for (auto & p : addedPaths) + referenceablePaths.insert(p); /* Check whether the output paths were created, and make all output paths read-only. Then get the references of each output (that we @@ -1468,16 +1453,24 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() that are most definitely already installed, we just store their final name so we can also use it in rewrites. 
*/ StringSet outputsToSort; - struct AlreadyRegistered { StorePath path; }; - struct PerhapsNeedToRegister { StorePathSet refs; }; + + struct AlreadyRegistered + { + StorePath path; + }; + + struct PerhapsNeedToRegister + { + StorePathSet refs; + }; + std::map> outputReferencesIfUnregistered; std::map outputStats; for (auto & [outputName, _] : drv.outputs) { auto scratchOutput = get(scratchOutputs, outputName); if (!scratchOutput) throw BuildError( - "builder for '%s' has no scratch output for '%s'", - store.printStorePath(drvPath), outputName); + "builder for '%s' has no scratch output for '%s'", store.printStorePath(drvPath), outputName); auto actualPath = realPathInSandbox(store.printStorePath(*scratchOutput)); outputsToSort.insert(outputName); @@ -1486,17 +1479,14 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() auto initialOutput = get(initialOutputs, outputName); if (!initialOutput) throw BuildError( - "builder for '%s' has no initial output for '%s'", - store.printStorePath(drvPath), outputName); + "builder for '%s' has no initial output for '%s'", store.printStorePath(drvPath), outputName); auto & initialInfo = *initialOutput; /* Don't register if already valid, and not checking */ - initialInfo.wanted = buildMode == bmCheck - || !(initialInfo.known && initialInfo.known->isValid()); + initialInfo.wanted = buildMode == bmCheck || !(initialInfo.known && initialInfo.known->isValid()); if (!initialInfo.wanted) { outputReferencesIfUnregistered.insert_or_assign( - outputName, - AlreadyRegistered { .path = initialInfo.known->path }); + outputName, AlreadyRegistered{.path = initialInfo.known->path}); continue; } @@ -1504,7 +1494,9 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() if (!optSt) throw BuildError( "builder for '%s' failed to produce output path for output '%s' at '%s'", - store.printStorePath(drvPath), outputName, actualPath); + store.printStorePath(drvPath), + outputName, + actualPath); struct stat & st = *optSt; #ifndef __CYGWIN__ @@ -1512,20 +1504,19 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() that means that someone else can have interfered with the build. Also, the output should be owned by the build user. */ - if ((!S_ISLNK(st.st_mode) && (st.st_mode & (S_IWGRP | S_IWOTH))) || - (buildUser && st.st_uid != buildUser->getUID())) + if ((!S_ISLNK(st.st_mode) && (st.st_mode & (S_IWGRP | S_IWOTH))) + || (buildUser && st.st_uid != buildUser->getUID())) throw BuildError( - "suspicious ownership or permission on '%s' for output '%s'; rejecting this build output", - actualPath, outputName); + "suspicious ownership or permission on '%s' for output '%s'; rejecting this build output", + actualPath, + outputName); #endif /* Canonicalise first. This ensures that the path we're rewriting doesn't contain a hard link to /etc/shadow or something like that. */ canonicalisePathMetaData( - actualPath, - buildUser ? std::optional(buildUser->getUIDRange()) : std::nullopt, - inodesSeen); + actualPath, buildUser ? 
std::optional(buildUser->getUIDRange()) : std::nullopt, inodesSeen); bool discardReferences = false; if (auto udr = get(drvOptions.unsafeDiscardReferences, outputName)) { @@ -1543,40 +1534,41 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() references = scanForReferences(blank, actualPath, referenceablePaths); } - outputReferencesIfUnregistered.insert_or_assign( - outputName, - PerhapsNeedToRegister { .refs = references }); + outputReferencesIfUnregistered.insert_or_assign(outputName, PerhapsNeedToRegister{.refs = references}); outputStats.insert_or_assign(outputName, std::move(st)); } - auto sortedOutputNames = topoSort(outputsToSort, + auto sortedOutputNames = topoSort( + outputsToSort, {[&](const std::string & name) { auto orifu = get(outputReferencesIfUnregistered, name); if (!orifu) - throw BuildError( - "no output reference for '%s' in build of '%s'", - name, store.printStorePath(drvPath)); - return std::visit(overloaded { - /* Since we'll use the already installed versions of these, we - can treat them as leaves and ignore any references they - have. */ - [&](const AlreadyRegistered &) { return StringSet {}; }, - [&](const PerhapsNeedToRegister & refs) { - StringSet referencedOutputs; - /* FIXME build inverted map up front so no quadratic waste here */ - for (auto & r : refs.refs) - for (auto & [o, p] : scratchOutputs) - if (r == p) - referencedOutputs.insert(o); - return referencedOutputs; + throw BuildError("no output reference for '%s' in build of '%s'", name, store.printStorePath(drvPath)); + return std::visit( + overloaded{ + /* Since we'll use the already installed versions of these, we + can treat them as leaves and ignore any references they + have. */ + [&](const AlreadyRegistered &) { return StringSet{}; }, + [&](const PerhapsNeedToRegister & refs) { + StringSet referencedOutputs; + /* FIXME build inverted map up front so no quadratic waste here */ + for (auto & r : refs.refs) + for (auto & [o, p] : scratchOutputs) + if (r == p) + referencedOutputs.insert(o); + return referencedOutputs; + }, }, - }, *orifu); + *orifu); }}, {[&](const std::string & path, const std::string & parent) { // TODO with more -vvvv also show the temporary paths for manual inspection. return BuildError( "cycle detected in build of '%s' in the references of output '%s' from output '%s'", - store.printStorePath(drvPath), path, parent); + store.printStorePath(drvPath), + path, + parent); }}); std::reverse(sortedOutputNames.begin(), sortedOutputNames.end()); @@ -1596,21 +1588,21 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() use. This is why the topological sort is essential to do first before this for loop. 
*/ if (*scratchPath != finalStorePath) - outputRewrites[std::string { scratchPath->hashPart() }] = std::string { finalStorePath.hashPart() }; + outputRewrites[std::string{scratchPath->hashPart()}] = std::string{finalStorePath.hashPart()}; }; auto orifu = get(outputReferencesIfUnregistered, outputName); assert(orifu); - std::optional referencesOpt = std::visit(overloaded { - [&](const AlreadyRegistered & skippedFinalPath) -> std::optional { - finish(skippedFinalPath.path); - return std::nullopt; + std::optional referencesOpt = std::visit( + overloaded{ + [&](const AlreadyRegistered & skippedFinalPath) -> std::optional { + finish(skippedFinalPath.path); + return std::nullopt; + }, + [&](const PerhapsNeedToRegister & r) -> std::optional { return r.refs; }, }, - [&](const PerhapsNeedToRegister & r) -> std::optional { - return r.refs; - }, - }, *orifu); + *orifu); if (!referencesOpt) continue; @@ -1643,19 +1635,19 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() final path, therefore we look for a *non-rewritten self-reference, and use a bool rather try to solve the computationally intractable fixed point. */ - StoreReferences res { + StoreReferences res{ .self = false, }; for (auto & r : references) { auto name = r.name(); - auto origHash = std::string { r.hashPart() }; + auto origHash = std::string{r.hashPart()}; if (r == *scratchPath) { res.self = true; } else if (auto outputRewrite = get(outputRewrites, origHash)) { std::string newRef = *outputRewrite; newRef += '-'; newRef += name; - res.others.insert(StorePath { newRef }); + res.others.insert(StorePath{newRef}); } else { res.others.insert(r); } @@ -1666,11 +1658,8 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() auto newInfoFromCA = [&](const DerivationOutput::CAFloating outputHash) -> ValidPathInfo { auto st = get(outputStats, outputName); if (!st) - throw BuildError( - "output path %1% without valid stats info", - actualPath); - if (outputHash.method.getFileIngestionMethod() == FileIngestionMethod::Flat) - { + throw BuildError("output path %1% without valid stats info", actualPath); + if (outputHash.method.getFileIngestionMethod() == FileIngestionMethod::Flat) { /* The output path should be a regular file without execute permission. 
*/ if (!S_ISREG(st->st_mode) || (st->st_mode & S_IXUSR) != 0) throw BuildError( @@ -1680,37 +1669,28 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() } rewriteOutput(outputRewrites); /* FIXME optimize and deduplicate with addToStore */ - std::string oldHashPart { scratchPath->hashPart() }; - auto got = [&]{ + std::string oldHashPart{scratchPath->hashPart()}; + auto got = [&] { auto fim = outputHash.method.getFileIngestionMethod(); switch (fim) { case FileIngestionMethod::Flat: - case FileIngestionMethod::NixArchive: - { - HashModuloSink caSink { outputHash.hashAlgo, oldHashPart }; + case FileIngestionMethod::NixArchive: { + HashModuloSink caSink{outputHash.hashAlgo, oldHashPart}; auto fim = outputHash.method.getFileIngestionMethod(); - dumpPath( - {getFSSourceAccessor(), CanonPath(actualPath)}, - caSink, - (FileSerialisationMethod) fim); + dumpPath({getFSSourceAccessor(), CanonPath(actualPath)}, caSink, (FileSerialisationMethod) fim); return caSink.finish().first; } case FileIngestionMethod::Git: { - return git::dumpHash( - outputHash.hashAlgo, - {getFSSourceAccessor(), CanonPath(actualPath)}).hash; + return git::dumpHash(outputHash.hashAlgo, {getFSSourceAccessor(), CanonPath(actualPath)}).hash; } } assert(false); }(); - ValidPathInfo newInfo0 { + ValidPathInfo newInfo0{ store, outputPathName(drv.name, outputName), - ContentAddressWithReferences::fromParts( - outputHash.method, - std::move(got), - rewriteRefs()), + ContentAddressWithReferences::fromParts(outputHash.method, std::move(got), rewriteRefs()), Hash::dummy, }; if (*scratchPath != newInfo0.path) { @@ -1719,15 +1699,14 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() // (note that this doesn't invalidate the ca hash we calculated // above because it's computed *modulo the self-references*, so // it already takes this rewrite into account). 
- rewriteOutput( - StringMap{{oldHashPart, - std::string(newInfo0.path.hashPart())}}); + rewriteOutput(StringMap{{oldHashPart, std::string(newInfo0.path.hashPart())}}); } { HashResult narHashAndSize = hashPath( {getFSSourceAccessor(), CanonPath(actualPath)}, - FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256); + FileSerialisationMethod::NixArchive, + HashAlgorithm::SHA256); newInfo0.narHash = narHashAndSize.first; newInfo0.narSize = narHashAndSize.second; } @@ -1736,90 +1715,90 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() return newInfo0; }; - ValidPathInfo newInfo = std::visit(overloaded { + ValidPathInfo newInfo = std::visit( + overloaded{ - [&](const DerivationOutput::InputAddressed & output) { - /* input-addressed case */ - auto requiredFinalPath = output.path; - /* Preemptively add rewrite rule for final hash, as that is - what the NAR hash will use rather than normalized-self references */ - if (*scratchPath != requiredFinalPath) - outputRewrites.insert_or_assign( - std::string { scratchPath->hashPart() }, - std::string { requiredFinalPath.hashPart() }); - rewriteOutput(outputRewrites); - HashResult narHashAndSize = hashPath( - {getFSSourceAccessor(), CanonPath(actualPath)}, - FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256); - ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first }; - newInfo0.narSize = narHashAndSize.second; - auto refs = rewriteRefs(); - newInfo0.references = std::move(refs.others); - if (refs.self) - newInfo0.references.insert(newInfo0.path); - return newInfo0; - }, + [&](const DerivationOutput::InputAddressed & output) { + /* input-addressed case */ + auto requiredFinalPath = output.path; + /* Preemptively add rewrite rule for final hash, as that is + what the NAR hash will use rather than normalized-self references */ + if (*scratchPath != requiredFinalPath) + outputRewrites.insert_or_assign( + std::string{scratchPath->hashPart()}, std::string{requiredFinalPath.hashPart()}); + rewriteOutput(outputRewrites); + HashResult narHashAndSize = hashPath( + {getFSSourceAccessor(), CanonPath(actualPath)}, + FileSerialisationMethod::NixArchive, + HashAlgorithm::SHA256); + ValidPathInfo newInfo0{requiredFinalPath, narHashAndSize.first}; + newInfo0.narSize = narHashAndSize.second; + auto refs = rewriteRefs(); + newInfo0.references = std::move(refs.others); + if (refs.self) + newInfo0.references.insert(newInfo0.path); + return newInfo0; + }, - [&](const DerivationOutput::CAFixed & dof) { - auto & wanted = dof.ca.hash; + [&](const DerivationOutput::CAFixed & dof) { + auto & wanted = dof.ca.hash; - // Replace the output by a fresh copy of itself to make sure - // that there's no stale file descriptor pointing to it - Path tmpOutput = actualPath + ".tmp"; - copyFile( - std::filesystem::path(actualPath), - std::filesystem::path(tmpOutput), true); + // Replace the output by a fresh copy of itself to make sure + // that there's no stale file descriptor pointing to it + Path tmpOutput = actualPath + ".tmp"; + copyFile(std::filesystem::path(actualPath), std::filesystem::path(tmpOutput), true); - std::filesystem::rename(tmpOutput, actualPath); + std::filesystem::rename(tmpOutput, actualPath); - auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating { - .method = dof.ca.method, - .hashAlgo = wanted.algo, - }); + auto newInfo0 = newInfoFromCA( + DerivationOutput::CAFloating{ + .method = dof.ca.method, + .hashAlgo = wanted.algo, + }); - /* Check wanted hash */ - assert(newInfo0.ca); - auto & got = newInfo0.ca->hash; - if (wanted != 
got) { - /* Throw an error after registering the path as - valid. */ - miscMethods->noteHashMismatch(); - delayedException = std::make_exception_ptr( - BuildError("hash mismatch in fixed-output derivation '%s':\n specified: %s\n got: %s", + /* Check wanted hash */ + assert(newInfo0.ca); + auto & got = newInfo0.ca->hash; + if (wanted != got) { + /* Throw an error after registering the path as + valid. */ + miscMethods->noteHashMismatch(); + delayedException = std::make_exception_ptr(BuildError( + "hash mismatch in fixed-output derivation '%s':\n specified: %s\n got: %s", store.printStorePath(drvPath), wanted.to_string(HashFormat::SRI, true), got.to_string(HashFormat::SRI, true))); - } - if (!newInfo0.references.empty()) { - auto numViolations = newInfo.references.size(); - delayedException = std::make_exception_ptr( - BuildError("fixed-output derivations must not reference store paths: '%s' references %d distinct paths, e.g. '%s'", + } + if (!newInfo0.references.empty()) { + auto numViolations = newInfo.references.size(); + delayedException = std::make_exception_ptr(BuildError( + "fixed-output derivations must not reference store paths: '%s' references %d distinct paths, e.g. '%s'", store.printStorePath(drvPath), numViolations, store.printStorePath(*newInfo.references.begin()))); - } + } + + return newInfo0; + }, + + [&](const DerivationOutput::CAFloating & dof) { return newInfoFromCA(dof); }, + + [&](const DerivationOutput::Deferred &) -> ValidPathInfo { + // No derivation should reach that point without having been + // rewritten first + assert(false); + }, + + [&](const DerivationOutput::Impure & doi) { + return newInfoFromCA( + DerivationOutput::CAFloating{ + .method = doi.method, + .hashAlgo = doi.hashAlgo, + }); + }, - return newInfo0; }, - - [&](const DerivationOutput::CAFloating & dof) { - return newInfoFromCA(dof); - }, - - [&](const DerivationOutput::Deferred &) -> ValidPathInfo { - // No derivation should reach that point without having been - // rewritten first - assert(false); - }, - - [&](const DerivationOutput::Impure & doi) { - return newInfoFromCA(DerivationOutput::CAFloating { - .method = doi.method, - .hashAlgo = doi.hashAlgo, - }); - }, - - }, output->raw); + output->raw); /* FIXME: set proper permissions in restorePath() so we don't have to do another traversal. */ @@ -1836,9 +1815,7 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() PathLocks dynamicOutputLock; dynamicOutputLock.setDeletion(true); auto optFixedPath = output->path(store, drv.name, outputName); - if (!optFixedPath || - store.printStorePath(*optFixedPath) != finalDestPath) - { + if (!optFixedPath || store.printStorePath(*optFixedPath) != finalDestPath) { assert(newInfo.ca); dynamicOutputLock.lockPaths({store.toRealPath(finalDestPath)}); } @@ -1868,7 +1845,8 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() if (buildMode == bmCheck) { - if (!store.isValidPath(newInfo.path)) continue; + if (!store.isValidPath(newInfo.path)) + continue; ValidPathInfo oldInfo(*store.queryPathInfo(newInfo.path)); if (newInfo.narHash != oldInfo.narHash) { miscMethods->noteCheckMismatch(); @@ -1880,13 +1858,21 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() handleDiffHook( buildUser ? buildUser->getUID() : getuid(), buildUser ? 
buildUser->getGID() : getgid(), - finalDestPath, dst, store.printStorePath(drvPath), tmpDir); + finalDestPath, + dst, + store.printStorePath(drvPath), + tmpDir); - throw NotDeterministic("derivation '%s' may not be deterministic: output '%s' differs from '%s'", - store.printStorePath(drvPath), store.toRealPath(finalDestPath), dst); + throw NotDeterministic( + "derivation '%s' may not be deterministic: output '%s' differs from '%s'", + store.printStorePath(drvPath), + store.toRealPath(finalDestPath), + dst); } else - throw NotDeterministic("derivation '%s' may not be deterministic: output '%s' differs", - store.printStorePath(drvPath), store.toRealPath(finalDestPath)); + throw NotDeterministic( + "derivation '%s' may not be deterministic: output '%s' differs", + store.printStorePath(drvPath), + store.toRealPath(finalDestPath)); } /* Since we verified the build, it's now ultimately trusted. */ @@ -1965,16 +1951,8 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() for (auto & [outputName, newInfo] : infos) { auto oldinfo = get(initialOutputs, outputName); assert(oldinfo); - auto thisRealisation = Realisation { - .id = DrvOutput { - oldinfo->outputHash, - outputName - }, - .outPath = newInfo.path - }; - if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations) - && !drv.type().isImpure()) - { + auto thisRealisation = Realisation{.id = DrvOutput{oldinfo->outputHash, outputName}, .outPath = newInfo.path}; + if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations) && !drv.type().isImpure()) { store.signRealisation(thisRealisation); store.registerDrvOutput(thisRealisation); } @@ -1984,7 +1962,6 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() return builtOutputs; } - void DerivationBuilderImpl::checkOutputs(const std::map & outputs) { std::map outputsByPath; @@ -1998,8 +1975,7 @@ void DerivationBuilderImpl::checkOutputs(const std::map pathsLeft; @@ -2008,7 +1984,8 @@ void DerivationBuilderImpl::checkOutputs(const std::map *checks.maxSize) - throw BuildError("path '%s' is too large at %d bytes; limit is %d bytes", - store.printStorePath(info.path), info.narSize, *checks.maxSize); + throw BuildError( + "path '%s' is too large at %d bytes; limit is %d bytes", + store.printStorePath(info.path), + info.narSize, + *checks.maxSize); if (checks.maxClosureSize) { uint64_t closureSize = getClosure(info.path).second; if (closureSize > *checks.maxClosureSize) - throw BuildError("closure of path '%s' is too large at %d bytes; limit is %d bytes", - store.printStorePath(info.path), closureSize, *checks.maxClosureSize); + throw BuildError( + "closure of path '%s' is too large at %d bytes; limit is %d bytes", + store.printStorePath(info.path), + closureSize, + *checks.maxClosureSize); } - auto checkRefs = [&](const StringSet & value, bool allowed, bool recursive) - { + auto checkRefs = [&](const StringSet & value, bool allowed, bool recursive) { /* Parse a list of reference specifiers. Each element must either be a store path, or the symbolic name of the output of the derivation (such as `out'). 
*/ @@ -2051,16 +2032,19 @@ void DerivationBuilderImpl::checkOutputs(const std::mappath); else { - std::string outputsListing = concatMapStringsSep(", ", outputs, [](auto & o) { return o.first; }); - throw BuildError("derivation '%s' output check for '%s' contains an illegal reference specifier '%s'," + std::string outputsListing = + concatMapStringsSep(", ", outputs, [](auto & o) { return o.first; }); + throw BuildError( + "derivation '%s' output check for '%s' contains an illegal reference specifier '%s'," " expected store path or output name (one of [%s])", - store.printStorePath(drvPath), outputName, i, outputsListing); + store.printStorePath(drvPath), + outputName, + i, + outputsListing); } } - auto used = recursive - ? getClosure(info.path).first - : info.references; + auto used = recursive ? getClosure(info.path).first : info.references; if (recursive && checks.ignoreSelfRefs) used.erase(info.path); @@ -2082,8 +2066,10 @@ void DerivationBuilderImpl::checkOutputs(const std::map & checksPerOutput) { - if (auto outputChecks = get(checksPerOutput, outputName)) + std::visit( + overloaded{ + [&](const DerivationOptions::OutputChecks & checks) { applyChecks(checks); }, + [&](const std::map & checksPerOutput) { + if (auto outputChecks = get(checksPerOutput, outputName)) - applyChecks(*outputChecks); + applyChecks(*outputChecks); + }, }, - }, drvOptions.outputChecks); + drvOptions.outputChecks); } } - void DerivationBuilderImpl::deleteTmpDir(bool force) { if (topTmpDir != "") { @@ -2138,28 +2123,27 @@ void DerivationBuilderImpl::deleteTmpDir(bool force) printError("note: keeping build directory '%s'", tmpDir); chmod(topTmpDir.c_str(), 0755); chmod(tmpDir.c_str(), 0755); - } - else + } else deletePath(topTmpDir); topTmpDir = ""; tmpDir = ""; } } - StorePath DerivationBuilderImpl::makeFallbackPath(OutputNameView outputName) { // This is a bogus path type, constructed this way to ensure that it doesn't collide with any other store path // See doc/manual/source/protocols/store-path.md for details - // TODO: We may want to separate the responsibilities of constructing the path fingerprint and of actually doing the hashing + // TODO: We may want to separate the responsibilities of constructing the path fingerprint and of actually doing the + // hashing auto pathType = "rewrite:" + std::string(drvPath.to_string()) + ":name:" + std::string(outputName); return store.makeStorePath( pathType, // pass an all-zeroes hash - Hash(HashAlgorithm::SHA256), outputPathName(drv.name, outputName)); + Hash(HashAlgorithm::SHA256), + outputPathName(drv.name, outputName)); } - StorePath DerivationBuilderImpl::makeFallbackPath(const StorePath & path) { // This is a bogus path type, constructed this way to ensure that it doesn't collide with any other store path @@ -2168,10 +2152,11 @@ StorePath DerivationBuilderImpl::makeFallbackPath(const StorePath & path) return store.makeStorePath( pathType, // pass an all-zeroes hash - Hash(HashAlgorithm::SHA256), path.name()); + Hash(HashAlgorithm::SHA256), + path.name()); } -} +} // namespace nix // FIXME: do this properly #include "linux-derivation-builder.cc" @@ -2180,9 +2165,7 @@ StorePath DerivationBuilderImpl::makeFallbackPath(const StorePath & path) namespace nix { std::unique_ptr makeDerivationBuilder( - Store & store, - std::unique_ptr miscMethods, - DerivationBuilderParams params) + Store & store, std::unique_ptr miscMethods, DerivationBuilderParams params) { bool useSandbox = false; @@ -2190,16 +2173,19 @@ std::unique_ptr makeDerivationBuilder( { if 
(settings.sandboxMode == smEnabled) { if (params.drvOptions.noChroot) - throw Error("derivation '%s' has '__noChroot' set, " - "but that's not allowed when 'sandbox' is 'true'", store.printStorePath(params.drvPath)); + throw Error( + "derivation '%s' has '__noChroot' set, " + "but that's not allowed when 'sandbox' is 'true'", + store.printStorePath(params.drvPath)); #ifdef __APPLE__ if (params.drvOptions.additionalSandboxProfile != "") - throw Error("derivation '%s' specifies a sandbox profile, " - "but this is only allowed when 'sandbox' is 'relaxed'", store.printStorePath(params.drvPath)); + throw Error( + "derivation '%s' specifies a sandbox profile, " + "but this is only allowed when 'sandbox' is 'relaxed'", + store.printStorePath(params.drvPath)); #endif useSandbox = true; - } - else if (settings.sandboxMode == smDisabled) + } else if (settings.sandboxMode == smDisabled) useSandbox = false; else if (settings.sandboxMode == smRelaxed) // FIXME: cache derivationType @@ -2208,51 +2194,39 @@ std::unique_ptr makeDerivationBuilder( auto & localStore = getLocalStore(store); if (localStore.storeDir != localStore.config->realStoreDir.get()) { - #ifdef __linux__ - useSandbox = true; - #else - throw Error("building using a diverted store is not supported on this platform"); - #endif +#ifdef __linux__ + useSandbox = true; +#else + throw Error("building using a diverted store is not supported on this platform"); +#endif } - #ifdef __linux__ +#ifdef __linux__ if (useSandbox && !mountAndPidNamespacesSupported()) { if (!settings.sandboxFallback) - throw Error("this system does not support the kernel namespaces that are required for sandboxing; use '--no-sandbox' to disable sandboxing"); + throw Error( + "this system does not support the kernel namespaces that are required for sandboxing; use '--no-sandbox' to disable sandboxing"); debug("auto-disabling sandboxing because the prerequisite namespaces are not available"); useSandbox = false; } if (useSandbox) - return std::make_unique( - store, - std::move(miscMethods), - std::move(params)); - #endif + return std::make_unique(store, std::move(miscMethods), std::move(params)); +#endif if (!useSandbox && params.drvOptions.useUidRange(params.drv)) throw Error("feature 'uid-range' is only supported in sandboxed builds"); - #ifdef __APPLE__ - return std::make_unique( - store, - std::move(miscMethods), - std::move(params), - useSandbox); - #elif defined(__linux__) - return std::make_unique( - store, - std::move(miscMethods), - std::move(params)); - #else +#ifdef __APPLE__ + return std::make_unique(store, std::move(miscMethods), std::move(params), useSandbox); +#elif defined(__linux__) + return std::make_unique(store, std::move(miscMethods), std::move(params)); +#else if (useSandbox) throw Error("sandboxing builds is not supported on this platform"); - return std::make_unique( - store, - std::move(miscMethods), - std::move(params)); - #endif + return std::make_unique(store, std::move(miscMethods), std::move(params)); +#endif } -} +} // namespace nix diff --git a/src/libstore/unix/build/hook-instance.cc b/src/libstore/unix/build/hook-instance.cc index 3713f7c86..83824b51f 100644 --- a/src/libstore/unix/build/hook-instance.cc +++ b/src/libstore/unix/build/hook-instance.cc @@ -46,13 +46,13 @@ HookInstance::HookInstance() /* Fork the hook. 
*/ pid = startProcess([&]() { - if (dup2(fromHook.writeSide.get(), STDERR_FILENO) == -1) throw SysError("cannot pipe standard error into log file"); commonChildInit(); - if (chdir("/") == -1) throw SysError("changing into /"); + if (chdir("/") == -1) + throw SysError("changing into /"); /* Dup the communication pipes. */ if (dup2(toHook.readSide.get(), STDIN_FILENO) == -1) @@ -84,15 +84,15 @@ HookInstance::HookInstance() sink << 0; } - HookInstance::~HookInstance() { try { toHook.writeSide = -1; - if (pid != -1) pid.kill(); + if (pid != -1) + pid.kill(); } catch (...) { ignoreExceptionInDestructor(); } } -} +} // namespace nix diff --git a/src/libstore/unix/build/linux-derivation-builder.cc b/src/libstore/unix/build/linux-derivation-builder.cc index b23c8003f..d56990d48 100644 --- a/src/libstore/unix/build/linux-derivation-builder.cc +++ b/src/libstore/unix/build/linux-derivation-builder.cc @@ -878,6 +878,6 @@ struct ChrootLinuxDerivationBuilder : LinuxDerivationBuilder } }; -} +} // namespace nix #endif diff --git a/src/libstore/unix/include/nix/store/build/child.hh b/src/libstore/unix/include/nix/store/build/child.hh index 3dfc552b9..9216316cc 100644 --- a/src/libstore/unix/include/nix/store/build/child.hh +++ b/src/libstore/unix/include/nix/store/build/child.hh @@ -1,4 +1,5 @@ #pragma once + ///@file namespace nix { @@ -8,4 +9,4 @@ namespace nix { */ void commonChildInit(); -} +} // namespace nix diff --git a/src/libstore/unix/include/nix/store/build/derivation-builder.hh b/src/libstore/unix/include/nix/store/build/derivation-builder.hh index 5ce38e034..eecad3daa 100644 --- a/src/libstore/unix/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/unix/include/nix/store/build/derivation-builder.hh @@ -75,7 +75,8 @@ struct DerivationBuilderParams , inputPaths{inputPaths} , initialOutputs{initialOutputs} , buildMode{buildMode} - { } + { + } DerivationBuilderParams(DerivationBuilderParams &&) = default; }; @@ -189,8 +190,6 @@ struct DerivationBuilder : RestrictionContext }; std::unique_ptr makeDerivationBuilder( - Store & store, - std::unique_ptr miscMethods, - DerivationBuilderParams params); + Store & store, std::unique_ptr miscMethods, DerivationBuilderParams params); -} +} // namespace nix diff --git a/src/libstore/unix/include/nix/store/build/hook-instance.hh b/src/libstore/unix/include/nix/store/build/hook-instance.hh index ff205ff76..87e03665c 100644 --- a/src/libstore/unix/include/nix/store/build/hook-instance.hh +++ b/src/libstore/unix/include/nix/store/build/hook-instance.hh @@ -38,4 +38,4 @@ struct HookInstance ~HookInstance(); }; -} +} // namespace nix diff --git a/src/libstore/unix/include/nix/store/user-lock.hh b/src/libstore/unix/include/nix/store/user-lock.hh index a7caf8518..828980d6f 100644 --- a/src/libstore/unix/include/nix/store/user-lock.hh +++ b/src/libstore/unix/include/nix/store/user-lock.hh @@ -9,7 +9,7 @@ namespace nix { struct UserLock { - virtual ~UserLock() { } + virtual ~UserLock() {} /** * Get the first and last UID. 
@@ -40,4 +40,4 @@ std::unique_ptr acquireUserLock(uid_t nrIds, bool useUserNamespace); bool useBuildUsers(); -} +} // namespace nix diff --git a/src/libstore/unix/pathlocks.cc b/src/libstore/unix/pathlocks.cc index 58d047f4e..e3f411a5d 100644 --- a/src/libstore/unix/pathlocks.cc +++ b/src/libstore/unix/pathlocks.cc @@ -11,7 +11,6 @@ #include #include - namespace nix { AutoCloseFD openLockFile(const Path & path, bool create) @@ -25,7 +24,6 @@ AutoCloseFD openLockFile(const Path & path, bool create) return fd; } - void deleteLockFile(const Path & path, Descriptor desc) { /* Get rid of the lock file. Have to be careful not to introduce @@ -38,14 +36,17 @@ void deleteLockFile(const Path & path, Descriptor desc) file is an optimisation, not a necessity. */ } - bool lockFile(Descriptor desc, LockType lockType, bool wait) { int type; - if (lockType == ltRead) type = LOCK_SH; - else if (lockType == ltWrite) type = LOCK_EX; - else if (lockType == ltNone) type = LOCK_UN; - else unreachable(); + if (lockType == ltRead) + type = LOCK_SH; + else if (lockType == ltWrite) + type = LOCK_EX; + else if (lockType == ltNone) + type = LOCK_UN; + else + unreachable(); if (wait) { while (flock(desc, type) != 0) { @@ -58,7 +59,8 @@ bool lockFile(Descriptor desc, LockType lockType, bool wait) } else { while (flock(desc, type | LOCK_NB) != 0) { checkInterrupt(); - if (errno == EWOULDBLOCK) return false; + if (errno == EWOULDBLOCK) + return false; if (errno != EINTR) throw SysError("acquiring/releasing lock"); } @@ -67,9 +69,7 @@ bool lockFile(Descriptor desc, LockType lockType, bool wait) return true; } - -bool PathLocks::lockPaths(const PathSet & paths, - const std::string & waitMsg, bool wait) +bool PathLocks::lockPaths(const PathSet & paths, const std::string & waitMsg, bool wait) { assert(fds.empty()); @@ -95,7 +95,8 @@ bool PathLocks::lockPaths(const PathSet & paths, /* Acquire an exclusive lock. 
*/ if (!lockFile(fd.get(), ltWrite, false)) { if (wait) { - if (waitMsg != "") printError(waitMsg); + if (waitMsg != "") + printError(waitMsg); lockFile(fd.get(), ltWrite, true); } else { /* Failed to lock this path; release all other @@ -129,16 +130,14 @@ bool PathLocks::lockPaths(const PathSet & paths, return true; } - void PathLocks::unlock() { for (auto & i : fds) { - if (deletePaths) deleteLockFile(i.second, i.first); + if (deletePaths) + deleteLockFile(i.second, i.first); if (close(i.first) == -1) - printError( - "error (ignored): cannot close lock file on '%1%'", - i.second); + printError("error (ignored): cannot close lock file on '%1%'", i.second); debug("lock released on '%1%'", i.second); } @@ -146,7 +145,6 @@ void PathLocks::unlock() fds.clear(); } - FdLock::FdLock(Descriptor desc, LockType lockType, bool wait, std::string_view waitMsg) : desc(desc) { @@ -159,5 +157,4 @@ FdLock::FdLock(Descriptor desc, LockType lockType, bool wait, std::string_view w acquired = lockFile(desc, lockType, false); } - -} +} // namespace nix diff --git a/src/libstore/unix/user-lock.cc b/src/libstore/unix/user-lock.cc index f5d164e5b..c5e6455e8 100644 --- a/src/libstore/unix/user-lock.cc +++ b/src/libstore/unix/user-lock.cc @@ -13,12 +13,12 @@ namespace nix { #ifdef __linux__ -static std::vector get_group_list(const char *username, gid_t group_id) +static std::vector get_group_list(const char * username, gid_t group_id) { std::vector gids; gids.resize(32); // Initial guess - auto getgroupl_failed {[&] { + auto getgroupl_failed{[&] { int ngroups = gids.size(); int err = getgrouplist(username, group_id, gids.data(), &ngroups); gids.resize(ngroups); @@ -35,7 +35,6 @@ static std::vector get_group_list(const char *username, gid_t group_id) } #endif - struct SimpleUserLock : UserLock { AutoCloseFD fdUserLock; @@ -43,11 +42,27 @@ struct SimpleUserLock : UserLock gid_t gid; std::vector supplementaryGIDs; - uid_t getUID() override { assert(uid); return uid; } - uid_t getUIDCount() override { return 1; } - gid_t getGID() override { assert(gid); return gid; } + uid_t getUID() override + { + assert(uid); + return uid; + } - std::vector getSupplementaryGIDs() override { return supplementaryGIDs; } + uid_t getUIDCount() override + { + return 1; + } + + gid_t getGID() override + { + assert(gid); + return gid; + } + + std::vector getSupplementaryGIDs() override + { + return supplementaryGIDs; + } static std::unique_ptr acquire() { @@ -61,7 +76,7 @@ struct SimpleUserLock : UserLock /* Copy the result of getgrnam. */ Strings users; - for (char * * p = gr->gr_mem; *p; ++p) { + for (char ** p = gr->gr_mem; *p; ++p) { debug("found build user '%s'", *p); users.push_back(*p); } @@ -78,7 +93,7 @@ struct SimpleUserLock : UserLock if (!pw) throw Error("the user '%s' in the group '%s' does not exist", i, settings.buildUsersGroup); - auto fnUserLock = fmt("%s/userpool/%s", settings.nixStateDir,pw->pw_uid); + auto fnUserLock = fmt("%s/userpool/%s", settings.nixStateDir, pw->pw_uid); AutoCloseFD fd = open(fnUserLock.c_str(), O_RDWR | O_CREAT | O_CLOEXEC, 0600); if (!fd) @@ -95,7 +110,7 @@ struct SimpleUserLock : UserLock if (lock->uid == getuid() || lock->uid == geteuid()) throw Error("the Nix user should not be a member of '%s'", settings.buildUsersGroup); - #ifdef __linux__ +#ifdef __linux__ /* Get the list of supplementary groups of this user. This is * usually either empty or contains a group such as "kvm". 
*/ @@ -104,7 +119,7 @@ struct SimpleUserLock : UserLock if (gid != lock->gid) lock->supplementaryGIDs.push_back(gid); } - #endif +#endif return lock; } @@ -121,19 +136,33 @@ struct AutoUserLock : UserLock gid_t firstGid = 0; uid_t nrIds = 1; - uid_t getUID() override { assert(firstUid); return firstUid; } + uid_t getUID() override + { + assert(firstUid); + return firstUid; + } - gid_t getUIDCount() override { return nrIds; } + gid_t getUIDCount() override + { + return nrIds; + } - gid_t getGID() override { assert(firstGid); return firstGid; } + gid_t getGID() override + { + assert(firstGid); + return firstGid; + } - std::vector getSupplementaryGIDs() override { return {}; } + std::vector getSupplementaryGIDs() override + { + return {}; + } static std::unique_ptr acquire(uid_t nrIds, bool useUserNamespace) { - #if !defined(__linux__) +#if !defined(__linux__) useUserNamespace = false; - #endif +#endif experimentalFeatureSettings.require(Xp::AutoAllocateUids); assert(settings.startId > 0); @@ -172,7 +201,8 @@ struct AutoUserLock : UserLock else { struct group * gr = getgrnam(settings.buildUsersGroup.get().c_str()); if (!gr) - throw Error("the group '%s' specified in 'build-users-group' does not exist", settings.buildUsersGroup); + throw Error( + "the group '%s' specified in 'build-users-group' does not exist", settings.buildUsersGroup); lock->firstGid = gr->gr_gid; } lock->nrIds = nrIds; @@ -194,15 +224,15 @@ std::unique_ptr acquireUserLock(uid_t nrIds, bool useUserNamespace) bool useBuildUsers() { - #ifdef __linux__ +#ifdef __linux__ static bool b = (settings.buildUsersGroup != "" || settings.autoAllocateUids) && isRootUser(); return b; - #elif defined(__APPLE__) || defined(__FreeBSD__) +#elif defined(__APPLE__) || defined(__FreeBSD__) static bool b = settings.buildUsersGroup != "" && isRootUser(); return b; - #else +#else return false; - #endif +#endif } -} +} // namespace nix diff --git a/src/libstore/windows/pathlocks.cc b/src/libstore/windows/pathlocks.cc index 92a7cbcf9..c4e3a3d39 100644 --- a/src/libstore/windows/pathlocks.cc +++ b/src/libstore/windows/pathlocks.cc @@ -155,5 +155,5 @@ FdLock::FdLock(Descriptor desc, LockType lockType, bool wait, std::string_view w acquired = lockFile(desc, lockType, false); } -} +} // namespace nix #endif diff --git a/src/libstore/worker-protocol-connection.cc b/src/libstore/worker-protocol-connection.cc index d07dc8163..015a79ad6 100644 --- a/src/libstore/worker-protocol-connection.cc +++ b/src/libstore/worker-protocol-connection.cc @@ -321,4 +321,4 @@ void WorkerProto::BasicClientConnection::importPaths( auto importedPaths = WorkerProto::Serialise::read(store, *this); assert(importedPaths.size() <= importedPaths.size()); } -} +} // namespace nix diff --git a/src/libstore/worker-protocol.cc b/src/libstore/worker-protocol.cc index 21b21a347..1bbff64a2 100644 --- a/src/libstore/worker-protocol.cc +++ b/src/libstore/worker-protocol.cc @@ -18,14 +18,19 @@ BuildMode WorkerProto::Serialise::read(const StoreDirConfig & store, { auto temp = readNum(conn.from); switch (temp) { - case 0: return bmNormal; - case 1: return bmRepair; - case 2: return bmCheck; - default: throw Error("Invalid build mode"); + case 0: + return bmNormal; + case 1: + return bmRepair; + case 2: + return bmCheck; + default: + throw Error("Invalid build mode"); } } -void WorkerProto::Serialise::write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const BuildMode & buildMode) +void WorkerProto::Serialise::write( + const StoreDirConfig & store, WorkerProto::WriteConn conn, const 
BuildMode & buildMode) { switch (buildMode) { case bmNormal: @@ -42,22 +47,24 @@ void WorkerProto::Serialise::write(const StoreDirConfig & store, Work }; } -std::optional WorkerProto::Serialise>::read(const StoreDirConfig & store, WorkerProto::ReadConn conn) +std::optional +WorkerProto::Serialise>::read(const StoreDirConfig & store, WorkerProto::ReadConn conn) { auto temp = readNum(conn.from); switch (temp) { - case 0: - return std::nullopt; - case 1: - return { Trusted }; - case 2: - return { NotTrusted }; - default: - throw Error("Invalid trusted status from remote"); + case 0: + return std::nullopt; + case 1: + return {Trusted}; + case 2: + return {NotTrusted}; + default: + throw Error("Invalid trusted status from remote"); } } -void WorkerProto::Serialise>::write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const std::optional & optTrusted) +void WorkerProto::Serialise>::write( + const StoreDirConfig & store, WorkerProto::WriteConn conn, const std::optional & optTrusted) { if (!optTrusted) conn.to << uint8_t{0}; @@ -75,32 +82,32 @@ void WorkerProto::Serialise>::write(const StoreDirCon } } - -std::optional WorkerProto::Serialise>::read(const StoreDirConfig & store, WorkerProto::ReadConn conn) +std::optional WorkerProto::Serialise>::read( + const StoreDirConfig & store, WorkerProto::ReadConn conn) { auto tag = readNum(conn.from); switch (tag) { - case 0: - return std::nullopt; - case 1: - return std::optional{std::chrono::microseconds(readNum(conn.from))}; - default: - throw Error("Invalid optional tag from remote"); + case 0: + return std::nullopt; + case 1: + return std::optional{std::chrono::microseconds(readNum(conn.from))}; + default: + throw Error("Invalid optional tag from remote"); } } -void WorkerProto::Serialise>::write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const std::optional & optDuration) +void WorkerProto::Serialise>::write( + const StoreDirConfig & store, + WorkerProto::WriteConn conn, + const std::optional & optDuration) { if (!optDuration.has_value()) { conn.to << uint8_t{0}; } else { - conn.to - << uint8_t{1} - << optDuration.value().count(); + conn.to << uint8_t{1} << optDuration.value().count(); } } - DerivedPath WorkerProto::Serialise::read(const StoreDirConfig & store, WorkerProto::ReadConn conn) { auto s = readString(conn.from); @@ -111,58 +118,57 @@ DerivedPath WorkerProto::Serialise::read(const StoreDirConfig & sto } } -void WorkerProto::Serialise::write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const DerivedPath & req) +void WorkerProto::Serialise::write( + const StoreDirConfig & store, WorkerProto::WriteConn conn, const DerivedPath & req) { if (GET_PROTOCOL_MINOR(conn.version) >= 30) { conn.to << req.to_string_legacy(store); } else { auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(req); - std::visit(overloaded { - [&](const StorePathWithOutputs & s) { - conn.to << s.to_string(store); + std::visit( + overloaded{ + [&](const StorePathWithOutputs & s) { conn.to << s.to_string(store); }, + [&](const StorePath & drvPath) { + throw Error( + "trying to request '%s', but daemon protocol %d.%d is too old (< 1.29) to request a derivation file", + store.printStorePath(drvPath), + GET_PROTOCOL_MAJOR(conn.version), + GET_PROTOCOL_MINOR(conn.version)); + }, + [&](std::monostate) { + throw Error( + "wanted to build a derivation that is itself a build product, but protocols do not support that. 
Try upgrading the Nix on the other end of this connection"); + }, }, - [&](const StorePath & drvPath) { - throw Error("trying to request '%s', but daemon protocol %d.%d is too old (< 1.29) to request a derivation file", - store.printStorePath(drvPath), - GET_PROTOCOL_MAJOR(conn.version), - GET_PROTOCOL_MINOR(conn.version)); - }, - [&](std::monostate) { - throw Error("wanted to build a derivation that is itself a build product, but protocols do not support that. Try upgrading the Nix on the other end of this connection"); - }, - }, sOrDrvPath); + sOrDrvPath); } } - -KeyedBuildResult WorkerProto::Serialise::read(const StoreDirConfig & store, WorkerProto::ReadConn conn) +KeyedBuildResult +WorkerProto::Serialise::read(const StoreDirConfig & store, WorkerProto::ReadConn conn) { auto path = WorkerProto::Serialise::read(store, conn); auto br = WorkerProto::Serialise::read(store, conn); - return KeyedBuildResult { + return KeyedBuildResult{ std::move(br), /* .path = */ std::move(path), }; } -void WorkerProto::Serialise::write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const KeyedBuildResult & res) +void WorkerProto::Serialise::write( + const StoreDirConfig & store, WorkerProto::WriteConn conn, const KeyedBuildResult & res) { WorkerProto::write(store, conn, res.path); WorkerProto::write(store, conn, static_cast(res)); } - BuildResult WorkerProto::Serialise::read(const StoreDirConfig & store, WorkerProto::ReadConn conn) { BuildResult res; res.status = static_cast(readInt(conn.from)); conn.from >> res.errorMsg; if (GET_PROTOCOL_MINOR(conn.version) >= 29) { - conn.from - >> res.timesBuilt - >> res.isNonDeterministic - >> res.startTime - >> res.stopTime; + conn.from >> res.timesBuilt >> res.isNonDeterministic >> res.startTime >> res.stopTime; } if (GET_PROTOCOL_MINOR(conn.version) >= 37) { res.cpuUser = WorkerProto::Serialise>::read(store, conn); @@ -171,24 +177,17 @@ BuildResult WorkerProto::Serialise::read(const StoreDirConfig & sto if (GET_PROTOCOL_MINOR(conn.version) >= 28) { auto builtOutputs = WorkerProto::Serialise::read(store, conn); for (auto && [output, realisation] : builtOutputs) - res.builtOutputs.insert_or_assign( - std::move(output.outputName), - std::move(realisation)); + res.builtOutputs.insert_or_assign(std::move(output.outputName), std::move(realisation)); } return res; } -void WorkerProto::Serialise::write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const BuildResult & res) +void WorkerProto::Serialise::write( + const StoreDirConfig & store, WorkerProto::WriteConn conn, const BuildResult & res) { - conn.to - << res.status - << res.errorMsg; + conn.to << res.status << res.errorMsg; if (GET_PROTOCOL_MINOR(conn.version) >= 29) { - conn.to - << res.timesBuilt - << res.isNonDeterministic - << res.startTime - << res.stopTime; + conn.to << res.timesBuilt << res.isNonDeterministic << res.startTime << res.stopTime; } if (GET_PROTOCOL_MINOR(conn.version) >= 37) { WorkerProto::write(store, conn, res.cpuUser); @@ -202,29 +201,29 @@ void WorkerProto::Serialise::write(const StoreDirConfig & store, Wo } } - ValidPathInfo WorkerProto::Serialise::read(const StoreDirConfig & store, ReadConn conn) { auto path = WorkerProto::Serialise::read(store, conn); - return ValidPathInfo { + return ValidPathInfo{ std::move(path), WorkerProto::Serialise::read(store, conn), }; } -void WorkerProto::Serialise::write(const StoreDirConfig & store, WriteConn conn, const ValidPathInfo & pathInfo) +void WorkerProto::Serialise::write( + const StoreDirConfig & store, WriteConn conn, const 
ValidPathInfo & pathInfo) { WorkerProto::write(store, conn, pathInfo.path); WorkerProto::write(store, conn, static_cast(pathInfo)); } - UnkeyedValidPathInfo WorkerProto::Serialise::read(const StoreDirConfig & store, ReadConn conn) { auto deriver = readString(conn.from); auto narHash = Hash::parseAny(readString(conn.from), HashAlgorithm::SHA256); UnkeyedValidPathInfo info(narHash); - if (deriver != "") info.deriver = store.parseStorePath(deriver); + if (deriver != "") + info.deriver = store.parseStorePath(deriver); info.references = WorkerProto::Serialise::read(store, conn); conn.from >> info.registrationTime >> info.narSize; if (GET_PROTOCOL_MINOR(conn.version) >= 16) { @@ -235,23 +234,20 @@ UnkeyedValidPathInfo WorkerProto::Serialise::read(const St return info; } -void WorkerProto::Serialise::write(const StoreDirConfig & store, WriteConn conn, const UnkeyedValidPathInfo & pathInfo) +void WorkerProto::Serialise::write( + const StoreDirConfig & store, WriteConn conn, const UnkeyedValidPathInfo & pathInfo) { - conn.to - << (pathInfo.deriver ? store.printStorePath(*pathInfo.deriver) : "") - << pathInfo.narHash.to_string(HashFormat::Base16, false); + conn.to << (pathInfo.deriver ? store.printStorePath(*pathInfo.deriver) : "") + << pathInfo.narHash.to_string(HashFormat::Base16, false); WorkerProto::write(store, conn, pathInfo.references); conn.to << pathInfo.registrationTime << pathInfo.narSize; if (GET_PROTOCOL_MINOR(conn.version) >= 16) { - conn.to - << pathInfo.ultimate - << pathInfo.sigs - << renderContentAddress(pathInfo.ca); + conn.to << pathInfo.ultimate << pathInfo.sigs << renderContentAddress(pathInfo.ca); } } - -WorkerProto::ClientHandshakeInfo WorkerProto::Serialise::read(const StoreDirConfig & store, ReadConn conn) +WorkerProto::ClientHandshakeInfo +WorkerProto::Serialise::read(const StoreDirConfig & store, ReadConn conn) { WorkerProto::ClientHandshakeInfo res; @@ -260,7 +256,7 @@ WorkerProto::ClientHandshakeInfo WorkerProto::Serialise= 35) { - res.remoteTrustsUs = WorkerProto::Serialise>::read(store, conn); + res.remoteTrustsUs = WorkerProto::Serialise>::read(store, conn); } else { // We don't know the answer; protocol to old. res.remoteTrustsUs = std::nullopt; @@ -269,7 +265,8 @@ WorkerProto::ClientHandshakeInfo WorkerProto::Serialise::write(const StoreDirConfig & store, WriteConn conn, const WorkerProto::ClientHandshakeInfo & info) +void WorkerProto::Serialise::write( + const StoreDirConfig & store, WriteConn conn, const WorkerProto::ClientHandshakeInfo & info) { if (GET_PROTOCOL_MINOR(conn.version) >= 33) { assert(info.daemonNixVersion); @@ -281,4 +278,4 @@ void WorkerProto::Serialise::write(const Store } } -} +} // namespace nix diff --git a/src/libutil-c/nix_api_util_internal.h b/src/libutil-c/nix_api_util_internal.h index 8fbf3d91a..664cd6e23 100644 --- a/src/libutil-c/nix_api_util_internal.h +++ b/src/libutil-c/nix_api_util_internal.h @@ -32,18 +32,18 @@ nix_err nix_context_error(nix_c_context * context); */ nix_err call_nix_get_string_callback(const std::string str, nix_get_string_callback callback, void * user_data); -#define NIXC_CATCH_ERRS \ - catch (...) \ - { \ +#define NIXC_CATCH_ERRS \ + catch (...) \ + { \ return nix_context_error(context); \ - } \ + } \ return NIX_OK; -#define NIXC_CATCH_ERRS_RES(def) \ - catch (...) \ - { \ +#define NIXC_CATCH_ERRS_RES(def) \ + catch (...) 
\ + { \ nix_context_error(context); \ - return def; \ + return def; \ } #define NIXC_CATCH_ERRS_NULL NIXC_CATCH_ERRS_RES(nullptr) diff --git a/src/libutil-test-support/hash.cc b/src/libutil-test-support/hash.cc index d047f4073..ffff27926 100644 --- a/src/libutil-test-support/hash.cc +++ b/src/libutil-test-support/hash.cc @@ -12,16 +12,14 @@ using namespace nix; Gen Arbitrary::arbitrary() { Hash prototype(HashAlgorithm::SHA1); - return - gen::apply( - [](const std::vector & v) { - Hash hash(HashAlgorithm::SHA1); - assert(v.size() == hash.hashSize); - std::copy(v.begin(), v.end(), hash.hash); - return hash; - }, - gen::container>(prototype.hashSize, gen::arbitrary()) - ); + return gen::apply( + [](const std::vector & v) { + Hash hash(HashAlgorithm::SHA1); + assert(v.size() == hash.hashSize); + std::copy(v.begin(), v.end(), hash.hash); + return hash; + }, + gen::container>(prototype.hashSize, gen::arbitrary())); } -} +} // namespace rc diff --git a/src/libutil-test-support/include/nix/util/tests/characterization.hh b/src/libutil-test-support/include/nix/util/tests/characterization.hh index 3e8effe8b..0434590f7 100644 --- a/src/libutil-test-support/include/nix/util/tests/characterization.hh +++ b/src/libutil-test-support/include/nix/util/tests/characterization.hh @@ -13,7 +13,8 @@ namespace nix { * The path to the unit test data directory. See the contributing guide * in the manual for further details. */ -static inline std::filesystem::path getUnitTestData() { +static inline std::filesystem::path getUnitTestData() +{ return getEnv("_NIX_TEST_UNIT_DATA").value(); } @@ -22,7 +23,8 @@ static inline std::filesystem::path getUnitTestData() { * against them. See the contributing guide in the manual for further * details. */ -static inline bool testAccept() { +static inline bool testAccept() +{ return getEnv("_NIX_TEST_ACCEPT") == "1"; } @@ -49,15 +51,9 @@ public: { auto file = goldenMaster(testStem); - if (testAccept()) - { - GTEST_SKIP() - << "Cannot read golden master " - << file - << "because another test is also updating it"; - } - else - { + if (testAccept()) { + GTEST_SKIP() << "Cannot read golden master " << file << "because another test is also updating it"; + } else { test(readFile(file)); } } @@ -68,23 +64,17 @@ public: * @param test hook that produces contents of the file and does the * actual work */ - void writeTest( - PathView testStem, auto && test, auto && readFile2, auto && writeFile2) + void writeTest(PathView testStem, auto && test, auto && readFile2, auto && writeFile2) { auto file = goldenMaster(testStem); auto got = test(); - if (testAccept()) - { + if (testAccept()) { std::filesystem::create_directories(file.parent_path()); writeFile2(file, got); - GTEST_SKIP() - << "Updating golden master " - << file; - } - else - { + GTEST_SKIP() << "Updating golden master " << file; + } else { decltype(got) expected = readFile2(file); ASSERT_EQ(got, expected); } @@ -96,14 +86,11 @@ public: void writeTest(PathView testStem, auto && test) { writeTest( - testStem, test, - [](const std::filesystem::path & f) -> std::string { - return readFile(f); - }, - [](const std::filesystem::path & f, const std::string & c) { - return writeFile(f, c); - }); + testStem, + test, + [](const std::filesystem::path & f) -> std::string { return readFile(f); }, + [](const std::filesystem::path & f, const std::string & c) { return writeFile(f, c); }); } }; -} +} // namespace nix diff --git a/src/libutil-test-support/include/nix/util/tests/gtest-with-params.hh 
b/src/libutil-test-support/include/nix/util/tests/gtest-with-params.hh index a6e23ad89..a086bbeea 100644 --- a/src/libutil-test-support/include/nix/util/tests/gtest-with-params.hh +++ b/src/libutil-test-support/include/nix/util/tests/gtest-with-params.hh @@ -43,7 +43,7 @@ void checkGTestWith(Testable && testable, MakeTestParams makeTestParams) throw std::runtime_error(ss.str()); } } -} +} // namespace rc::detail #define RC_GTEST_PROP_WITH_PARAMS(TestCase, Name, MakeParams, ArgList) \ void rapidCheck_propImpl_##TestCase##_##Name ArgList; \ diff --git a/src/libutil-test-support/include/nix/util/tests/hash.hh b/src/libutil-test-support/include/nix/util/tests/hash.hh index de832c12f..633f7bbf7 100644 --- a/src/libutil-test-support/include/nix/util/tests/hash.hh +++ b/src/libutil-test-support/include/nix/util/tests/hash.hh @@ -9,8 +9,9 @@ namespace rc { using namespace nix; template<> -struct Arbitrary { +struct Arbitrary +{ static Gen arbitrary(); }; -} +} // namespace rc diff --git a/src/libutil-test-support/include/nix/util/tests/nix_api_util.hh b/src/libutil-test-support/include/nix/util/tests/nix_api_util.hh index 382c7b292..57f7f1ecf 100644 --- a/src/libutil-test-support/include/nix/util/tests/nix_api_util.hh +++ b/src/libutil-test-support/include/nix/util/tests/nix_api_util.hh @@ -40,6 +40,7 @@ protected: std::string msg(p, n); throw std::runtime_error(loc(file, line) + ": nix_err_code(ctx) != NIX_OK, message: " + msg); } + #define assert_ctx_ok() assert_ctx_ok(__FILE__, __LINE__) inline void assert_ctx_err(const char * file, int line) @@ -49,7 +50,8 @@ protected: } throw std::runtime_error(loc(file, line) + ": Got NIX_OK, but expected an error!"); } + #define assert_ctx_err() assert_ctx_err(__FILE__, __LINE__) }; -} +} // namespace nixC diff --git a/src/libutil-test-support/include/nix/util/tests/string_callback.hh b/src/libutil-test-support/include/nix/util/tests/string_callback.hh index 9a7e8d85d..c7eb9d013 100644 --- a/src/libutil-test-support/include/nix/util/tests/string_callback.hh +++ b/src/libutil-test-support/include/nix/util/tests/string_callback.hh @@ -12,4 +12,4 @@ inline void * observe_string_cb_data(std::string & out) #define OBSERVE_STRING(str) nix::testing::observe_string_cb, nix::testing::observe_string_cb_data(str) -} +} // namespace nix::testing diff --git a/src/libutil-test-support/string_callback.cc b/src/libutil-test-support/string_callback.cc index 4f6a9cf40..b64389e4a 100644 --- a/src/libutil-test-support/string_callback.cc +++ b/src/libutil-test-support/string_callback.cc @@ -8,4 +8,4 @@ void observe_string_cb(const char * start, unsigned int n, void * user_data) *user_data_casted = std::string(start); } -} +} // namespace nix::testing diff --git a/src/libutil-tests/args.cc b/src/libutil-tests/args.cc index f5ad43a55..7aa996233 100644 --- a/src/libutil-tests/args.cc +++ b/src/libutil-tests/args.cc @@ -7,97 +7,110 @@ namespace nix { - TEST(parseShebangContent, basic) { - std::list r = parseShebangContent("hi there"); - ASSERT_EQ(r.size(), 2u); - auto i = r.begin(); - ASSERT_EQ(*i++, "hi"); - ASSERT_EQ(*i++, "there"); - } +TEST(parseShebangContent, basic) +{ + std::list r = parseShebangContent("hi there"); + ASSERT_EQ(r.size(), 2u); + auto i = r.begin(); + ASSERT_EQ(*i++, "hi"); + ASSERT_EQ(*i++, "there"); +} - TEST(parseShebangContent, empty) { - std::list r = parseShebangContent(""); - ASSERT_EQ(r.size(), 0u); - } +TEST(parseShebangContent, empty) +{ + std::list r = parseShebangContent(""); + ASSERT_EQ(r.size(), 0u); +} - TEST(parseShebangContent, 
doubleBacktick) { - std::list r = parseShebangContent("``\"ain't that nice\"``"); - ASSERT_EQ(r.size(), 1u); - auto i = r.begin(); - ASSERT_EQ(*i++, "\"ain't that nice\""); - } +TEST(parseShebangContent, doubleBacktick) +{ + std::list r = parseShebangContent("``\"ain't that nice\"``"); + ASSERT_EQ(r.size(), 1u); + auto i = r.begin(); + ASSERT_EQ(*i++, "\"ain't that nice\""); +} - TEST(parseShebangContent, doubleBacktickEmpty) { - std::list r = parseShebangContent("````"); - ASSERT_EQ(r.size(), 1u); - auto i = r.begin(); - ASSERT_EQ(*i++, ""); - } +TEST(parseShebangContent, doubleBacktickEmpty) +{ + std::list r = parseShebangContent("````"); + ASSERT_EQ(r.size(), 1u); + auto i = r.begin(); + ASSERT_EQ(*i++, ""); +} - TEST(parseShebangContent, doubleBacktickMarkdownInlineCode) { - std::list r = parseShebangContent("``# I'm markdown section about `coolFunction` ``"); - ASSERT_EQ(r.size(), 1u); - auto i = r.begin(); - ASSERT_EQ(*i++, "# I'm markdown section about `coolFunction`"); - } +TEST(parseShebangContent, doubleBacktickMarkdownInlineCode) +{ + std::list r = parseShebangContent("``# I'm markdown section about `coolFunction` ``"); + ASSERT_EQ(r.size(), 1u); + auto i = r.begin(); + ASSERT_EQ(*i++, "# I'm markdown section about `coolFunction`"); +} - TEST(parseShebangContent, doubleBacktickMarkdownCodeBlockNaive) { - std::list r = parseShebangContent("``Example 1\n```nix\na: a\n``` ``"); - auto i = r.begin(); - ASSERT_EQ(r.size(), 1u); - ASSERT_EQ(*i++, "Example 1\n``nix\na: a\n``"); - } +TEST(parseShebangContent, doubleBacktickMarkdownCodeBlockNaive) +{ + std::list r = parseShebangContent("``Example 1\n```nix\na: a\n``` ``"); + auto i = r.begin(); + ASSERT_EQ(r.size(), 1u); + ASSERT_EQ(*i++, "Example 1\n``nix\na: a\n``"); +} - TEST(parseShebangContent, doubleBacktickMarkdownCodeBlockCorrect) { - std::list r = parseShebangContent("``Example 1\n````nix\na: a\n```` ``"); - auto i = r.begin(); - ASSERT_EQ(r.size(), 1u); - ASSERT_EQ(*i++, "Example 1\n```nix\na: a\n```"); - } +TEST(parseShebangContent, doubleBacktickMarkdownCodeBlockCorrect) +{ + std::list r = parseShebangContent("``Example 1\n````nix\na: a\n```` ``"); + auto i = r.begin(); + ASSERT_EQ(r.size(), 1u); + ASSERT_EQ(*i++, "Example 1\n```nix\na: a\n```"); +} - TEST(parseShebangContent, doubleBacktickMarkdownCodeBlock2) { - std::list r = parseShebangContent("``Example 1\n````nix\na: a\n````\nExample 2\n````nix\na: a\n```` ``"); - auto i = r.begin(); - ASSERT_EQ(r.size(), 1u); - ASSERT_EQ(*i++, "Example 1\n```nix\na: a\n```\nExample 2\n```nix\na: a\n```"); - } +TEST(parseShebangContent, doubleBacktickMarkdownCodeBlock2) +{ + std::list r = + parseShebangContent("``Example 1\n````nix\na: a\n````\nExample 2\n````nix\na: a\n```` ``"); + auto i = r.begin(); + ASSERT_EQ(r.size(), 1u); + ASSERT_EQ(*i++, "Example 1\n```nix\na: a\n```\nExample 2\n```nix\na: a\n```"); +} - TEST(parseShebangContent, singleBacktickInDoubleBacktickQuotes) { - std::list r = parseShebangContent("``` ``"); - auto i = r.begin(); - ASSERT_EQ(r.size(), 1u); - ASSERT_EQ(*i++, "`"); - } +TEST(parseShebangContent, singleBacktickInDoubleBacktickQuotes) +{ + std::list r = parseShebangContent("``` ``"); + auto i = r.begin(); + ASSERT_EQ(r.size(), 1u); + ASSERT_EQ(*i++, "`"); +} - TEST(parseShebangContent, singleBacktickAndSpaceInDoubleBacktickQuotes) { - std::list r = parseShebangContent("``` ``"); - auto i = r.begin(); - ASSERT_EQ(r.size(), 1u); - ASSERT_EQ(*i++, "` "); - } +TEST(parseShebangContent, singleBacktickAndSpaceInDoubleBacktickQuotes) +{ + std::list r = 
parseShebangContent("``` ``"); + auto i = r.begin(); + ASSERT_EQ(r.size(), 1u); + ASSERT_EQ(*i++, "` "); +} - TEST(parseShebangContent, doubleBacktickInDoubleBacktickQuotes) { - std::list r = parseShebangContent("````` ``"); - auto i = r.begin(); - ASSERT_EQ(r.size(), 1u); - ASSERT_EQ(*i++, "``"); - } - - TEST(parseShebangContent, increasingQuotes) { - std::list r = parseShebangContent("```` ``` `` ````` `` `````` ``"); - auto i = r.begin(); - ASSERT_EQ(r.size(), 4u); - ASSERT_EQ(*i++, ""); - ASSERT_EQ(*i++, "`"); - ASSERT_EQ(*i++, "``"); - ASSERT_EQ(*i++, "```"); - } +TEST(parseShebangContent, doubleBacktickInDoubleBacktickQuotes) +{ + std::list r = parseShebangContent("````` ``"); + auto i = r.begin(); + ASSERT_EQ(r.size(), 1u); + ASSERT_EQ(*i++, "``"); +} +TEST(parseShebangContent, increasingQuotes) +{ + std::list r = parseShebangContent("```` ``` `` ````` `` `````` ``"); + auto i = r.begin(); + ASSERT_EQ(r.size(), 4u); + ASSERT_EQ(*i++, ""); + ASSERT_EQ(*i++, "`"); + ASSERT_EQ(*i++, "``"); + ASSERT_EQ(*i++, "```"); +} #ifndef COVERAGE // quick and dirty -static inline std::string escape(std::string_view s_) { +static inline std::string escape(std::string_view s_) +{ std::string_view s = s_; std::string r = "``"; @@ -125,11 +138,7 @@ static inline std::string escape(std::string_view s_) { } } - if (!r.empty() - && ( - r[r.size() - 1] == '`' - || r[r.size() - 1] == ' ' - )) { + if (!r.empty() && (r[r.size() - 1] == '`' || r[r.size() - 1] == ' ')) { r += " "; } @@ -138,10 +147,7 @@ static inline std::string escape(std::string_view s_) { return r; }; -RC_GTEST_PROP( - parseShebangContent, - prop_round_trip_single, - (const std::string & orig)) +RC_GTEST_PROP(parseShebangContent, prop_round_trip_single, (const std::string & orig)) { auto escaped = escape(orig); // RC_LOG() << "escaped: <[[" << escaped << "]]>" << std::endl; @@ -150,10 +156,7 @@ RC_GTEST_PROP( RC_ASSERT(*ss.begin() == orig); } -RC_GTEST_PROP( - parseShebangContent, - prop_round_trip_two, - (const std::string & one, const std::string & two)) +RC_GTEST_PROP(parseShebangContent, prop_round_trip_two, (const std::string & one, const std::string & two)) { auto ss = parseShebangContent(escape(one) + " " + escape(two)); RC_ASSERT(ss.size() == 2u); @@ -162,7 +165,6 @@ RC_GTEST_PROP( RC_ASSERT(*i++ == two); } - #endif -} +} // namespace nix diff --git a/src/libutil-tests/canon-path.cc b/src/libutil-tests/canon-path.cc index c6808bf66..971a9cc96 100644 --- a/src/libutil-tests/canon-path.cc +++ b/src/libutil-tests/canon-path.cc @@ -4,177 +4,189 @@ namespace nix { - TEST(CanonPath, basic) { - { - CanonPath p("/"); - ASSERT_EQ(p.abs(), "/"); - ASSERT_EQ(p.rel(), ""); - ASSERT_EQ(p.baseName(), std::nullopt); - ASSERT_EQ(p.dirOf(), std::nullopt); - ASSERT_FALSE(p.parent()); - } - - { - CanonPath p("/foo//"); - ASSERT_EQ(p.abs(), "/foo"); - ASSERT_EQ(p.rel(), "foo"); - ASSERT_EQ(*p.baseName(), "foo"); - ASSERT_EQ(*p.dirOf(), ""); // FIXME: do we want this? 
- ASSERT_EQ(p.parent()->abs(), "/"); - } - - { - CanonPath p("foo/bar"); - ASSERT_EQ(p.abs(), "/foo/bar"); - ASSERT_EQ(p.rel(), "foo/bar"); - ASSERT_EQ(*p.baseName(), "bar"); - ASSERT_EQ(*p.dirOf(), "/foo"); - ASSERT_EQ(p.parent()->abs(), "/foo"); - } - - { - CanonPath p("foo//bar/"); - ASSERT_EQ(p.abs(), "/foo/bar"); - ASSERT_EQ(p.rel(), "foo/bar"); - ASSERT_EQ(*p.baseName(), "bar"); - ASSERT_EQ(*p.dirOf(), "/foo"); - } - } - - TEST(CanonPath, from_existing) { - CanonPath p0("foo//bar/"); - { - CanonPath p("/baz//quux/", p0); - ASSERT_EQ(p.abs(), "/baz/quux"); - ASSERT_EQ(p.rel(), "baz/quux"); - ASSERT_EQ(*p.baseName(), "quux"); - ASSERT_EQ(*p.dirOf(), "/baz"); - } - { - CanonPath p("baz//quux/", p0); - ASSERT_EQ(p.abs(), "/foo/bar/baz/quux"); - ASSERT_EQ(p.rel(), "foo/bar/baz/quux"); - ASSERT_EQ(*p.baseName(), "quux"); - ASSERT_EQ(*p.dirOf(), "/foo/bar/baz"); - } - } - - TEST(CanonPath, pop) { - CanonPath p("foo/bar/x"); - ASSERT_EQ(p.abs(), "/foo/bar/x"); - p.pop(); - ASSERT_EQ(p.abs(), "/foo/bar"); - p.pop(); - ASSERT_EQ(p.abs(), "/foo"); - p.pop(); +TEST(CanonPath, basic) +{ + { + CanonPath p("/"); ASSERT_EQ(p.abs(), "/"); + ASSERT_EQ(p.rel(), ""); + ASSERT_EQ(p.baseName(), std::nullopt); + ASSERT_EQ(p.dirOf(), std::nullopt); + ASSERT_FALSE(p.parent()); } - TEST(CanonPath, removePrefix) { - CanonPath p1("foo/bar"); - CanonPath p2("foo/bar/a/b/c"); - ASSERT_EQ(p2.removePrefix(p1).abs(), "/a/b/c"); - ASSERT_EQ(p1.removePrefix(p1).abs(), "/"); - ASSERT_EQ(p1.removePrefix(CanonPath("/")).abs(), "/foo/bar"); + { + CanonPath p("/foo//"); + ASSERT_EQ(p.abs(), "/foo"); + ASSERT_EQ(p.rel(), "foo"); + ASSERT_EQ(*p.baseName(), "foo"); + ASSERT_EQ(*p.dirOf(), ""); // FIXME: do we want this? + ASSERT_EQ(p.parent()->abs(), "/"); } - TEST(CanonPath, iter) { - { - CanonPath p("a//foo/bar//"); - std::vector ss; - for (auto & c : p) ss.push_back(c); - ASSERT_EQ(ss, std::vector({"a", "foo", "bar"})); - } - - { - CanonPath p("/"); - std::vector ss; - for (auto & c : p) ss.push_back(c); - ASSERT_EQ(ss, std::vector()); - } + { + CanonPath p("foo/bar"); + ASSERT_EQ(p.abs(), "/foo/bar"); + ASSERT_EQ(p.rel(), "foo/bar"); + ASSERT_EQ(*p.baseName(), "bar"); + ASSERT_EQ(*p.dirOf(), "/foo"); + ASSERT_EQ(p.parent()->abs(), "/foo"); } - TEST(CanonPath, concat) { - { - CanonPath p1("a//foo/bar//"); - CanonPath p2("xyzzy/bla"); - ASSERT_EQ((p1 / p2).abs(), "/a/foo/bar/xyzzy/bla"); - } - - { - CanonPath p1("/"); - CanonPath p2("/a/b"); - ASSERT_EQ((p1 / p2).abs(), "/a/b"); - } - - { - CanonPath p1("/a/b"); - CanonPath p2("/"); - ASSERT_EQ((p1 / p2).abs(), "/a/b"); - } - - { - CanonPath p("/foo/bar"); - ASSERT_EQ((p / "x").abs(), "/foo/bar/x"); - } - - { - CanonPath p("/"); - ASSERT_EQ((p / "foo" / "bar").abs(), "/foo/bar"); - } - } - - TEST(CanonPath, within) { - ASSERT_TRUE(CanonPath("foo").isWithin(CanonPath("foo"))); - ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("bar"))); - ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("fo"))); - ASSERT_TRUE(CanonPath("foo/bar").isWithin(CanonPath("foo"))); - ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("foo/bar"))); - ASSERT_TRUE(CanonPath("/foo/bar/default.nix").isWithin(CanonPath("/"))); - ASSERT_TRUE(CanonPath("/").isWithin(CanonPath("/"))); - } - - TEST(CanonPath, sort) { - ASSERT_FALSE(CanonPath("foo") < CanonPath("foo")); - ASSERT_TRUE (CanonPath("foo") < CanonPath("foo/bar")); - ASSERT_TRUE (CanonPath("foo/bar") < CanonPath("foo!")); - ASSERT_FALSE(CanonPath("foo!") < CanonPath("foo")); - ASSERT_TRUE (CanonPath("foo") < CanonPath("foo!")); - } - - 
TEST(CanonPath, allowed) { - std::set allowed { - CanonPath("foo/bar"), - CanonPath("foo!"), - CanonPath("xyzzy"), - CanonPath("a/b/c"), - }; - - ASSERT_TRUE (CanonPath("foo/bar").isAllowed(allowed)); - ASSERT_TRUE (CanonPath("foo/bar/bla").isAllowed(allowed)); - ASSERT_TRUE (CanonPath("foo").isAllowed(allowed)); - ASSERT_FALSE(CanonPath("bar").isAllowed(allowed)); - ASSERT_FALSE(CanonPath("bar/a").isAllowed(allowed)); - ASSERT_TRUE (CanonPath("a").isAllowed(allowed)); - ASSERT_TRUE (CanonPath("a/b").isAllowed(allowed)); - ASSERT_TRUE (CanonPath("a/b/c").isAllowed(allowed)); - ASSERT_TRUE (CanonPath("a/b/c/d").isAllowed(allowed)); - ASSERT_TRUE (CanonPath("a/b/c/d/e").isAllowed(allowed)); - ASSERT_FALSE(CanonPath("a/b/a").isAllowed(allowed)); - ASSERT_FALSE(CanonPath("a/b/d").isAllowed(allowed)); - ASSERT_FALSE(CanonPath("aaa").isAllowed(allowed)); - ASSERT_FALSE(CanonPath("zzz").isAllowed(allowed)); - ASSERT_TRUE (CanonPath("/").isAllowed(allowed)); - } - - TEST(CanonPath, makeRelative) { - CanonPath d("/foo/bar"); - ASSERT_EQ(d.makeRelative(CanonPath("/foo/bar")), "."); - ASSERT_EQ(d.makeRelative(CanonPath("/foo")), ".."); - ASSERT_EQ(d.makeRelative(CanonPath("/")), "../.."); - ASSERT_EQ(d.makeRelative(CanonPath("/foo/bar/xyzzy")), "xyzzy"); - ASSERT_EQ(d.makeRelative(CanonPath("/foo/bar/xyzzy/bla")), "xyzzy/bla"); - ASSERT_EQ(d.makeRelative(CanonPath("/foo/xyzzy/bla")), "../xyzzy/bla"); - ASSERT_EQ(d.makeRelative(CanonPath("/xyzzy/bla")), "../../xyzzy/bla"); + { + CanonPath p("foo//bar/"); + ASSERT_EQ(p.abs(), "/foo/bar"); + ASSERT_EQ(p.rel(), "foo/bar"); + ASSERT_EQ(*p.baseName(), "bar"); + ASSERT_EQ(*p.dirOf(), "/foo"); } } + +TEST(CanonPath, from_existing) +{ + CanonPath p0("foo//bar/"); + { + CanonPath p("/baz//quux/", p0); + ASSERT_EQ(p.abs(), "/baz/quux"); + ASSERT_EQ(p.rel(), "baz/quux"); + ASSERT_EQ(*p.baseName(), "quux"); + ASSERT_EQ(*p.dirOf(), "/baz"); + } + { + CanonPath p("baz//quux/", p0); + ASSERT_EQ(p.abs(), "/foo/bar/baz/quux"); + ASSERT_EQ(p.rel(), "foo/bar/baz/quux"); + ASSERT_EQ(*p.baseName(), "quux"); + ASSERT_EQ(*p.dirOf(), "/foo/bar/baz"); + } +} + +TEST(CanonPath, pop) +{ + CanonPath p("foo/bar/x"); + ASSERT_EQ(p.abs(), "/foo/bar/x"); + p.pop(); + ASSERT_EQ(p.abs(), "/foo/bar"); + p.pop(); + ASSERT_EQ(p.abs(), "/foo"); + p.pop(); + ASSERT_EQ(p.abs(), "/"); +} + +TEST(CanonPath, removePrefix) +{ + CanonPath p1("foo/bar"); + CanonPath p2("foo/bar/a/b/c"); + ASSERT_EQ(p2.removePrefix(p1).abs(), "/a/b/c"); + ASSERT_EQ(p1.removePrefix(p1).abs(), "/"); + ASSERT_EQ(p1.removePrefix(CanonPath("/")).abs(), "/foo/bar"); +} + +TEST(CanonPath, iter) +{ + { + CanonPath p("a//foo/bar//"); + std::vector ss; + for (auto & c : p) + ss.push_back(c); + ASSERT_EQ(ss, std::vector({"a", "foo", "bar"})); + } + + { + CanonPath p("/"); + std::vector ss; + for (auto & c : p) + ss.push_back(c); + ASSERT_EQ(ss, std::vector()); + } +} + +TEST(CanonPath, concat) +{ + { + CanonPath p1("a//foo/bar//"); + CanonPath p2("xyzzy/bla"); + ASSERT_EQ((p1 / p2).abs(), "/a/foo/bar/xyzzy/bla"); + } + + { + CanonPath p1("/"); + CanonPath p2("/a/b"); + ASSERT_EQ((p1 / p2).abs(), "/a/b"); + } + + { + CanonPath p1("/a/b"); + CanonPath p2("/"); + ASSERT_EQ((p1 / p2).abs(), "/a/b"); + } + + { + CanonPath p("/foo/bar"); + ASSERT_EQ((p / "x").abs(), "/foo/bar/x"); + } + + { + CanonPath p("/"); + ASSERT_EQ((p / "foo" / "bar").abs(), "/foo/bar"); + } +} + +TEST(CanonPath, within) +{ + ASSERT_TRUE(CanonPath("foo").isWithin(CanonPath("foo"))); + ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("bar"))); + 
ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("fo"))); + ASSERT_TRUE(CanonPath("foo/bar").isWithin(CanonPath("foo"))); + ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("foo/bar"))); + ASSERT_TRUE(CanonPath("/foo/bar/default.nix").isWithin(CanonPath("/"))); + ASSERT_TRUE(CanonPath("/").isWithin(CanonPath("/"))); +} + +TEST(CanonPath, sort) +{ + ASSERT_FALSE(CanonPath("foo") < CanonPath("foo")); + ASSERT_TRUE(CanonPath("foo") < CanonPath("foo/bar")); + ASSERT_TRUE(CanonPath("foo/bar") < CanonPath("foo!")); + ASSERT_FALSE(CanonPath("foo!") < CanonPath("foo")); + ASSERT_TRUE(CanonPath("foo") < CanonPath("foo!")); +} + +TEST(CanonPath, allowed) +{ + std::set allowed{ + CanonPath("foo/bar"), + CanonPath("foo!"), + CanonPath("xyzzy"), + CanonPath("a/b/c"), + }; + + ASSERT_TRUE(CanonPath("foo/bar").isAllowed(allowed)); + ASSERT_TRUE(CanonPath("foo/bar/bla").isAllowed(allowed)); + ASSERT_TRUE(CanonPath("foo").isAllowed(allowed)); + ASSERT_FALSE(CanonPath("bar").isAllowed(allowed)); + ASSERT_FALSE(CanonPath("bar/a").isAllowed(allowed)); + ASSERT_TRUE(CanonPath("a").isAllowed(allowed)); + ASSERT_TRUE(CanonPath("a/b").isAllowed(allowed)); + ASSERT_TRUE(CanonPath("a/b/c").isAllowed(allowed)); + ASSERT_TRUE(CanonPath("a/b/c/d").isAllowed(allowed)); + ASSERT_TRUE(CanonPath("a/b/c/d/e").isAllowed(allowed)); + ASSERT_FALSE(CanonPath("a/b/a").isAllowed(allowed)); + ASSERT_FALSE(CanonPath("a/b/d").isAllowed(allowed)); + ASSERT_FALSE(CanonPath("aaa").isAllowed(allowed)); + ASSERT_FALSE(CanonPath("zzz").isAllowed(allowed)); + ASSERT_TRUE(CanonPath("/").isAllowed(allowed)); +} + +TEST(CanonPath, makeRelative) +{ + CanonPath d("/foo/bar"); + ASSERT_EQ(d.makeRelative(CanonPath("/foo/bar")), "."); + ASSERT_EQ(d.makeRelative(CanonPath("/foo")), ".."); + ASSERT_EQ(d.makeRelative(CanonPath("/")), "../.."); + ASSERT_EQ(d.makeRelative(CanonPath("/foo/bar/xyzzy")), "xyzzy"); + ASSERT_EQ(d.makeRelative(CanonPath("/foo/bar/xyzzy/bla")), "xyzzy/bla"); + ASSERT_EQ(d.makeRelative(CanonPath("/foo/xyzzy/bla")), "../xyzzy/bla"); + ASSERT_EQ(d.makeRelative(CanonPath("/xyzzy/bla")), "../../xyzzy/bla"); +} +} // namespace nix diff --git a/src/libutil-tests/checked-arithmetic.cc b/src/libutil-tests/checked-arithmetic.cc index 8056a430a..2b5970fb6 100644 --- a/src/libutil-tests/checked-arithmetic.cc +++ b/src/libutil-tests/checked-arithmetic.cc @@ -21,7 +21,7 @@ struct Arbitrary> } }; -} +} // namespace rc namespace nix::checked { @@ -155,4 +155,4 @@ TEST(Checked, div_signed_special_cases) checkDivision(0, 0); } -} +} // namespace nix::checked diff --git a/src/libutil-tests/chunked-vector.cc b/src/libutil-tests/chunked-vector.cc index c4f1d3858..52f87a0d5 100644 --- a/src/libutil-tests/chunked-vector.cc +++ b/src/libutil-tests/chunked-vector.cc @@ -3,52 +3,54 @@ #include namespace nix { - TEST(ChunkedVector, InitEmpty) { - auto v = ChunkedVector(100); - ASSERT_EQ(v.size(), 0u); - } - - TEST(ChunkedVector, GrowsCorrectly) { - auto v = ChunkedVector(100); - for (uint32_t i = 1; i < 20; i++) { - v.add(i); - ASSERT_EQ(v.size(), i); - } - } - - TEST(ChunkedVector, AddAndGet) { - auto v = ChunkedVector(100); - for (auto i = 1; i < 20; i++) { - auto [i2, idx] = v.add(i); - auto & i3 = v[idx]; - ASSERT_EQ(i, i2); - ASSERT_EQ(&i2, &i3); - } - } - - TEST(ChunkedVector, ForEach) { - auto v = ChunkedVector(100); - for (auto i = 1; i < 20; i++) { - v.add(i); - } - uint32_t count = 0; - v.forEach([&count](int elt) { - count++; - }); - ASSERT_EQ(count, v.size()); - } - - TEST(ChunkedVector, OverflowOK) { - // Similar to the AddAndGet, but 
intentionnally use a small - // initial ChunkedVector to force it to overflow - auto v = ChunkedVector(2); - for (auto i = 1; i < 20; i++) { - auto [i2, idx] = v.add(i); - auto & i3 = v[idx]; - ASSERT_EQ(i, i2); - ASSERT_EQ(&i2, &i3); - } - } - +TEST(ChunkedVector, InitEmpty) +{ + auto v = ChunkedVector(100); + ASSERT_EQ(v.size(), 0u); } +TEST(ChunkedVector, GrowsCorrectly) +{ + auto v = ChunkedVector(100); + for (uint32_t i = 1; i < 20; i++) { + v.add(i); + ASSERT_EQ(v.size(), i); + } +} + +TEST(ChunkedVector, AddAndGet) +{ + auto v = ChunkedVector(100); + for (auto i = 1; i < 20; i++) { + auto [i2, idx] = v.add(i); + auto & i3 = v[idx]; + ASSERT_EQ(i, i2); + ASSERT_EQ(&i2, &i3); + } +} + +TEST(ChunkedVector, ForEach) +{ + auto v = ChunkedVector(100); + for (auto i = 1; i < 20; i++) { + v.add(i); + } + uint32_t count = 0; + v.forEach([&count](int elt) { count++; }); + ASSERT_EQ(count, v.size()); +} + +TEST(ChunkedVector, OverflowOK) +{ + // Similar to the AddAndGet, but intentionnally use a small + // initial ChunkedVector to force it to overflow + auto v = ChunkedVector(2); + for (auto i = 1; i < 20; i++) { + auto [i2, idx] = v.add(i); + auto & i3 = v[idx]; + ASSERT_EQ(i, i2); + ASSERT_EQ(&i2, &i3); + } +} + +} // namespace nix diff --git a/src/libutil-tests/closure.cc b/src/libutil-tests/closure.cc index 6bbc128c2..9973ceeb0 100644 --- a/src/libutil-tests/closure.cc +++ b/src/libutil-tests/closure.cc @@ -6,48 +6,48 @@ namespace nix { using namespace std; map> testGraph = { - { "A", { "B", "C", "G" } }, - { "B", { "A" } }, // Loops back to A - { "C", { "F" } }, // Indirect reference - { "D", { "A" } }, // Not reachable, but has backreferences - { "E", {} }, // Just not reachable - { "F", {} }, - { "G", { "G" } }, // Self reference + {"A", {"B", "C", "G"}}, + {"B", {"A"}}, // Loops back to A + {"C", {"F"}}, // Indirect reference + {"D", {"A"}}, // Not reachable, but has backreferences + {"E", {}}, // Just not reachable + {"F", {}}, + {"G", {"G"}}, // Self reference }; -TEST(closure, correctClosure) { +TEST(closure, correctClosure) +{ set aClosure; set expectedClosure = {"A", "B", "C", "F", "G"}; computeClosure( - {"A"}, - aClosure, - [&](const string currentNode, function> &)> processEdges) { + {"A"}, aClosure, [&](const string currentNode, function> &)> processEdges) { promise> promisedNodes; promisedNodes.set_value(testGraph[currentNode]); processEdges(promisedNodes); - } - ); + }); ASSERT_EQ(aClosure, expectedClosure); } -TEST(closure, properlyHandlesDirectExceptions) { - struct TestExn {}; +TEST(closure, properlyHandlesDirectExceptions) +{ + struct TestExn + {}; + set aClosure; EXPECT_THROW( computeClosure( {"A"}, aClosure, - [&](const string currentNode, function> &)> processEdges) { - throw TestExn(); - } - ), - TestExn - ); + [&](const string currentNode, function> &)> processEdges) { throw TestExn(); }), + TestExn); } -TEST(closure, properlyHandlesExceptionsInPromise) { - struct TestExn {}; +TEST(closure, properlyHandlesExceptionsInPromise) +{ + struct TestExn + {}; + set aClosure; EXPECT_THROW( computeClosure( @@ -61,10 +61,8 @@ TEST(closure, properlyHandlesExceptionsInPromise) { promise.set_exception(std::current_exception()); } processEdges(promise); - } - ), - TestExn - ); + }), + TestExn); } -} +} // namespace nix diff --git a/src/libutil-tests/compression.cc b/src/libutil-tests/compression.cc index de0c7cdb6..c6d570471 100644 --- a/src/libutil-tests/compression.cc +++ b/src/libutil-tests/compression.cc @@ -3,94 +3,104 @@ namespace nix { - /* 
---------------------------------------------------------------------------- - * compress / decompress - * --------------------------------------------------------------------------*/ - - TEST(compress, compressWithUnknownMethod) { - ASSERT_THROW(compress("invalid-method", "something-to-compress"), UnknownCompressionMethod); - } - - TEST(compress, noneMethodDoesNothingToTheInput) { - auto o = compress("none", "this-is-a-test"); - - ASSERT_EQ(o, "this-is-a-test"); - } - - TEST(decompress, decompressNoneCompressed) { - auto method = "none"; - auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; - auto o = decompress(method, str); - - ASSERT_EQ(o, str); - } - - TEST(decompress, decompressEmptyCompressed) { - // Empty-method decompression used e.g. by S3 store - // (Content-Encoding == ""). - auto method = ""; - auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; - auto o = decompress(method, str); - - ASSERT_EQ(o, str); - } - - TEST(decompress, decompressXzCompressed) { - auto method = "xz"; - auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; - auto o = decompress(method, compress(method, str)); - - ASSERT_EQ(o, str); - } - - TEST(decompress, decompressBzip2Compressed) { - auto method = "bzip2"; - auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; - auto o = decompress(method, compress(method, str)); - - ASSERT_EQ(o, str); - } - - TEST(decompress, decompressBrCompressed) { - auto method = "br"; - auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; - auto o = decompress(method, compress(method, str)); - - ASSERT_EQ(o, str); - } - - TEST(decompress, decompressInvalidInputThrowsCompressionError) { - auto method = "bzip2"; - auto str = "this is a string that does not qualify as valid bzip2 data"; - - ASSERT_THROW(decompress(method, str), CompressionError); - } - - /* ---------------------------------------------------------------------------- - * compression sinks - * --------------------------------------------------------------------------*/ - - TEST(makeCompressionSink, noneSinkDoesNothingToInput) { - StringSink strSink; - auto inputString = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; - auto sink = makeCompressionSink("none", strSink); - (*sink)(inputString); - sink->finish(); - - ASSERT_STREQ(strSink.s.c_str(), inputString); - } - - TEST(makeCompressionSink, compressAndDecompress) { - StringSink strSink; - auto inputString = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; - auto decompressionSink = makeDecompressionSink("bzip2", strSink); - auto sink = makeCompressionSink("bzip2", *decompressionSink); - - (*sink)(inputString); - sink->finish(); - decompressionSink->finish(); - - ASSERT_STREQ(strSink.s.c_str(), inputString); - } +/* ---------------------------------------------------------------------------- + * compress / decompress + * --------------------------------------------------------------------------*/ +TEST(compress, compressWithUnknownMethod) +{ + ASSERT_THROW(compress("invalid-method", "something-to-compress"), UnknownCompressionMethod); } + +TEST(compress, noneMethodDoesNothingToTheInput) +{ + auto o = compress("none", "this-is-a-test"); + + ASSERT_EQ(o, "this-is-a-test"); +} + +TEST(decompress, decompressNoneCompressed) +{ + auto method = "none"; + auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; + auto o = decompress(method, str); + + ASSERT_EQ(o, str); +} + +TEST(decompress, decompressEmptyCompressed) +{ + // Empty-method decompression 
used e.g. by S3 store + // (Content-Encoding == ""). + auto method = ""; + auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; + auto o = decompress(method, str); + + ASSERT_EQ(o, str); +} + +TEST(decompress, decompressXzCompressed) +{ + auto method = "xz"; + auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; + auto o = decompress(method, compress(method, str)); + + ASSERT_EQ(o, str); +} + +TEST(decompress, decompressBzip2Compressed) +{ + auto method = "bzip2"; + auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; + auto o = decompress(method, compress(method, str)); + + ASSERT_EQ(o, str); +} + +TEST(decompress, decompressBrCompressed) +{ + auto method = "br"; + auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; + auto o = decompress(method, compress(method, str)); + + ASSERT_EQ(o, str); +} + +TEST(decompress, decompressInvalidInputThrowsCompressionError) +{ + auto method = "bzip2"; + auto str = "this is a string that does not qualify as valid bzip2 data"; + + ASSERT_THROW(decompress(method, str), CompressionError); +} + +/* ---------------------------------------------------------------------------- + * compression sinks + * --------------------------------------------------------------------------*/ + +TEST(makeCompressionSink, noneSinkDoesNothingToInput) +{ + StringSink strSink; + auto inputString = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; + auto sink = makeCompressionSink("none", strSink); + (*sink)(inputString); + sink->finish(); + + ASSERT_STREQ(strSink.s.c_str(), inputString); +} + +TEST(makeCompressionSink, compressAndDecompress) +{ + StringSink strSink; + auto inputString = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; + auto decompressionSink = makeDecompressionSink("bzip2", strSink); + auto sink = makeCompressionSink("bzip2", *decompressionSink); + + (*sink)(inputString); + sink->finish(); + decompressionSink->finish(); + + ASSERT_STREQ(strSink.s.c_str(), inputString); +} + +} // namespace nix diff --git a/src/libutil-tests/config.cc b/src/libutil-tests/config.cc index bc7db251b..5fb2229b6 100644 --- a/src/libutil-tests/config.cc +++ b/src/libutil-tests/config.cc @@ -7,169 +7,195 @@ namespace nix { - /* ---------------------------------------------------------------------------- - * Config - * --------------------------------------------------------------------------*/ +/* ---------------------------------------------------------------------------- + * Config + * --------------------------------------------------------------------------*/ - TEST(Config, setUndefinedSetting) { - Config config; - ASSERT_EQ(config.set("undefined-key", "value"), false); - } +TEST(Config, setUndefinedSetting) +{ + Config config; + ASSERT_EQ(config.set("undefined-key", "value"), false); +} - TEST(Config, setDefinedSetting) { - Config config; - std::string value; - Setting foo{&config, value, "name-of-the-setting", "description"}; - ASSERT_EQ(config.set("name-of-the-setting", "value"), true); - } +TEST(Config, setDefinedSetting) +{ + Config config; + std::string value; + Setting foo{&config, value, "name-of-the-setting", "description"}; + ASSERT_EQ(config.set("name-of-the-setting", "value"), true); +} - TEST(Config, getDefinedSetting) { - Config config; - std::string value; +TEST(Config, getDefinedSetting) +{ + Config config; + std::string value; + std::map settings; + Setting foo{&config, value, "name-of-the-setting", "description"}; + + config.getSettings(settings, /* overriddenOnly = */ false); + 
const auto iter = settings.find("name-of-the-setting"); + ASSERT_NE(iter, settings.end()); + ASSERT_EQ(iter->second.value, ""); + ASSERT_EQ(iter->second.description, "description\n"); +} + +TEST(Config, getDefinedOverriddenSettingNotSet) +{ + Config config; + std::string value; + std::map settings; + Setting foo{&config, value, "name-of-the-setting", "description"}; + + config.getSettings(settings, /* overriddenOnly = */ true); + const auto e = settings.find("name-of-the-setting"); + ASSERT_EQ(e, settings.end()); +} + +TEST(Config, getDefinedSettingSet1) +{ + Config config; + std::string value; + std::map settings; + Setting setting{&config, value, "name-of-the-setting", "description"}; + + setting.assign("value"); + + config.getSettings(settings, /* overriddenOnly = */ false); + const auto iter = settings.find("name-of-the-setting"); + ASSERT_NE(iter, settings.end()); + ASSERT_EQ(iter->second.value, "value"); + ASSERT_EQ(iter->second.description, "description\n"); +} + +TEST(Config, getDefinedSettingSet2) +{ + Config config; + std::map settings; + Setting setting{&config, "", "name-of-the-setting", "description"}; + + ASSERT_TRUE(config.set("name-of-the-setting", "value")); + + config.getSettings(settings, /* overriddenOnly = */ false); + const auto e = settings.find("name-of-the-setting"); + ASSERT_NE(e, settings.end()); + ASSERT_EQ(e->second.value, "value"); + ASSERT_EQ(e->second.description, "description\n"); +} + +TEST(Config, addSetting) +{ + class TestSetting : public AbstractSetting + { + public: + TestSetting() + : AbstractSetting("test", "test", {}) + { + } + + void set(const std::string & value, bool append) override {} + + std::string to_string() const override + { + return {}; + } + + bool isAppendable() override + { + return false; + } + }; + + Config config; + TestSetting setting; + + ASSERT_FALSE(config.set("test", "value")); + config.addSetting(&setting); + ASSERT_TRUE(config.set("test", "value")); + ASSERT_FALSE(config.set("extra-test", "value")); +} + +TEST(Config, withInitialValue) +{ + const StringMap initials = { + {"key", "value"}, + }; + Config config(initials); + + { std::map settings; - Setting foo{&config, value, "name-of-the-setting", "description"}; - config.getSettings(settings, /* overriddenOnly = */ false); - const auto iter = settings.find("name-of-the-setting"); - ASSERT_NE(iter, settings.end()); - ASSERT_EQ(iter->second.value, ""); - ASSERT_EQ(iter->second.description, "description\n"); + ASSERT_EQ(settings.find("key"), settings.end()); } - TEST(Config, getDefinedOverriddenSettingNotSet) { - Config config; - std::string value; - std::map settings; - Setting foo{&config, value, "name-of-the-setting", "description"}; + Setting setting{&config, "default-value", "key", "description"}; + { + std::map settings; + config.getSettings(settings, /* overriddenOnly = */ false); + ASSERT_EQ(settings["key"].value, "value"); + } +} + +TEST(Config, resetOverridden) +{ + Config config; + config.resetOverridden(); +} + +TEST(Config, resetOverriddenWithSetting) +{ + Config config; + Setting setting{&config, "", "name-of-the-setting", "description"}; + + { + std::map settings; + + setting.set("foo"); + ASSERT_EQ(setting.get(), "foo"); config.getSettings(settings, /* overriddenOnly = */ true); - const auto e = settings.find("name-of-the-setting"); - ASSERT_EQ(e, settings.end()); + ASSERT_TRUE(settings.empty()); } - TEST(Config, getDefinedSettingSet1) { - Config config; - std::string value; + { std::map settings; - Setting setting{&config, value, "name-of-the-setting", 
"description"}; - setting.assign("value"); - - config.getSettings(settings, /* overriddenOnly = */ false); - const auto iter = settings.find("name-of-the-setting"); - ASSERT_NE(iter, settings.end()); - ASSERT_EQ(iter->second.value, "value"); - ASSERT_EQ(iter->second.description, "description\n"); + setting.override("bar"); + ASSERT_TRUE(setting.overridden); + ASSERT_EQ(setting.get(), "bar"); + config.getSettings(settings, /* overriddenOnly = */ true); + ASSERT_FALSE(settings.empty()); } - TEST(Config, getDefinedSettingSet2) { - Config config; + { std::map settings; - Setting setting{&config, "", "name-of-the-setting", "description"}; - ASSERT_TRUE(config.set("name-of-the-setting", "value")); - - config.getSettings(settings, /* overriddenOnly = */ false); - const auto e = settings.find("name-of-the-setting"); - ASSERT_NE(e, settings.end()); - ASSERT_EQ(e->second.value, "value"); - ASSERT_EQ(e->second.description, "description\n"); - } - - TEST(Config, addSetting) { - class TestSetting : public AbstractSetting { - public: - TestSetting() : AbstractSetting("test", "test", {}) {} - void set(const std::string & value, bool append) override {} - std::string to_string() const override { return {}; } - bool isAppendable() override { return false; } - }; - - Config config; - TestSetting setting; - - ASSERT_FALSE(config.set("test", "value")); - config.addSetting(&setting); - ASSERT_TRUE(config.set("test", "value")); - ASSERT_FALSE(config.set("extra-test", "value")); - } - - TEST(Config, withInitialValue) { - const StringMap initials = { - { "key", "value" }, - }; - Config config(initials); - - { - std::map settings; - config.getSettings(settings, /* overriddenOnly = */ false); - ASSERT_EQ(settings.find("key"), settings.end()); - } - - Setting setting{&config, "default-value", "key", "description"}; - - { - std::map settings; - config.getSettings(settings, /* overriddenOnly = */ false); - ASSERT_EQ(settings["key"].value, "value"); - } - } - - TEST(Config, resetOverridden) { - Config config; config.resetOverridden(); + ASSERT_FALSE(setting.overridden); + config.getSettings(settings, /* overriddenOnly = */ true); + ASSERT_TRUE(settings.empty()); } +} - TEST(Config, resetOverriddenWithSetting) { - Config config; - Setting setting{&config, "", "name-of-the-setting", "description"}; +TEST(Config, toJSONOnEmptyConfig) +{ + ASSERT_EQ(Config().toJSON().dump(), "{}"); +} - { - std::map settings; +TEST(Config, toJSONOnNonEmptyConfig) +{ + using nlohmann::literals::operator"" _json; + Config config; + Setting setting{ + &config, + "", + "name-of-the-setting", + "description", + }; + setting.assign("value"); - setting.set("foo"); - ASSERT_EQ(setting.get(), "foo"); - config.getSettings(settings, /* overriddenOnly = */ true); - ASSERT_TRUE(settings.empty()); - } - - { - std::map settings; - - setting.override("bar"); - ASSERT_TRUE(setting.overridden); - ASSERT_EQ(setting.get(), "bar"); - config.getSettings(settings, /* overriddenOnly = */ true); - ASSERT_FALSE(settings.empty()); - } - - { - std::map settings; - - config.resetOverridden(); - ASSERT_FALSE(setting.overridden); - config.getSettings(settings, /* overriddenOnly = */ true); - ASSERT_TRUE(settings.empty()); - } - } - - TEST(Config, toJSONOnEmptyConfig) { - ASSERT_EQ(Config().toJSON().dump(), "{}"); - } - - TEST(Config, toJSONOnNonEmptyConfig) { - using nlohmann::literals::operator "" _json; - Config config; - Setting setting{ - &config, - "", - "name-of-the-setting", - "description", - }; - setting.assign("value"); - - ASSERT_EQ(config.toJSON(), - R"#({ + 
ASSERT_EQ( + config.toJSON(), + R"#({ "name-of-the-setting": { "aliases": [], "defaultValue": "", @@ -179,24 +205,26 @@ namespace nix { "experimentalFeature": null } })#"_json); - } +} - TEST(Config, toJSONOnNonEmptyConfigWithExperimentalSetting) { - using nlohmann::literals::operator "" _json; - Config config; - Setting setting{ - &config, - "", - "name-of-the-setting", - "description", - {}, - true, - Xp::Flakes, - }; - setting.assign("value"); +TEST(Config, toJSONOnNonEmptyConfigWithExperimentalSetting) +{ + using nlohmann::literals::operator"" _json; + Config config; + Setting setting{ + &config, + "", + "name-of-the-setting", + "description", + {}, + true, + Xp::Flakes, + }; + setting.assign("value"); - ASSERT_EQ(config.toJSON(), - R"#({ + ASSERT_EQ( + config.toJSON(), + R"#({ "name-of-the-setting": { "aliases": [], "defaultValue": "", @@ -206,90 +234,97 @@ namespace nix { "experimentalFeature": "flakes" } })#"_json); - } +} - TEST(Config, setSettingAlias) { - Config config; - Setting setting{&config, "", "some-int", "best number", { "another-int" }}; - ASSERT_TRUE(config.set("some-int", "1")); - ASSERT_EQ(setting.get(), "1"); - ASSERT_TRUE(config.set("another-int", "2")); - ASSERT_EQ(setting.get(), "2"); - ASSERT_TRUE(config.set("some-int", "3")); - ASSERT_EQ(setting.get(), "3"); - } +TEST(Config, setSettingAlias) +{ + Config config; + Setting setting{&config, "", "some-int", "best number", {"another-int"}}; + ASSERT_TRUE(config.set("some-int", "1")); + ASSERT_EQ(setting.get(), "1"); + ASSERT_TRUE(config.set("another-int", "2")); + ASSERT_EQ(setting.get(), "2"); + ASSERT_TRUE(config.set("some-int", "3")); + ASSERT_EQ(setting.get(), "3"); +} - /* FIXME: The reapplyUnknownSettings method doesn't seem to do anything - * useful (these days). Whenever we add a new setting to Config the - * unknown settings are always considered. In which case is this function - * actually useful? Is there some way to register a Setting without calling - * addSetting? */ - TEST(Config, DISABLED_reapplyUnknownSettings) { - Config config; - ASSERT_FALSE(config.set("name-of-the-setting", "unknownvalue")); - Setting setting{&config, "default", "name-of-the-setting", "description"}; - ASSERT_EQ(setting.get(), "default"); - config.reapplyUnknownSettings(); - ASSERT_EQ(setting.get(), "unknownvalue"); - } +/* FIXME: The reapplyUnknownSettings method doesn't seem to do anything + * useful (these days). Whenever we add a new setting to Config the + * unknown settings are always considered. In which case is this function + * actually useful? Is there some way to register a Setting without calling + * addSetting? 
*/ +TEST(Config, DISABLED_reapplyUnknownSettings) +{ + Config config; + ASSERT_FALSE(config.set("name-of-the-setting", "unknownvalue")); + Setting setting{&config, "default", "name-of-the-setting", "description"}; + ASSERT_EQ(setting.get(), "default"); + config.reapplyUnknownSettings(); + ASSERT_EQ(setting.get(), "unknownvalue"); +} - TEST(Config, applyConfigEmpty) { - Config config; - std::map settings; - config.applyConfig(""); - config.getSettings(settings); - ASSERT_TRUE(settings.empty()); - } +TEST(Config, applyConfigEmpty) +{ + Config config; + std::map settings; + config.applyConfig(""); + config.getSettings(settings); + ASSERT_TRUE(settings.empty()); +} - TEST(Config, applyConfigEmptyWithComment) { - Config config; - std::map settings; - config.applyConfig("# just a comment"); - config.getSettings(settings); - ASSERT_TRUE(settings.empty()); - } +TEST(Config, applyConfigEmptyWithComment) +{ + Config config; + std::map settings; + config.applyConfig("# just a comment"); + config.getSettings(settings); + ASSERT_TRUE(settings.empty()); +} - TEST(Config, applyConfigAssignment) { - Config config; - std::map settings; - Setting setting{&config, "", "name-of-the-setting", "description"}; +TEST(Config, applyConfigAssignment) +{ + Config config; + std::map settings; + Setting setting{&config, "", "name-of-the-setting", "description"}; + config.applyConfig( + "name-of-the-setting = value-from-file #useful comment\n" + "# name-of-the-setting = foo\n"); + config.getSettings(settings); + ASSERT_FALSE(settings.empty()); + ASSERT_EQ(settings["name-of-the-setting"].value, "value-from-file"); +} + +TEST(Config, applyConfigWithReassignedSetting) +{ + Config config; + std::map settings; + Setting setting{&config, "", "name-of-the-setting", "description"}; + config.applyConfig( + "name-of-the-setting = first-value\n" + "name-of-the-setting = second-value\n"); + config.getSettings(settings); + ASSERT_FALSE(settings.empty()); + ASSERT_EQ(settings["name-of-the-setting"].value, "second-value"); +} + +TEST(Config, applyConfigFailsOnMissingIncludes) +{ + Config config; + std::map settings; + Setting setting{&config, "", "name-of-the-setting", "description"}; + + ASSERT_THROW( config.applyConfig( - "name-of-the-setting = value-from-file #useful comment\n" - "# name-of-the-setting = foo\n" - ); - config.getSettings(settings); - ASSERT_FALSE(settings.empty()); - ASSERT_EQ(settings["name-of-the-setting"].value, "value-from-file"); - } - - TEST(Config, applyConfigWithReassignedSetting) { - Config config; - std::map settings; - Setting setting{&config, "", "name-of-the-setting", "description"}; - config.applyConfig( - "name-of-the-setting = first-value\n" - "name-of-the-setting = second-value\n" - ); - config.getSettings(settings); - ASSERT_FALSE(settings.empty()); - ASSERT_EQ(settings["name-of-the-setting"].value, "second-value"); - } - - TEST(Config, applyConfigFailsOnMissingIncludes) { - Config config; - std::map settings; - Setting setting{&config, "", "name-of-the-setting", "description"}; - - ASSERT_THROW(config.applyConfig( "name-of-the-setting = value-from-file\n" "# name-of-the-setting = foo\n" - "include /nix/store/does/not/exist.nix" - ), Error); - } - - TEST(Config, applyConfigInvalidThrows) { - Config config; - ASSERT_THROW(config.applyConfig("value == key"), UsageError); - ASSERT_THROW(config.applyConfig("value "), UsageError); - } + "include /nix/store/does/not/exist.nix"), + Error); } + +TEST(Config, applyConfigInvalidThrows) +{ + Config config; + ASSERT_THROW(config.applyConfig("value == key"), 
UsageError); + ASSERT_THROW(config.applyConfig("value "), UsageError); +} +} // namespace nix diff --git a/src/libutil-tests/executable-path.cc b/src/libutil-tests/executable-path.cc index 7229b14e6..d000c1fb9 100644 --- a/src/libutil-tests/executable-path.cc +++ b/src/libutil-tests/executable-path.cc @@ -61,4 +61,4 @@ TEST(ExecutablePath, elementyElemNormalize) EXPECT_EQ(s2, OS_STR("." PATH_VAR_SEP "." PATH_VAR_SEP "." PATH_VAR_SEP ".")); } -} +} // namespace nix diff --git a/src/libutil-tests/file-content-address.cc b/src/libutil-tests/file-content-address.cc index 92c6059a4..a6b10d4f6 100644 --- a/src/libutil-tests/file-content-address.cc +++ b/src/libutil-tests/file-content-address.cc @@ -9,20 +9,22 @@ namespace nix { * parseFileSerialisationMethod, renderFileSerialisationMethod * --------------------------------------------------------------------------*/ -TEST(FileSerialisationMethod, testRoundTripPrintParse_1) { +TEST(FileSerialisationMethod, testRoundTripPrintParse_1) +{ for (const FileSerialisationMethod fim : { - FileSerialisationMethod::Flat, - FileSerialisationMethod::NixArchive, - }) { + FileSerialisationMethod::Flat, + FileSerialisationMethod::NixArchive, + }) { EXPECT_EQ(parseFileSerialisationMethod(renderFileSerialisationMethod(fim)), fim); } } -TEST(FileSerialisationMethod, testRoundTripPrintParse_2) { +TEST(FileSerialisationMethod, testRoundTripPrintParse_2) +{ for (const std::string_view fimS : { - "flat", - "nar", - }) { + "flat", + "nar", + }) { EXPECT_EQ(renderFileSerialisationMethod(parseFileSerialisationMethod(fimS)), fimS); } } @@ -38,22 +40,24 @@ TEST(FileSerialisationMethod, testParseFileSerialisationMethodOptException) * parseFileIngestionMethod, renderFileIngestionMethod * --------------------------------------------------------------------------*/ -TEST(FileIngestionMethod, testRoundTripPrintParse_1) { +TEST(FileIngestionMethod, testRoundTripPrintParse_1) +{ for (const FileIngestionMethod fim : { - FileIngestionMethod::Flat, - FileIngestionMethod::NixArchive, - FileIngestionMethod::Git, - }) { + FileIngestionMethod::Flat, + FileIngestionMethod::NixArchive, + FileIngestionMethod::Git, + }) { EXPECT_EQ(parseFileIngestionMethod(renderFileIngestionMethod(fim)), fim); } } -TEST(FileIngestionMethod, testRoundTripPrintParse_2) { +TEST(FileIngestionMethod, testRoundTripPrintParse_2) +{ for (const std::string_view fimS : { - "flat", - "nar", - "git", - }) { + "flat", + "nar", + "git", + }) { EXPECT_EQ(renderFileIngestionMethod(parseFileIngestionMethod(fimS)), fimS); } } @@ -65,4 +69,4 @@ TEST(FileIngestionMethod, testParseFileIngestionMethodOptException) testing::ThrowsMessage(testing::HasSubstr("narwhal"))); } -} +} // namespace nix diff --git a/src/libutil-tests/file-system.cc b/src/libutil-tests/file-system.cc index 2d1058c4f..dfdd26088 100644 --- a/src/libutil-tests/file-system.cc +++ b/src/libutil-tests/file-system.cc @@ -318,4 +318,4 @@ TEST(DirectoryIterator, nonexistent) ASSERT_THROW(DirectoryIterator("/schnitzel/darmstadt/pommes"), SysError); } -} +} // namespace nix diff --git a/src/libutil-tests/git.cc b/src/libutil-tests/git.cc index 91432b76b..389f8583d 100644 --- a/src/libutil-tests/git.cc +++ b/src/libutil-tests/git.cc @@ -15,7 +15,8 @@ class GitTest : public CharacterizationTest public: - std::filesystem::path goldenMaster(std::string_view testStem) const override { + std::filesystem::path goldenMaster(std::string_view testStem) const override + { return unitTestData / std::string(testStem); } @@ -33,39 +34,44 @@ private: } }; -TEST(GitMode, 
gitMode_directory) { +TEST(GitMode, gitMode_directory) +{ Mode m = Mode::Directory; RawMode r = 0040000; ASSERT_EQ(static_cast(m), r); - ASSERT_EQ(decodeMode(r), std::optional { m }); + ASSERT_EQ(decodeMode(r), std::optional{m}); }; -TEST(GitMode, gitMode_executable) { +TEST(GitMode, gitMode_executable) +{ Mode m = Mode::Executable; RawMode r = 0100755; ASSERT_EQ(static_cast(m), r); - ASSERT_EQ(decodeMode(r), std::optional { m }); + ASSERT_EQ(decodeMode(r), std::optional{m}); }; -TEST(GitMode, gitMode_regular) { +TEST(GitMode, gitMode_regular) +{ Mode m = Mode::Regular; RawMode r = 0100644; ASSERT_EQ(static_cast(m), r); - ASSERT_EQ(decodeMode(r), std::optional { m }); + ASSERT_EQ(decodeMode(r), std::optional{m}); }; -TEST(GitMode, gitMode_symlink) { +TEST(GitMode, gitMode_symlink) +{ Mode m = Mode::Symlink; RawMode r = 0120000; ASSERT_EQ(static_cast(m), r); - ASSERT_EQ(decodeMode(r), std::optional { m }); + ASSERT_EQ(decodeMode(r), std::optional{m}); }; -TEST_F(GitTest, blob_read) { +TEST_F(GitTest, blob_read) +{ readTest("hello-world-blob.bin", [&](const auto & encoded) { - StringSource in { encoded }; + StringSource in{encoded}; StringSink out; - RegularFileSink out2 { out }; + RegularFileSink out2{out}; ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Blob); parseBlob(out2, CanonPath::root, in, BlobMode::Regular, mockXpSettings); @@ -75,7 +81,8 @@ TEST_F(GitTest, blob_read) { }); } -TEST_F(GitTest, blob_write) { +TEST_F(GitTest, blob_write) +{ writeTest("hello-world-blob.bin", [&]() { auto decoded = readFile(goldenMaster("hello-world.bin")); StringSink s; @@ -126,24 +133,31 @@ const static Tree tree = { }, }; -TEST_F(GitTest, tree_read) { +TEST_F(GitTest, tree_read) +{ readTest("tree.bin", [&](const auto & encoded) { - StringSource in { encoded }; + StringSource in{encoded}; NullFileSystemObjectSink out; Tree got; ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Tree); - parseTree(out, CanonPath::root, in, [&](auto & name, auto entry) { - auto name2 = std::string{name.rel()}; - if (entry.mode == Mode::Directory) - name2 += '/'; - got.insert_or_assign(name2, std::move(entry)); - }, mockXpSettings); + parseTree( + out, + CanonPath::root, + in, + [&](auto & name, auto entry) { + auto name2 = std::string{name.rel()}; + if (entry.mode == Mode::Directory) + name2 += '/'; + got.insert_or_assign(name2, std::move(entry)); + }, + mockXpSettings); ASSERT_EQ(got, tree); }); } -TEST_F(GitTest, tree_write) { +TEST_F(GitTest, tree_write) +{ writeTest("tree.bin", [&]() { StringSink s; dumpTree(tree, s, mockXpSettings); @@ -151,36 +165,38 @@ TEST_F(GitTest, tree_write) { }); } -TEST_F(GitTest, both_roundrip) { +TEST_F(GitTest, both_roundrip) +{ using File = MemorySourceAccessor::File; auto files = make_ref(); - files->root = File::Directory { - .contents { + files->root = File::Directory{ + .contents{ { "foo", - File::Regular { + File::Regular{ .contents = "hello\n\0\n\tworld!", }, }, { "bar", - File::Directory { - .contents = { + File::Directory{ + .contents = { - "baz", - File::Regular { - .executable = true, - .contents = "good day,\n\0\n\tworld!", + { + "baz", + File::Regular{ + .executable = true, + .contents = "good day,\n\0\n\tworld!", + }, + }, + { + "quux", + File::Symlink{ + .target = "/over/there", + }, }, }, - { - "quux", - File::Symlink { - .target = "/over/there", - }, - }, - }, }, }, }, @@ -191,14 +207,12 @@ TEST_F(GitTest, both_roundrip) { std::function dumpHook; dumpHook = [&](const SourcePath & path) { StringSink s; - HashSink hashSink { HashAlgorithm::SHA1 }; - 
TeeSink s2 { s, hashSink }; - auto mode = dump( - path, s2, dumpHook, - defaultPathFilter, mockXpSettings); + HashSink hashSink{HashAlgorithm::SHA1}; + TeeSink s2{s, hashSink}; + auto mode = dump(path, s2, dumpHook, defaultPathFilter, mockXpSettings); auto hash = hashSink.finish().first; cas.insert_or_assign(hash, std::move(s.s)); - return TreeEntry { + return TreeEntry{ .mode = mode, .hash = hash, }; @@ -208,13 +222,16 @@ TEST_F(GitTest, both_roundrip) { auto files2 = make_ref(); - MemorySink sinkFiles2 { *files2 }; + MemorySink sinkFiles2{*files2}; std::function mkSinkHook; mkSinkHook = [&](auto prefix, auto & hash, auto blobMode) { - StringSource in { cas[hash] }; + StringSource in{cas[hash]}; parse( - sinkFiles2, prefix, in, blobMode, + sinkFiles2, + prefix, + in, + blobMode, [&](const CanonPath & name, const auto & entry) { mkSinkHook( prefix / name, @@ -232,7 +249,8 @@ TEST_F(GitTest, both_roundrip) { ASSERT_EQ(files->root, files2->root); } -TEST(GitLsRemote, parseSymrefLineWithReference) { +TEST(GitLsRemote, parseSymrefLineWithReference) +{ auto line = "ref: refs/head/main HEAD"; auto res = parseLsRemoteLine(line); ASSERT_TRUE(res.has_value()); @@ -241,7 +259,8 @@ TEST(GitLsRemote, parseSymrefLineWithReference) { ASSERT_EQ(res->reference, "HEAD"); } -TEST(GitLsRemote, parseSymrefLineWithNoReference) { +TEST(GitLsRemote, parseSymrefLineWithNoReference) +{ auto line = "ref: refs/head/main"; auto res = parseLsRemoteLine(line); ASSERT_TRUE(res.has_value()); @@ -250,7 +269,8 @@ TEST(GitLsRemote, parseSymrefLineWithNoReference) { ASSERT_EQ(res->reference, std::nullopt); } -TEST(GitLsRemote, parseObjectRefLine) { +TEST(GitLsRemote, parseObjectRefLine) +{ auto line = "abc123 refs/head/main"; auto res = parseLsRemoteLine(line); ASSERT_TRUE(res.has_value()); @@ -259,4 +279,4 @@ TEST(GitLsRemote, parseObjectRefLine) { ASSERT_EQ(res->reference, "refs/head/main"); } -} +} // namespace nix diff --git a/src/libutil-tests/hash.cc b/src/libutil-tests/hash.cc index 3c71b0486..f9d425d92 100644 --- a/src/libutil-tests/hash.cc +++ b/src/libutil-tests/hash.cc @@ -24,111 +24,133 @@ private: } }; - /* ---------------------------------------------------------------------------- - * hashString - * --------------------------------------------------------------------------*/ +/* ---------------------------------------------------------------------------- + * hashString + * --------------------------------------------------------------------------*/ - TEST_F(BLAKE3HashTest, testKnownBLAKE3Hashes1) { - // values taken from: https://tools.ietf.org/html/rfc4634 - auto s = "abc"; - auto hash = hashString(HashAlgorithm::BLAKE3, s, mockXpSettings); - ASSERT_EQ(hash.to_string(HashFormat::Base16, true), - "blake3:6437b3ac38465133ffb63b75273a8db548c558465d79db03fd359c6cd5bd9d85"); - } +TEST_F(BLAKE3HashTest, testKnownBLAKE3Hashes1) +{ + // values taken from: https://tools.ietf.org/html/rfc4634 + auto s = "abc"; + auto hash = hashString(HashAlgorithm::BLAKE3, s, mockXpSettings); + ASSERT_EQ( + hash.to_string(HashFormat::Base16, true), + "blake3:6437b3ac38465133ffb63b75273a8db548c558465d79db03fd359c6cd5bd9d85"); +} - TEST_F(BLAKE3HashTest, testKnownBLAKE3Hashes2) { - // values taken from: https://tools.ietf.org/html/rfc4634 - auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"; - auto hash = hashString(HashAlgorithm::BLAKE3, s, mockXpSettings); - ASSERT_EQ(hash.to_string(HashFormat::Base16, true), - "blake3:c19012cc2aaf0dc3d8e5c45a1b79114d2df42abb2a410bf54be09e891af06ff8"); - } +TEST_F(BLAKE3HashTest, 
testKnownBLAKE3Hashes2) +{ + // values taken from: https://tools.ietf.org/html/rfc4634 + auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"; + auto hash = hashString(HashAlgorithm::BLAKE3, s, mockXpSettings); + ASSERT_EQ( + hash.to_string(HashFormat::Base16, true), + "blake3:c19012cc2aaf0dc3d8e5c45a1b79114d2df42abb2a410bf54be09e891af06ff8"); +} - TEST_F(BLAKE3HashTest, testKnownBLAKE3Hashes3) { - // values taken from: https://www.ietf.org/archive/id/draft-aumasson-blake3-00.txt - auto s = "IETF"; - auto hash = hashString(HashAlgorithm::BLAKE3, s, mockXpSettings); - ASSERT_EQ(hash.to_string(HashFormat::Base16, true), - "blake3:83a2de1ee6f4e6ab686889248f4ec0cf4cc5709446a682ffd1cbb4d6165181e2"); - } +TEST_F(BLAKE3HashTest, testKnownBLAKE3Hashes3) +{ + // values taken from: https://www.ietf.org/archive/id/draft-aumasson-blake3-00.txt + auto s = "IETF"; + auto hash = hashString(HashAlgorithm::BLAKE3, s, mockXpSettings); + ASSERT_EQ( + hash.to_string(HashFormat::Base16, true), + "blake3:83a2de1ee6f4e6ab686889248f4ec0cf4cc5709446a682ffd1cbb4d6165181e2"); +} - TEST(hashString, testKnownMD5Hashes1) { - // values taken from: https://tools.ietf.org/html/rfc1321 - auto s1 = ""; - auto hash = hashString(HashAlgorithm::MD5, s1); - ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:d41d8cd98f00b204e9800998ecf8427e"); - } +TEST(hashString, testKnownMD5Hashes1) +{ + // values taken from: https://tools.ietf.org/html/rfc1321 + auto s1 = ""; + auto hash = hashString(HashAlgorithm::MD5, s1); + ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:d41d8cd98f00b204e9800998ecf8427e"); +} - TEST(hashString, testKnownMD5Hashes2) { - // values taken from: https://tools.ietf.org/html/rfc1321 - auto s2 = "abc"; - auto hash = hashString(HashAlgorithm::MD5, s2); - ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:900150983cd24fb0d6963f7d28e17f72"); - } +TEST(hashString, testKnownMD5Hashes2) +{ + // values taken from: https://tools.ietf.org/html/rfc1321 + auto s2 = "abc"; + auto hash = hashString(HashAlgorithm::MD5, s2); + ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:900150983cd24fb0d6963f7d28e17f72"); +} - TEST(hashString, testKnownSHA1Hashes1) { - // values taken from: https://tools.ietf.org/html/rfc3174 - auto s = "abc"; - auto hash = hashString(HashAlgorithm::SHA1, s); - ASSERT_EQ(hash.to_string(HashFormat::Base16, true),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d"); - } +TEST(hashString, testKnownSHA1Hashes1) +{ + // values taken from: https://tools.ietf.org/html/rfc3174 + auto s = "abc"; + auto hash = hashString(HashAlgorithm::SHA1, s); + ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "sha1:a9993e364706816aba3e25717850c26c9cd0d89d"); +} - TEST(hashString, testKnownSHA1Hashes2) { - // values taken from: https://tools.ietf.org/html/rfc3174 - auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"; - auto hash = hashString(HashAlgorithm::SHA1, s); - ASSERT_EQ(hash.to_string(HashFormat::Base16, true),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1"); - } +TEST(hashString, testKnownSHA1Hashes2) +{ + // values taken from: https://tools.ietf.org/html/rfc3174 + auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"; + auto hash = hashString(HashAlgorithm::SHA1, s); + ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1"); +} - TEST(hashString, testKnownSHA256Hashes1) { - // values taken from: https://tools.ietf.org/html/rfc4634 - auto s = "abc"; +TEST(hashString, testKnownSHA256Hashes1) +{ + // values 
taken from: https://tools.ietf.org/html/rfc4634 + auto s = "abc"; - auto hash = hashString(HashAlgorithm::SHA256, s); - ASSERT_EQ(hash.to_string(HashFormat::Base16, true), - "sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad"); - } + auto hash = hashString(HashAlgorithm::SHA256, s); + ASSERT_EQ( + hash.to_string(HashFormat::Base16, true), + "sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad"); +} - TEST(hashString, testKnownSHA256Hashes2) { - // values taken from: https://tools.ietf.org/html/rfc4634 - auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"; - auto hash = hashString(HashAlgorithm::SHA256, s); - ASSERT_EQ(hash.to_string(HashFormat::Base16, true), - "sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1"); - } +TEST(hashString, testKnownSHA256Hashes2) +{ + // values taken from: https://tools.ietf.org/html/rfc4634 + auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"; + auto hash = hashString(HashAlgorithm::SHA256, s); + ASSERT_EQ( + hash.to_string(HashFormat::Base16, true), + "sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1"); +} - TEST(hashString, testKnownSHA512Hashes1) { - // values taken from: https://tools.ietf.org/html/rfc4634 - auto s = "abc"; - auto hash = hashString(HashAlgorithm::SHA512, s); - ASSERT_EQ(hash.to_string(HashFormat::Base16, true), - "sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9" - "7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd" - "454d4423643ce80e2a9ac94fa54ca49f"); - } - TEST(hashString, testKnownSHA512Hashes2) { - // values taken from: https://tools.ietf.org/html/rfc4634 - auto s = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"; +TEST(hashString, testKnownSHA512Hashes1) +{ + // values taken from: https://tools.ietf.org/html/rfc4634 + auto s = "abc"; + auto hash = hashString(HashAlgorithm::SHA512, s); + ASSERT_EQ( + hash.to_string(HashFormat::Base16, true), + "sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9" + "7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd" + "454d4423643ce80e2a9ac94fa54ca49f"); +} - auto hash = hashString(HashAlgorithm::SHA512, s); - ASSERT_EQ(hash.to_string(HashFormat::Base16, true), - "sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1" - "7299aeadb6889018501d289e4900f7e4331b99dec4b5433a" - "c7d329eeb6dd26545e96e55b874be909"); - } +TEST(hashString, testKnownSHA512Hashes2) +{ + // values taken from: https://tools.ietf.org/html/rfc4634 + auto s = + "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"; - /* ---------------------------------------------------------------------------- - * parseHashFormat, parseHashFormatOpt, printHashFormat - * --------------------------------------------------------------------------*/ + auto hash = hashString(HashAlgorithm::SHA512, s); + ASSERT_EQ( + hash.to_string(HashFormat::Base16, true), + "sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1" + "7299aeadb6889018501d289e4900f7e4331b99dec4b5433a" + "c7d329eeb6dd26545e96e55b874be909"); +} - TEST(hashFormat, testRoundTripPrintParse) { - for (const HashFormat hashFormat: { HashFormat::Base64, HashFormat::Nix32, HashFormat::Base16, HashFormat::SRI}) { - ASSERT_EQ(parseHashFormat(printHashFormat(hashFormat)), hashFormat); - ASSERT_EQ(*parseHashFormatOpt(printHashFormat(hashFormat)), hashFormat); - } - } +/* 
---------------------------------------------------------------------------- + * parseHashFormat, parseHashFormatOpt, printHashFormat + * --------------------------------------------------------------------------*/ - TEST(hashFormat, testParseHashFormatOptException) { - ASSERT_EQ(parseHashFormatOpt("sha0042"), std::nullopt); +TEST(hashFormat, testRoundTripPrintParse) +{ + for (const HashFormat hashFormat : {HashFormat::Base64, HashFormat::Nix32, HashFormat::Base16, HashFormat::SRI}) { + ASSERT_EQ(parseHashFormat(printHashFormat(hashFormat)), hashFormat); + ASSERT_EQ(*parseHashFormatOpt(printHashFormat(hashFormat)), hashFormat); } } + +TEST(hashFormat, testParseHashFormatOptException) +{ + ASSERT_EQ(parseHashFormatOpt("sha0042"), std::nullopt); +} +} // namespace nix diff --git a/src/libutil-tests/hilite.cc b/src/libutil-tests/hilite.cc index 98773afcf..6436ad684 100644 --- a/src/libutil-tests/hilite.cc +++ b/src/libutil-tests/hilite.cc @@ -5,61 +5,57 @@ namespace nix { /* ----------- tests for fmt.hh -------------------------------------------------*/ - TEST(hiliteMatches, noHighlight) { - ASSERT_STREQ(hiliteMatches("Hello, world!", std::vector(), "(", ")").c_str(), "Hello, world!"); - } - - TEST(hiliteMatches, simpleHighlight) { - std::string str = "Hello, world!"; - std::regex re = std::regex("world"); - auto matches = std::vector(std::sregex_iterator(str.begin(), str.end(), re), std::sregex_iterator()); - ASSERT_STREQ( - hiliteMatches(str, matches, "(", ")").c_str(), - "Hello, (world)!" - ); - } - - TEST(hiliteMatches, multipleMatches) { - std::string str = "Hello, world, world, world, world, world, world, Hello!"; - std::regex re = std::regex("world"); - auto matches = std::vector(std::sregex_iterator(str.begin(), str.end(), re), std::sregex_iterator()); - ASSERT_STREQ( - hiliteMatches(str, matches, "(", ")").c_str(), - "Hello, (world), (world), (world), (world), (world), (world), Hello!" - ); - } - - TEST(hiliteMatches, overlappingMatches) { - std::string str = "world, Hello, world, Hello, world, Hello, world, Hello, world!"; - std::regex re = std::regex("Hello, world"); - std::regex re2 = std::regex("world, Hello"); - auto v = std::vector(std::sregex_iterator(str.begin(), str.end(), re), std::sregex_iterator()); - for(auto it = std::sregex_iterator(str.begin(), str.end(), re2); it != std::sregex_iterator(); ++it) { - v.push_back(*it); - } - ASSERT_STREQ( - hiliteMatches(str, v, "(", ")").c_str(), - "(world, Hello, world, Hello, world, Hello, world, Hello, world)!" 
- ); - } - - TEST(hiliteMatches, complexOverlappingMatches) { - std::string str = "legacyPackages.x86_64-linux.git-crypt"; - std::vector regexes = { - std::regex("t-cry"), - std::regex("ux\\.git-cry"), - std::regex("git-c"), - std::regex("pt"), - }; - std::vector matches; - for (const auto & regex : regexes) { - for(auto it = std::sregex_iterator(str.begin(), str.end(), regex); it != std::sregex_iterator(); ++it) { - matches.push_back(*it); - } - } - ASSERT_STREQ( - hiliteMatches(str, matches, "(", ")").c_str(), - "legacyPackages.x86_64-lin(ux.git-crypt)" - ); - } +TEST(hiliteMatches, noHighlight) +{ + ASSERT_STREQ(hiliteMatches("Hello, world!", std::vector(), "(", ")").c_str(), "Hello, world!"); } + +TEST(hiliteMatches, simpleHighlight) +{ + std::string str = "Hello, world!"; + std::regex re = std::regex("world"); + auto matches = std::vector(std::sregex_iterator(str.begin(), str.end(), re), std::sregex_iterator()); + ASSERT_STREQ(hiliteMatches(str, matches, "(", ")").c_str(), "Hello, (world)!"); +} + +TEST(hiliteMatches, multipleMatches) +{ + std::string str = "Hello, world, world, world, world, world, world, Hello!"; + std::regex re = std::regex("world"); + auto matches = std::vector(std::sregex_iterator(str.begin(), str.end(), re), std::sregex_iterator()); + ASSERT_STREQ( + hiliteMatches(str, matches, "(", ")").c_str(), + "Hello, (world), (world), (world), (world), (world), (world), Hello!"); +} + +TEST(hiliteMatches, overlappingMatches) +{ + std::string str = "world, Hello, world, Hello, world, Hello, world, Hello, world!"; + std::regex re = std::regex("Hello, world"); + std::regex re2 = std::regex("world, Hello"); + auto v = std::vector(std::sregex_iterator(str.begin(), str.end(), re), std::sregex_iterator()); + for (auto it = std::sregex_iterator(str.begin(), str.end(), re2); it != std::sregex_iterator(); ++it) { + v.push_back(*it); + } + ASSERT_STREQ( + hiliteMatches(str, v, "(", ")").c_str(), "(world, Hello, world, Hello, world, Hello, world, Hello, world)!"); +} + +TEST(hiliteMatches, complexOverlappingMatches) +{ + std::string str = "legacyPackages.x86_64-linux.git-crypt"; + std::vector regexes = { + std::regex("t-cry"), + std::regex("ux\\.git-cry"), + std::regex("git-c"), + std::regex("pt"), + }; + std::vector matches; + for (const auto & regex : regexes) { + for (auto it = std::sregex_iterator(str.begin(), str.end(), regex); it != std::sregex_iterator(); ++it) { + matches.push_back(*it); + } + } + ASSERT_STREQ(hiliteMatches(str, matches, "(", ")").c_str(), "legacyPackages.x86_64-lin(ux.git-crypt)"); +} +} // namespace nix diff --git a/src/libutil-tests/json-utils.cc b/src/libutil-tests/json-utils.cc index 211f8bf1e..7d02894c6 100644 --- a/src/libutil-tests/json-utils.cc +++ b/src/libutil-tests/json-utils.cc @@ -12,14 +12,16 @@ namespace nix { * We are specifically interested in whether we can _nest_ optionals in STL * containers so we that we can leverage existing adl_serializer templates. 
*/ -TEST(to_json, optionalInt) { +TEST(to_json, optionalInt) +{ std::optional val = std::make_optional(420); ASSERT_EQ(nlohmann::json(val), nlohmann::json(420)); val = std::nullopt; ASSERT_EQ(nlohmann::json(val), nlohmann::json(nullptr)); } -TEST(to_json, vectorOfOptionalInts) { +TEST(to_json, vectorOfOptionalInts) +{ std::vector> vals = { std::make_optional(420), std::nullopt, @@ -27,17 +29,20 @@ TEST(to_json, vectorOfOptionalInts) { ASSERT_EQ(nlohmann::json(vals), nlohmann::json::parse("[420,null]")); } -TEST(to_json, optionalVectorOfInts) { - std::optional> val = std::make_optional(std::vector { - -420, - 420, - }); +TEST(to_json, optionalVectorOfInts) +{ + std::optional> val = std::make_optional( + std::vector{ + -420, + 420, + }); ASSERT_EQ(nlohmann::json(val), nlohmann::json::parse("[-420,420]")); val = std::nullopt; ASSERT_EQ(nlohmann::json(val), nlohmann::json(nullptr)); } -TEST(from_json, optionalInt) { +TEST(from_json, optionalInt) +{ nlohmann::json json = 420; std::optional val = json; ASSERT_TRUE(val.has_value()); @@ -47,8 +52,9 @@ TEST(from_json, optionalInt) { ASSERT_FALSE(val.has_value()); } -TEST(from_json, vectorOfOptionalInts) { - nlohmann::json json = { 420, nullptr }; +TEST(from_json, vectorOfOptionalInts) +{ + nlohmann::json json = {420, nullptr}; std::vector> vals = json; ASSERT_EQ(vals.size(), 2u); ASSERT_TRUE(vals.at(0).has_value()); @@ -56,7 +62,8 @@ TEST(from_json, vectorOfOptionalInts) { ASSERT_FALSE(vals.at(1).has_value()); } -TEST(valueAt, simpleObject) { +TEST(valueAt, simpleObject) +{ auto simple = R"({ "hello": "world" })"_json; ASSERT_EQ(valueAt(getObject(simple), "hello"), "world"); @@ -66,7 +73,8 @@ TEST(valueAt, simpleObject) { ASSERT_EQ(valueAt(valueAt(getObject(nested), "hello"), "world"), ""); } -TEST(valueAt, missingKey) { +TEST(valueAt, missingKey) +{ auto json = R"({ "hello": { "nested": "world" } })"_json; auto & obj = getObject(json); @@ -74,20 +82,22 @@ TEST(valueAt, missingKey) { ASSERT_THROW(valueAt(obj, "foo"), Error); } -TEST(getObject, rightAssertions) { +TEST(getObject, rightAssertions) +{ auto simple = R"({ "object": {} })"_json; - ASSERT_EQ(getObject(valueAt(getObject(simple), "object")), (nlohmann::json::object_t {})); + ASSERT_EQ(getObject(valueAt(getObject(simple), "object")), (nlohmann::json::object_t{})); auto nested = R"({ "object": { "object": {} } })"_json; auto nestedObject = getObject(valueAt(getObject(nested), "object")); ASSERT_EQ(nestedObject, getObject(nlohmann::json::parse(R"({ "object": {} })"))); - ASSERT_EQ(getObject(valueAt(getObject(nestedObject), "object")), (nlohmann::json::object_t {})); + ASSERT_EQ(getObject(valueAt(getObject(nestedObject), "object")), (nlohmann::json::object_t{})); } -TEST(getObject, wrongAssertions) { +TEST(getObject, wrongAssertions) +{ auto json = R"({ "object": {}, "array": [], "string": "", "int": 0, "boolean": false })"_json; auto & obj = getObject(json); @@ -98,13 +108,15 @@ TEST(getObject, wrongAssertions) { ASSERT_THROW(getObject(valueAt(obj, "boolean")), Error); } -TEST(getArray, rightAssertions) { +TEST(getArray, rightAssertions) +{ auto simple = R"({ "array": [] })"_json; - ASSERT_EQ(getArray(valueAt(getObject(simple), "array")), (nlohmann::json::array_t {})); + ASSERT_EQ(getArray(valueAt(getObject(simple), "array")), (nlohmann::json::array_t{})); } -TEST(getArray, wrongAssertions) { +TEST(getArray, wrongAssertions) +{ auto json = R"({ "object": {}, "array": [], "string": "", "int": 0, "boolean": false })"_json; ASSERT_THROW(getArray(valueAt(json, "object")), Error); @@ -113,13 
+125,15 @@ TEST(getArray, wrongAssertions) { ASSERT_THROW(getArray(valueAt(json, "boolean")), Error); } -TEST(getString, rightAssertions) { +TEST(getString, rightAssertions) +{ auto simple = R"({ "string": "" })"_json; ASSERT_EQ(getString(valueAt(getObject(simple), "string")), ""); } -TEST(getString, wrongAssertions) { +TEST(getString, wrongAssertions) +{ auto json = R"({ "object": {}, "array": [], "string": "", "int": 0, "boolean": false })"_json; ASSERT_THROW(getString(valueAt(json, "object")), Error); @@ -128,7 +142,8 @@ TEST(getString, wrongAssertions) { ASSERT_THROW(getString(valueAt(json, "boolean")), Error); } -TEST(getIntegralNumber, rightAssertions) { +TEST(getIntegralNumber, rightAssertions) +{ auto simple = R"({ "int": 0, "signed": -1 })"_json; ASSERT_EQ(getUnsigned(valueAt(getObject(simple), "int")), 0u); @@ -136,8 +151,10 @@ TEST(getIntegralNumber, rightAssertions) { ASSERT_EQ(getInteger(valueAt(getObject(simple), "signed")), -1); } -TEST(getIntegralNumber, wrongAssertions) { - auto json = R"({ "object": {}, "array": [], "string": "", "int": 0, "signed": -256, "large": 128, "boolean": false })"_json; +TEST(getIntegralNumber, wrongAssertions) +{ + auto json = + R"({ "object": {}, "array": [], "string": "", "int": 0, "signed": -256, "large": 128, "boolean": false })"_json; ASSERT_THROW(getUnsigned(valueAt(json, "object")), Error); ASSERT_THROW(getUnsigned(valueAt(json, "array")), Error); @@ -153,13 +170,15 @@ TEST(getIntegralNumber, wrongAssertions) { ASSERT_THROW(getInteger(valueAt(json, "signed")), Error); } -TEST(getBoolean, rightAssertions) { +TEST(getBoolean, rightAssertions) +{ auto simple = R"({ "boolean": false })"_json; ASSERT_EQ(getBoolean(valueAt(getObject(simple), "boolean")), false); } -TEST(getBoolean, wrongAssertions) { +TEST(getBoolean, wrongAssertions) +{ auto json = R"({ "object": {}, "array": [], "string": "", "int": 0, "boolean": false })"_json; ASSERT_THROW(getBoolean(valueAt(json, "object")), Error); @@ -168,25 +187,29 @@ TEST(getBoolean, wrongAssertions) { ASSERT_THROW(getBoolean(valueAt(json, "int")), Error); } -TEST(optionalValueAt, existing) { +TEST(optionalValueAt, existing) +{ auto json = R"({ "string": "ssh-rsa" })"_json; - ASSERT_EQ(optionalValueAt(json, "string"), std::optional { "ssh-rsa" }); + ASSERT_EQ(optionalValueAt(json, "string"), std::optional{"ssh-rsa"}); } -TEST(optionalValueAt, empty) { +TEST(optionalValueAt, empty) +{ auto json = R"({})"_json; ASSERT_EQ(optionalValueAt(json, "string"), std::nullopt); } -TEST(getNullable, null) { +TEST(getNullable, null) +{ auto json = R"(null)"_json; ASSERT_EQ(getNullable(json), nullptr); } -TEST(getNullable, empty) { +TEST(getNullable, empty) +{ auto json = R"({})"_json; auto * p = getNullable(json); diff --git a/src/libutil-tests/logging.cc b/src/libutil-tests/logging.cc index 5c9fcfe8f..e4ebccd49 100644 --- a/src/libutil-tests/logging.cc +++ b/src/libutil-tests/logging.cc @@ -1,10 +1,10 @@ #if 0 -#include "nix/util/logging.hh" -#include "nix/expr/nixexpr.hh" -#include +# include "nix/util/logging.hh" +# include "nix/expr/nixexpr.hh" +# include -#include +# include namespace nix { diff --git a/src/libutil-tests/lru-cache.cc b/src/libutil-tests/lru-cache.cc index a6a27cd3e..ed603cd44 100644 --- a/src/libutil-tests/lru-cache.cc +++ b/src/libutil-tests/lru-cache.cc @@ -3,128 +3,141 @@ namespace nix { - /* ---------------------------------------------------------------------------- - * size - * --------------------------------------------------------------------------*/ +/* 
---------------------------------------------------------------------------- + * size + * --------------------------------------------------------------------------*/ - TEST(LRUCache, sizeOfEmptyCacheIsZero) { - LRUCache c(10); - ASSERT_EQ(c.size(), 0u); - } - - TEST(LRUCache, sizeOfSingleElementCacheIsOne) { - LRUCache c(10); - c.upsert("foo", "bar"); - ASSERT_EQ(c.size(), 1u); - } - - /* ---------------------------------------------------------------------------- - * upsert / get - * --------------------------------------------------------------------------*/ - - TEST(LRUCache, getFromEmptyCache) { - LRUCache c(10); - auto val = c.get("x"); - ASSERT_EQ(val.has_value(), false); - } - - TEST(LRUCache, getExistingValue) { - LRUCache c(10); - c.upsert("foo", "bar"); - auto val = c.get("foo"); - ASSERT_EQ(val, "bar"); - } - - TEST(LRUCache, getNonExistingValueFromNonEmptyCache) { - LRUCache c(10); - c.upsert("foo", "bar"); - auto val = c.get("another"); - ASSERT_EQ(val.has_value(), false); - } - - TEST(LRUCache, upsertOnZeroCapacityCache) { - LRUCache c(0); - c.upsert("foo", "bar"); - auto val = c.get("foo"); - ASSERT_EQ(val.has_value(), false); - } - - TEST(LRUCache, updateExistingValue) { - LRUCache c(1); - c.upsert("foo", "bar"); - - auto val = c.get("foo"); - ASSERT_EQ(val.value_or("error"), "bar"); - ASSERT_EQ(c.size(), 1u); - - c.upsert("foo", "changed"); - val = c.get("foo"); - ASSERT_EQ(val.value_or("error"), "changed"); - ASSERT_EQ(c.size(), 1u); - } - - TEST(LRUCache, overwriteOldestWhenCapacityIsReached) { - LRUCache c(3); - c.upsert("one", "eins"); - c.upsert("two", "zwei"); - c.upsert("three", "drei"); - - ASSERT_EQ(c.size(), 3u); - ASSERT_EQ(c.get("one").value_or("error"), "eins"); - - // exceed capacity - c.upsert("another", "whatever"); - - ASSERT_EQ(c.size(), 3u); - // Retrieving "one" makes it the most recent element thus - // two will be the oldest one and thus replaced. 
- ASSERT_EQ(c.get("two").has_value(), false); - ASSERT_EQ(c.get("another").value(), "whatever"); - } - - /* ---------------------------------------------------------------------------- - * clear - * --------------------------------------------------------------------------*/ - - TEST(LRUCache, clearEmptyCache) { - LRUCache c(10); - c.clear(); - ASSERT_EQ(c.size(), 0u); - } - - TEST(LRUCache, clearNonEmptyCache) { - LRUCache c(10); - c.upsert("one", "eins"); - c.upsert("two", "zwei"); - c.upsert("three", "drei"); - ASSERT_EQ(c.size(), 3u); - c.clear(); - ASSERT_EQ(c.size(), 0u); - } - - /* ---------------------------------------------------------------------------- - * erase - * --------------------------------------------------------------------------*/ - - TEST(LRUCache, eraseFromEmptyCache) { - LRUCache c(10); - ASSERT_EQ(c.erase("foo"), false); - ASSERT_EQ(c.size(), 0u); - } - - TEST(LRUCache, eraseMissingFromNonEmptyCache) { - LRUCache c(10); - c.upsert("one", "eins"); - ASSERT_EQ(c.erase("foo"), false); - ASSERT_EQ(c.size(), 1u); - ASSERT_EQ(c.get("one").value_or("error"), "eins"); - } - - TEST(LRUCache, eraseFromNonEmptyCache) { - LRUCache c(10); - c.upsert("one", "eins"); - ASSERT_EQ(c.erase("one"), true); - ASSERT_EQ(c.size(), 0u); - ASSERT_EQ(c.get("one").value_or("empty"), "empty"); - } +TEST(LRUCache, sizeOfEmptyCacheIsZero) +{ + LRUCache c(10); + ASSERT_EQ(c.size(), 0u); } + +TEST(LRUCache, sizeOfSingleElementCacheIsOne) +{ + LRUCache c(10); + c.upsert("foo", "bar"); + ASSERT_EQ(c.size(), 1u); +} + +/* ---------------------------------------------------------------------------- + * upsert / get + * --------------------------------------------------------------------------*/ + +TEST(LRUCache, getFromEmptyCache) +{ + LRUCache c(10); + auto val = c.get("x"); + ASSERT_EQ(val.has_value(), false); +} + +TEST(LRUCache, getExistingValue) +{ + LRUCache c(10); + c.upsert("foo", "bar"); + auto val = c.get("foo"); + ASSERT_EQ(val, "bar"); +} + +TEST(LRUCache, getNonExistingValueFromNonEmptyCache) +{ + LRUCache c(10); + c.upsert("foo", "bar"); + auto val = c.get("another"); + ASSERT_EQ(val.has_value(), false); +} + +TEST(LRUCache, upsertOnZeroCapacityCache) +{ + LRUCache c(0); + c.upsert("foo", "bar"); + auto val = c.get("foo"); + ASSERT_EQ(val.has_value(), false); +} + +TEST(LRUCache, updateExistingValue) +{ + LRUCache c(1); + c.upsert("foo", "bar"); + + auto val = c.get("foo"); + ASSERT_EQ(val.value_or("error"), "bar"); + ASSERT_EQ(c.size(), 1u); + + c.upsert("foo", "changed"); + val = c.get("foo"); + ASSERT_EQ(val.value_or("error"), "changed"); + ASSERT_EQ(c.size(), 1u); +} + +TEST(LRUCache, overwriteOldestWhenCapacityIsReached) +{ + LRUCache c(3); + c.upsert("one", "eins"); + c.upsert("two", "zwei"); + c.upsert("three", "drei"); + + ASSERT_EQ(c.size(), 3u); + ASSERT_EQ(c.get("one").value_or("error"), "eins"); + + // exceed capacity + c.upsert("another", "whatever"); + + ASSERT_EQ(c.size(), 3u); + // Retrieving "one" makes it the most recent element thus + // two will be the oldest one and thus replaced. 
+ ASSERT_EQ(c.get("two").has_value(), false); + ASSERT_EQ(c.get("another").value(), "whatever"); +} + +/* ---------------------------------------------------------------------------- + * clear + * --------------------------------------------------------------------------*/ + +TEST(LRUCache, clearEmptyCache) +{ + LRUCache c(10); + c.clear(); + ASSERT_EQ(c.size(), 0u); +} + +TEST(LRUCache, clearNonEmptyCache) +{ + LRUCache c(10); + c.upsert("one", "eins"); + c.upsert("two", "zwei"); + c.upsert("three", "drei"); + ASSERT_EQ(c.size(), 3u); + c.clear(); + ASSERT_EQ(c.size(), 0u); +} + +/* ---------------------------------------------------------------------------- + * erase + * --------------------------------------------------------------------------*/ + +TEST(LRUCache, eraseFromEmptyCache) +{ + LRUCache c(10); + ASSERT_EQ(c.erase("foo"), false); + ASSERT_EQ(c.size(), 0u); +} + +TEST(LRUCache, eraseMissingFromNonEmptyCache) +{ + LRUCache c(10); + c.upsert("one", "eins"); + ASSERT_EQ(c.erase("foo"), false); + ASSERT_EQ(c.size(), 1u); + ASSERT_EQ(c.get("one").value_or("error"), "eins"); +} + +TEST(LRUCache, eraseFromNonEmptyCache) +{ + LRUCache c(10); + c.upsert("one", "eins"); + ASSERT_EQ(c.erase("one"), true); + ASSERT_EQ(c.size(), 0u); + ASSERT_EQ(c.get("one").value_or("empty"), "empty"); +} +} // namespace nix diff --git a/src/libutil-tests/monitorfdhup.cc b/src/libutil-tests/monitorfdhup.cc index 8e6fed6f0..d591b2fed 100644 --- a/src/libutil-tests/monitorfdhup.cc +++ b/src/libutil-tests/monitorfdhup.cc @@ -17,6 +17,6 @@ TEST(MonitorFdHup, shouldNotBlock) MonitorFdHup monitor(p.readSide.get()); } } -} +} // namespace nix #endif diff --git a/src/libutil-tests/nix_api_util.cc b/src/libutil-tests/nix_api_util.cc index baaaa81fc..9693ab3a5 100644 --- a/src/libutil-tests/nix_api_util.cc +++ b/src/libutil-tests/nix_api_util.cc @@ -155,4 +155,4 @@ TEST_F(nix_api_util_context, nix_err_code) ASSERT_EQ(nix_err_code(ctx), NIX_ERR_UNKNOWN); } -} +} // namespace nixC diff --git a/src/libutil-tests/pool.cc b/src/libutil-tests/pool.cc index d41bab8ed..68448a1cb 100644 --- a/src/libutil-tests/pool.cc +++ b/src/libutil-tests/pool.cc @@ -3,125 +3,133 @@ namespace nix { - struct TestResource +struct TestResource +{ + + TestResource() { - - TestResource() { - static int counter = 0; - num = counter++; - } - - int dummyValue = 1; - bool good = true; - int num; - }; - - /* ---------------------------------------------------------------------------- - * Pool - * --------------------------------------------------------------------------*/ - - TEST(Pool, freshPoolHasZeroCountAndSpecifiedCapacity) { - auto isGood = [](const ref & r) { return r->good; }; - auto createResource = []() { return make_ref(); }; - - Pool pool = Pool((size_t)1, createResource, isGood); - - ASSERT_EQ(pool.count(), 0u); - ASSERT_EQ(pool.capacity(), 1u); + static int counter = 0; + num = counter++; } - TEST(Pool, freshPoolCanGetAResource) { - auto isGood = [](const ref & r) { return r->good; }; - auto createResource = []() { return make_ref(); }; + int dummyValue = 1; + bool good = true; + int num; +}; - Pool pool = Pool((size_t)1, createResource, isGood); - ASSERT_EQ(pool.count(), 0u); +/* ---------------------------------------------------------------------------- + * Pool + * --------------------------------------------------------------------------*/ - TestResource r = *(pool.get()); +TEST(Pool, freshPoolHasZeroCountAndSpecifiedCapacity) +{ + auto isGood = [](const ref & r) { return r->good; }; + auto createResource = []() { return 
make_ref(); }; + Pool pool = Pool((size_t) 1, createResource, isGood); + + ASSERT_EQ(pool.count(), 0u); + ASSERT_EQ(pool.capacity(), 1u); +} + +TEST(Pool, freshPoolCanGetAResource) +{ + auto isGood = [](const ref & r) { return r->good; }; + auto createResource = []() { return make_ref(); }; + + Pool pool = Pool((size_t) 1, createResource, isGood); + ASSERT_EQ(pool.count(), 0u); + + TestResource r = *(pool.get()); + + ASSERT_EQ(pool.count(), 1u); + ASSERT_EQ(pool.capacity(), 1u); + ASSERT_EQ(r.dummyValue, 1); + ASSERT_EQ(r.good, true); +} + +TEST(Pool, capacityCanBeIncremented) +{ + auto isGood = [](const ref & r) { return r->good; }; + auto createResource = []() { return make_ref(); }; + + Pool pool = Pool((size_t) 1, createResource, isGood); + ASSERT_EQ(pool.capacity(), 1u); + pool.incCapacity(); + ASSERT_EQ(pool.capacity(), 2u); +} + +TEST(Pool, capacityCanBeDecremented) +{ + auto isGood = [](const ref & r) { return r->good; }; + auto createResource = []() { return make_ref(); }; + + Pool pool = Pool((size_t) 1, createResource, isGood); + ASSERT_EQ(pool.capacity(), 1u); + pool.decCapacity(); + ASSERT_EQ(pool.capacity(), 0u); +} + +TEST(Pool, flushBadDropsOutOfScopeResources) +{ + auto isGood = [](const ref & r) { return false; }; + auto createResource = []() { return make_ref(); }; + + Pool pool = Pool((size_t) 1, createResource, isGood); + + { + auto _r = pool.get(); ASSERT_EQ(pool.count(), 1u); - ASSERT_EQ(pool.capacity(), 1u); - ASSERT_EQ(r.dummyValue, 1); - ASSERT_EQ(r.good, true); } - TEST(Pool, capacityCanBeIncremented) { - auto isGood = [](const ref & r) { return r->good; }; - auto createResource = []() { return make_ref(); }; + pool.flushBad(); + ASSERT_EQ(pool.count(), 0u); +} - Pool pool = Pool((size_t)1, createResource, isGood); - ASSERT_EQ(pool.capacity(), 1u); - pool.incCapacity(); - ASSERT_EQ(pool.capacity(), 2u); - } +// Test that the resources we allocate are being reused when they are still good. +TEST(Pool, reuseResource) +{ + auto isGood = [](const ref & r) { return true; }; + auto createResource = []() { return make_ref(); }; - TEST(Pool, capacityCanBeDecremented) { - auto isGood = [](const ref & r) { return r->good; }; - auto createResource = []() { return make_ref(); }; + Pool pool = Pool((size_t) 1, createResource, isGood); - Pool pool = Pool((size_t)1, createResource, isGood); - ASSERT_EQ(pool.capacity(), 1u); - pool.decCapacity(); - ASSERT_EQ(pool.capacity(), 0u); - } + // Compare the instance counter between the two handles. We expect them to be equal + // as the pool should hand out the same (still) good one again. + int counter = -1; + { + Pool::Handle h = pool.get(); + counter = h->num; + } // the first handle goes out of scope - TEST(Pool, flushBadDropsOutOfScopeResources) { - auto isGood = [](const ref & r) { return false; }; - auto createResource = []() { return make_ref(); }; - - Pool pool = Pool((size_t)1, createResource, isGood); - - { - auto _r = pool.get(); - ASSERT_EQ(pool.count(), 1u); - } - - pool.flushBad(); - ASSERT_EQ(pool.count(), 0u); - } - - // Test that the resources we allocate are being reused when they are still good. - TEST(Pool, reuseResource) { - auto isGood = [](const ref & r) { return true; }; - auto createResource = []() { return make_ref(); }; - - Pool pool = Pool((size_t)1, createResource, isGood); - - // Compare the instance counter between the two handles. We expect them to be equal - // as the pool should hand out the same (still) good one again. 
- int counter = -1; - { - Pool::Handle h = pool.get(); - counter = h->num; - } // the first handle goes out of scope - - { // the second handle should contain the same resource (with the same counter value) - Pool::Handle h = pool.get(); - ASSERT_EQ(h->num, counter); - } - } - - // Test that the resources we allocate are being thrown away when they are no longer good. - TEST(Pool, badResourceIsNotReused) { - auto isGood = [](const ref & r) { return false; }; - auto createResource = []() { return make_ref(); }; - - Pool pool = Pool((size_t)1, createResource, isGood); - - // Compare the instance counter between the two handles. We expect them - // to *not* be equal as the pool should hand out a new instance after - // the first one was returned. - int counter = -1; - { - Pool::Handle h = pool.get(); - counter = h->num; - } // the first handle goes out of scope - - { - // the second handle should contain a different resource (with a - //different counter value) - Pool::Handle h = pool.get(); - ASSERT_NE(h->num, counter); - } + { // the second handle should contain the same resource (with the same counter value) + Pool::Handle h = pool.get(); + ASSERT_EQ(h->num, counter); } } + +// Test that the resources we allocate are being thrown away when they are no longer good. +TEST(Pool, badResourceIsNotReused) +{ + auto isGood = [](const ref & r) { return false; }; + auto createResource = []() { return make_ref(); }; + + Pool pool = Pool((size_t) 1, createResource, isGood); + + // Compare the instance counter between the two handles. We expect them + // to *not* be equal as the pool should hand out a new instance after + // the first one was returned. + int counter = -1; + { + Pool::Handle h = pool.get(); + counter = h->num; + } // the first handle goes out of scope + + { + // the second handle should contain a different resource (with a + // different counter value) + Pool::Handle h = pool.get(); + ASSERT_NE(h->num, counter); + } +} +} // namespace nix diff --git a/src/libutil-tests/position.cc b/src/libutil-tests/position.cc index fd65acd03..9a2354923 100644 --- a/src/libutil-tests/position.cc +++ b/src/libutil-tests/position.cc @@ -15,6 +15,7 @@ TEST(Position, getSnippetUpTo_0) Pos p(1, 1, o); ASSERT_EQ(p.getSnippetUpTo(p), ""); } + TEST(Position, getSnippetUpTo_1) { Pos::Origin o = makeStdin("x"); @@ -56,6 +57,7 @@ TEST(Position, getSnippetUpTo_1) ASSERT_EQ(end.getSnippetUpTo(start), std::nullopt); } } + TEST(Position, getSnippetUpTo_2) { Pos::Origin o = makeStdin("asdf\njkl\nqwer"); diff --git a/src/libutil-tests/references.cc b/src/libutil-tests/references.cc index 622b3c35a..b76db67cf 100644 --- a/src/libutil-tests/references.cc +++ b/src/libutil-tests/references.cc @@ -5,25 +5,27 @@ namespace nix { using std::string; -struct RewriteParams { +struct RewriteParams +{ string originalString, finalString; StringMap rewrites; - friend std::ostream& operator<<(std::ostream& os, const RewriteParams& bar) { + friend std::ostream & operator<<(std::ostream & os, const RewriteParams & bar) + { StringSet strRewrites; for (auto & [from, to] : bar.rewrites) strRewrites.insert(from + "->" + to); - return os << - "OriginalString: " << bar.originalString << std::endl << - "Rewrites: " << dropEmptyInitThenConcatStringsSep(",", strRewrites) << std::endl << - "Expected result: " << bar.finalString; + return os << "OriginalString: " << bar.originalString << std::endl + << "Rewrites: " << dropEmptyInitThenConcatStringsSep(",", strRewrites) << std::endl + << "Expected result: " << bar.finalString; } }; -class 
RewriteTest : public ::testing::TestWithParam { -}; +class RewriteTest : public ::testing::TestWithParam +{}; -TEST_P(RewriteTest, IdentityRewriteIsIdentity) { +TEST_P(RewriteTest, IdentityRewriteIsIdentity) +{ RewriteParams param = GetParam(); StringSink rewritten; auto rewriter = RewritingSink(param.rewrites, rewritten); @@ -36,11 +38,8 @@ INSTANTIATE_TEST_CASE_P( references, RewriteTest, ::testing::Values( - RewriteParams{ "foooo", "baroo", {{"foo", "bar"}, {"bar", "baz"}}}, - RewriteParams{ "foooo", "bazoo", {{"fou", "bar"}, {"foo", "baz"}}}, - RewriteParams{ "foooo", "foooo", {}} - ) -); - -} + RewriteParams{"foooo", "baroo", {{"foo", "bar"}, {"bar", "baz"}}}, + RewriteParams{"foooo", "bazoo", {{"fou", "bar"}, {"foo", "baz"}}}, + RewriteParams{"foooo", "foooo", {}})); +} // namespace nix diff --git a/src/libutil-tests/spawn.cc b/src/libutil-tests/spawn.cc index 594bced59..cf3645260 100644 --- a/src/libutil-tests/spawn.cc +++ b/src/libutil-tests/spawn.cc @@ -33,4 +33,4 @@ TEST(SpawnTest, windowsEscape) ASSERT_EQ(space, R"("hello world")"); } #endif -} +} // namespace nix diff --git a/src/libutil-tests/suggestions.cc b/src/libutil-tests/suggestions.cc index d21b286c8..a23e5d3f4 100644 --- a/src/libutil-tests/suggestions.cc +++ b/src/libutil-tests/suggestions.cc @@ -3,41 +3,43 @@ namespace nix { - struct LevenshteinDistanceParam { - std::string s1, s2; - int distance; - }; +struct LevenshteinDistanceParam +{ + std::string s1, s2; + int distance; +}; - class LevenshteinDistanceTest : - public testing::TestWithParam { - }; +class LevenshteinDistanceTest : public testing::TestWithParam +{}; - TEST_P(LevenshteinDistanceTest, CorrectlyComputed) { - auto params = GetParam(); +TEST_P(LevenshteinDistanceTest, CorrectlyComputed) +{ + auto params = GetParam(); - ASSERT_EQ(levenshteinDistance(params.s1, params.s2), params.distance); - ASSERT_EQ(levenshteinDistance(params.s2, params.s1), params.distance); - } - - INSTANTIATE_TEST_SUITE_P(LevenshteinDistance, LevenshteinDistanceTest, - testing::Values( - LevenshteinDistanceParam{"foo", "foo", 0}, - LevenshteinDistanceParam{"foo", "", 3}, - LevenshteinDistanceParam{"", "", 0}, - LevenshteinDistanceParam{"foo", "fo", 1}, - LevenshteinDistanceParam{"foo", "oo", 1}, - LevenshteinDistanceParam{"foo", "fao", 1}, - LevenshteinDistanceParam{"foo", "abc", 3} - ) - ); - - TEST(Suggestions, Trim) { - auto suggestions = Suggestions::bestMatches({"foooo", "bar", "fo", "gao"}, "foo"); - auto onlyOne = suggestions.trim(1); - ASSERT_EQ(onlyOne.suggestions.size(), 1u); - ASSERT_TRUE(onlyOne.suggestions.begin()->suggestion == "fo"); - - auto closest = suggestions.trim(999, 2); - ASSERT_EQ(closest.suggestions.size(), 3u); - } + ASSERT_EQ(levenshteinDistance(params.s1, params.s2), params.distance); + ASSERT_EQ(levenshteinDistance(params.s2, params.s1), params.distance); } + +INSTANTIATE_TEST_SUITE_P( + LevenshteinDistance, + LevenshteinDistanceTest, + testing::Values( + LevenshteinDistanceParam{"foo", "foo", 0}, + LevenshteinDistanceParam{"foo", "", 3}, + LevenshteinDistanceParam{"", "", 0}, + LevenshteinDistanceParam{"foo", "fo", 1}, + LevenshteinDistanceParam{"foo", "oo", 1}, + LevenshteinDistanceParam{"foo", "fao", 1}, + LevenshteinDistanceParam{"foo", "abc", 3})); + +TEST(Suggestions, Trim) +{ + auto suggestions = Suggestions::bestMatches({"foooo", "bar", "fo", "gao"}, "foo"); + auto onlyOne = suggestions.trim(1); + ASSERT_EQ(onlyOne.suggestions.size(), 1u); + ASSERT_TRUE(onlyOne.suggestions.begin()->suggestion == "fo"); + + auto closest = suggestions.trim(999, 2); 
+ ASSERT_EQ(closest.suggestions.size(), 3u); +} +} // namespace nix diff --git a/src/libutil-tests/url.cc b/src/libutil-tests/url.cc index c93a96d84..2a2bba880 100644 --- a/src/libutil-tests/url.cc +++ b/src/libutil-tests/url.cc @@ -5,313 +5,338 @@ namespace nix { /* ----------- tests for url.hh --------------------------------------------------*/ - std::string print_map(StringMap m) { - StringMap::iterator it; - std::string s = "{ "; - for (it = m.begin(); it != m.end(); ++it) { - s += "{ "; - s += it->first; - s += " = "; - s += it->second; - s += " } "; - } - s += "}"; - return s; +std::string print_map(StringMap m) +{ + StringMap::iterator it; + std::string s = "{ "; + for (it = m.begin(); it != m.end(); ++it) { + s += "{ "; + s += it->first; + s += " = "; + s += it->second; + s += " } "; } - - - TEST(parseURL, parsesSimpleHttpUrl) { - auto s = "http://www.example.org/file.tar.gz"; - auto parsed = parseURL(s); - - ParsedURL expected { - .scheme = "http", - .authority = "www.example.org", - .path = "/file.tar.gz", - .query = (StringMap) { }, - .fragment = "", - }; - - ASSERT_EQ(parsed, expected); - } - - TEST(parseURL, parsesSimpleHttpsUrl) { - auto s = "https://www.example.org/file.tar.gz"; - auto parsed = parseURL(s); - - ParsedURL expected { - .scheme = "https", - .authority = "www.example.org", - .path = "/file.tar.gz", - .query = (StringMap) { }, - .fragment = "", - }; - - ASSERT_EQ(parsed, expected); - } - - TEST(parseURL, parsesSimpleHttpUrlWithQueryAndFragment) { - auto s = "https://www.example.org/file.tar.gz?download=fast&when=now#hello"; - auto parsed = parseURL(s); - - ParsedURL expected { - .scheme = "https", - .authority = "www.example.org", - .path = "/file.tar.gz", - .query = (StringMap) { { "download", "fast" }, { "when", "now" } }, - .fragment = "hello", - }; - - ASSERT_EQ(parsed, expected); - } - - TEST(parseURL, parsesSimpleHttpUrlWithComplexFragment) { - auto s = "http://www.example.org/file.tar.gz?field=value#?foo=bar%23"; - auto parsed = parseURL(s); - - ParsedURL expected { - .scheme = "http", - .authority = "www.example.org", - .path = "/file.tar.gz", - .query = (StringMap) { { "field", "value" } }, - .fragment = "?foo=bar#", - }; - - ASSERT_EQ(parsed, expected); - } - - TEST(parseURL, parsesFilePlusHttpsUrl) { - auto s = "file+https://www.example.org/video.mp4"; - auto parsed = parseURL(s); - - ParsedURL expected { - .scheme = "file+https", - .authority = "www.example.org", - .path = "/video.mp4", - .query = (StringMap) { }, - .fragment = "", - }; - - ASSERT_EQ(parsed, expected); - } - - TEST(parseURL, rejectsAuthorityInUrlsWithFileTransportation) { - auto s = "file://www.example.org/video.mp4"; - ASSERT_THROW(parseURL(s), Error); - } - - TEST(parseURL, parseIPv4Address) { - auto s = "http://127.0.0.1:8080/file.tar.gz?download=fast&when=now#hello"; - auto parsed = parseURL(s); - - ParsedURL expected { - .scheme = "http", - .authority = "127.0.0.1:8080", - .path = "/file.tar.gz", - .query = (StringMap) { { "download", "fast" }, { "when", "now" } }, - .fragment = "hello", - }; - - ASSERT_EQ(parsed, expected); - } - - TEST(parseURL, parseScopedRFC4007IPv6Address) { - auto s = "http://[fe80::818c:da4d:8975:415c\%enp0s25]:8080"; - auto parsed = parseURL(s); - - ParsedURL expected { - .scheme = "http", - .authority = "[fe80::818c:da4d:8975:415c\%enp0s25]:8080", - .path = "", - .query = (StringMap) { }, - .fragment = "", - }; - - ASSERT_EQ(parsed, expected); - - } - - TEST(parseURL, parseIPv6Address) { - auto s = 
"http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080"; - auto parsed = parseURL(s); - - ParsedURL expected { - .scheme = "http", - .authority = "[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080", - .path = "", - .query = (StringMap) { }, - .fragment = "", - }; - - ASSERT_EQ(parsed, expected); - - } - - TEST(parseURL, parseEmptyQueryParams) { - auto s = "http://127.0.0.1:8080/file.tar.gz?&&&&&"; - auto parsed = parseURL(s); - ASSERT_EQ(parsed.query, (StringMap) { }); - } - - TEST(parseURL, parseUserPassword) { - auto s = "http://user:pass@www.example.org:8080/file.tar.gz"; - auto parsed = parseURL(s); - - ParsedURL expected { - .scheme = "http", - .authority = "user:pass@www.example.org:8080", - .path = "/file.tar.gz", - .query = (StringMap) { }, - .fragment = "", - }; - - - ASSERT_EQ(parsed, expected); - } - - TEST(parseURL, parseFileURLWithQueryAndFragment) { - auto s = "file:///none/of//your/business"; - auto parsed = parseURL(s); - - ParsedURL expected { - .scheme = "file", - .authority = "", - .path = "/none/of//your/business", - .query = (StringMap) { }, - .fragment = "", - }; - - ASSERT_EQ(parsed, expected); - - } - - TEST(parseURL, parsedUrlsIsEqualToItself) { - auto s = "http://www.example.org/file.tar.gz"; - auto url = parseURL(s); - - ASSERT_TRUE(url == url); - } - - TEST(parseURL, parseFTPUrl) { - auto s = "ftp://ftp.nixos.org/downloads/nixos.iso"; - auto parsed = parseURL(s); - - ParsedURL expected { - .scheme = "ftp", - .authority = "ftp.nixos.org", - .path = "/downloads/nixos.iso", - .query = (StringMap) { }, - .fragment = "", - }; - - ASSERT_EQ(parsed, expected); - } - - TEST(parseURL, parsesAnythingInUriFormat) { - auto s = "whatever://github.com/NixOS/nixpkgs.git"; - auto parsed = parseURL(s); - } - - TEST(parseURL, parsesAnythingInUriFormatWithoutDoubleSlash) { - auto s = "whatever:github.com/NixOS/nixpkgs.git"; - auto parsed = parseURL(s); - } - - TEST(parseURL, emptyStringIsInvalidURL) { - ASSERT_THROW(parseURL(""), Error); - } - - /* ---------------------------------------------------------------------------- - * decodeQuery - * --------------------------------------------------------------------------*/ - - TEST(decodeQuery, emptyStringYieldsEmptyMap) { - auto d = decodeQuery(""); - ASSERT_EQ(d, (StringMap) { }); - } - - TEST(decodeQuery, simpleDecode) { - auto d = decodeQuery("yi=one&er=two"); - ASSERT_EQ(d, ((StringMap) { { "yi", "one" }, { "er", "two" } })); - } - - TEST(decodeQuery, decodeUrlEncodedArgs) { - auto d = decodeQuery("arg=%3D%3D%40%3D%3D"); - ASSERT_EQ(d, ((StringMap) { { "arg", "==@==" } })); - } - - TEST(decodeQuery, decodeArgWithEmptyValue) { - auto d = decodeQuery("arg="); - ASSERT_EQ(d, ((StringMap) { { "arg", ""} })); - } - - /* ---------------------------------------------------------------------------- - * percentDecode - * --------------------------------------------------------------------------*/ - - TEST(percentDecode, decodesUrlEncodedString) { - std::string s = "==@=="; - std::string d = percentDecode("%3D%3D%40%3D%3D"); - ASSERT_EQ(d, s); - } - - TEST(percentDecode, multipleDecodesAreIdempotent) { - std::string once = percentDecode("%3D%3D%40%3D%3D"); - std::string twice = percentDecode(once); - - ASSERT_EQ(once, twice); - } - - TEST(percentDecode, trailingPercent) { - std::string s = "==@==%"; - std::string d = percentDecode("%3D%3D%40%3D%3D%25"); - - ASSERT_EQ(d, s); - } - - - /* ---------------------------------------------------------------------------- - * percentEncode - * 
--------------------------------------------------------------------------*/ - - TEST(percentEncode, encodesUrlEncodedString) { - std::string s = percentEncode("==@=="); - std::string d = "%3D%3D%40%3D%3D"; - ASSERT_EQ(d, s); - } - - TEST(percentEncode, keepArgument) { - std::string a = percentEncode("abd / def"); - std::string b = percentEncode("abd / def", "/"); - ASSERT_EQ(a, "abd%20%2F%20def"); - ASSERT_EQ(b, "abd%20/%20def"); - } - - TEST(percentEncode, inverseOfDecode) { - std::string original = "%3D%3D%40%3D%3D"; - std::string once = percentEncode(original); - std::string back = percentDecode(once); - - ASSERT_EQ(back, original); - } - - TEST(percentEncode, trailingPercent) { - std::string s = percentEncode("==@==%"); - std::string d = "%3D%3D%40%3D%3D%25"; - - ASSERT_EQ(d, s); - } - - TEST(percentEncode, yen) { - // https://en.wikipedia.org/wiki/Percent-encoding#Character_data - std::string s = reinterpret_cast(u8"円"); - std::string e = "%E5%86%86"; - - ASSERT_EQ(percentEncode(s), e); - ASSERT_EQ(percentDecode(e), s); - } - -TEST(nix, isValidSchemeName) { + s += "}"; + return s; +} + +TEST(parseURL, parsesSimpleHttpUrl) +{ + auto s = "http://www.example.org/file.tar.gz"; + auto parsed = parseURL(s); + + ParsedURL expected{ + .scheme = "http", + .authority = "www.example.org", + .path = "/file.tar.gz", + .query = (StringMap) {}, + .fragment = "", + }; + + ASSERT_EQ(parsed, expected); +} + +TEST(parseURL, parsesSimpleHttpsUrl) +{ + auto s = "https://www.example.org/file.tar.gz"; + auto parsed = parseURL(s); + + ParsedURL expected{ + .scheme = "https", + .authority = "www.example.org", + .path = "/file.tar.gz", + .query = (StringMap) {}, + .fragment = "", + }; + + ASSERT_EQ(parsed, expected); +} + +TEST(parseURL, parsesSimpleHttpUrlWithQueryAndFragment) +{ + auto s = "https://www.example.org/file.tar.gz?download=fast&when=now#hello"; + auto parsed = parseURL(s); + + ParsedURL expected{ + .scheme = "https", + .authority = "www.example.org", + .path = "/file.tar.gz", + .query = (StringMap) {{"download", "fast"}, {"when", "now"}}, + .fragment = "hello", + }; + + ASSERT_EQ(parsed, expected); +} + +TEST(parseURL, parsesSimpleHttpUrlWithComplexFragment) +{ + auto s = "http://www.example.org/file.tar.gz?field=value#?foo=bar%23"; + auto parsed = parseURL(s); + + ParsedURL expected{ + .scheme = "http", + .authority = "www.example.org", + .path = "/file.tar.gz", + .query = (StringMap) {{"field", "value"}}, + .fragment = "?foo=bar#", + }; + + ASSERT_EQ(parsed, expected); +} + +TEST(parseURL, parsesFilePlusHttpsUrl) +{ + auto s = "file+https://www.example.org/video.mp4"; + auto parsed = parseURL(s); + + ParsedURL expected{ + .scheme = "file+https", + .authority = "www.example.org", + .path = "/video.mp4", + .query = (StringMap) {}, + .fragment = "", + }; + + ASSERT_EQ(parsed, expected); +} + +TEST(parseURL, rejectsAuthorityInUrlsWithFileTransportation) +{ + auto s = "file://www.example.org/video.mp4"; + ASSERT_THROW(parseURL(s), Error); +} + +TEST(parseURL, parseIPv4Address) +{ + auto s = "http://127.0.0.1:8080/file.tar.gz?download=fast&when=now#hello"; + auto parsed = parseURL(s); + + ParsedURL expected{ + .scheme = "http", + .authority = "127.0.0.1:8080", + .path = "/file.tar.gz", + .query = (StringMap) {{"download", "fast"}, {"when", "now"}}, + .fragment = "hello", + }; + + ASSERT_EQ(parsed, expected); +} + +TEST(parseURL, parseScopedRFC4007IPv6Address) +{ + auto s = "http://[fe80::818c:da4d:8975:415c\%enp0s25]:8080"; + auto parsed = parseURL(s); + + ParsedURL expected{ + .scheme = "http", + 
.authority = "[fe80::818c:da4d:8975:415c\%enp0s25]:8080", + .path = "", + .query = (StringMap) {}, + .fragment = "", + }; + + ASSERT_EQ(parsed, expected); +} + +TEST(parseURL, parseIPv6Address) +{ + auto s = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080"; + auto parsed = parseURL(s); + + ParsedURL expected{ + .scheme = "http", + .authority = "[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080", + .path = "", + .query = (StringMap) {}, + .fragment = "", + }; + + ASSERT_EQ(parsed, expected); +} + +TEST(parseURL, parseEmptyQueryParams) +{ + auto s = "http://127.0.0.1:8080/file.tar.gz?&&&&&"; + auto parsed = parseURL(s); + ASSERT_EQ(parsed.query, (StringMap) {}); +} + +TEST(parseURL, parseUserPassword) +{ + auto s = "http://user:pass@www.example.org:8080/file.tar.gz"; + auto parsed = parseURL(s); + + ParsedURL expected{ + .scheme = "http", + .authority = "user:pass@www.example.org:8080", + .path = "/file.tar.gz", + .query = (StringMap) {}, + .fragment = "", + }; + + ASSERT_EQ(parsed, expected); +} + +TEST(parseURL, parseFileURLWithQueryAndFragment) +{ + auto s = "file:///none/of//your/business"; + auto parsed = parseURL(s); + + ParsedURL expected{ + .scheme = "file", + .authority = "", + .path = "/none/of//your/business", + .query = (StringMap) {}, + .fragment = "", + }; + + ASSERT_EQ(parsed, expected); +} + +TEST(parseURL, parsedUrlsIsEqualToItself) +{ + auto s = "http://www.example.org/file.tar.gz"; + auto url = parseURL(s); + + ASSERT_TRUE(url == url); +} + +TEST(parseURL, parseFTPUrl) +{ + auto s = "ftp://ftp.nixos.org/downloads/nixos.iso"; + auto parsed = parseURL(s); + + ParsedURL expected{ + .scheme = "ftp", + .authority = "ftp.nixos.org", + .path = "/downloads/nixos.iso", + .query = (StringMap) {}, + .fragment = "", + }; + + ASSERT_EQ(parsed, expected); +} + +TEST(parseURL, parsesAnythingInUriFormat) +{ + auto s = "whatever://github.com/NixOS/nixpkgs.git"; + auto parsed = parseURL(s); +} + +TEST(parseURL, parsesAnythingInUriFormatWithoutDoubleSlash) +{ + auto s = "whatever:github.com/NixOS/nixpkgs.git"; + auto parsed = parseURL(s); +} + +TEST(parseURL, emptyStringIsInvalidURL) +{ + ASSERT_THROW(parseURL(""), Error); +} + +/* ---------------------------------------------------------------------------- + * decodeQuery + * --------------------------------------------------------------------------*/ + +TEST(decodeQuery, emptyStringYieldsEmptyMap) +{ + auto d = decodeQuery(""); + ASSERT_EQ(d, (StringMap) {}); +} + +TEST(decodeQuery, simpleDecode) +{ + auto d = decodeQuery("yi=one&er=two"); + ASSERT_EQ(d, ((StringMap) {{"yi", "one"}, {"er", "two"}})); +} + +TEST(decodeQuery, decodeUrlEncodedArgs) +{ + auto d = decodeQuery("arg=%3D%3D%40%3D%3D"); + ASSERT_EQ(d, ((StringMap) {{"arg", "==@=="}})); +} + +TEST(decodeQuery, decodeArgWithEmptyValue) +{ + auto d = decodeQuery("arg="); + ASSERT_EQ(d, ((StringMap) {{"arg", ""}})); +} + +/* ---------------------------------------------------------------------------- + * percentDecode + * --------------------------------------------------------------------------*/ + +TEST(percentDecode, decodesUrlEncodedString) +{ + std::string s = "==@=="; + std::string d = percentDecode("%3D%3D%40%3D%3D"); + ASSERT_EQ(d, s); +} + +TEST(percentDecode, multipleDecodesAreIdempotent) +{ + std::string once = percentDecode("%3D%3D%40%3D%3D"); + std::string twice = percentDecode(once); + + ASSERT_EQ(once, twice); +} + +TEST(percentDecode, trailingPercent) +{ + std::string s = "==@==%"; + std::string d = percentDecode("%3D%3D%40%3D%3D%25"); + + ASSERT_EQ(d, s); +} + +/* 
---------------------------------------------------------------------------- + * percentEncode + * --------------------------------------------------------------------------*/ + +TEST(percentEncode, encodesUrlEncodedString) +{ + std::string s = percentEncode("==@=="); + std::string d = "%3D%3D%40%3D%3D"; + ASSERT_EQ(d, s); +} + +TEST(percentEncode, keepArgument) +{ + std::string a = percentEncode("abd / def"); + std::string b = percentEncode("abd / def", "/"); + ASSERT_EQ(a, "abd%20%2F%20def"); + ASSERT_EQ(b, "abd%20/%20def"); +} + +TEST(percentEncode, inverseOfDecode) +{ + std::string original = "%3D%3D%40%3D%3D"; + std::string once = percentEncode(original); + std::string back = percentDecode(once); + + ASSERT_EQ(back, original); +} + +TEST(percentEncode, trailingPercent) +{ + std::string s = percentEncode("==@==%"); + std::string d = "%3D%3D%40%3D%3D%25"; + + ASSERT_EQ(d, s); +} + +TEST(percentEncode, yen) +{ + // https://en.wikipedia.org/wiki/Percent-encoding#Character_data + std::string s = reinterpret_cast(u8"円"); + std::string e = "%E5%86%86"; + + ASSERT_EQ(percentEncode(s), e); + ASSERT_EQ(percentDecode(e), s); +} + +TEST(nix, isValidSchemeName) +{ ASSERT_TRUE(isValidSchemeName("http")); ASSERT_TRUE(isValidSchemeName("https")); ASSERT_TRUE(isValidSchemeName("file")); @@ -334,4 +359,4 @@ TEST(nix, isValidSchemeName) { ASSERT_FALSE(isValidSchemeName("http ")); } -} +} // namespace nix diff --git a/src/libutil-tests/xml-writer.cc b/src/libutil-tests/xml-writer.cc index 000af700c..d86baf32b 100644 --- a/src/libutil-tests/xml-writer.cc +++ b/src/libutil-tests/xml-writer.cc @@ -4,102 +4,101 @@ namespace nix { - /* ---------------------------------------------------------------------------- - * XMLWriter - * --------------------------------------------------------------------------*/ +/* ---------------------------------------------------------------------------- + * XMLWriter + * --------------------------------------------------------------------------*/ - TEST(XMLWriter, emptyObject) { - std::stringstream out; - { - XMLWriter t(false, out); - } - - ASSERT_EQ(out.str(), "\n"); - } - - TEST(XMLWriter, objectWithEmptyElement) { - std::stringstream out; - { - XMLWriter t(false, out); - t.openElement("foobar"); - } - - ASSERT_EQ(out.str(), "\n"); - } - - TEST(XMLWriter, objectWithElementWithAttrs) { - std::stringstream out; - { - XMLWriter t(false, out); - XMLAttrs attrs = { - { "foo", "bar" } - }; - t.openElement("foobar", attrs); - } - - ASSERT_EQ(out.str(), "\n"); - } - - TEST(XMLWriter, objectWithElementWithEmptyAttrs) { - std::stringstream out; - { - XMLWriter t(false, out); - XMLAttrs attrs = {}; - t.openElement("foobar", attrs); - } - - ASSERT_EQ(out.str(), "\n"); - } - - TEST(XMLWriter, objectWithElementWithAttrsEscaping) { - std::stringstream out; - { - XMLWriter t(false, out); - XMLAttrs attrs = { - { "", "" } - }; - t.openElement("foobar", attrs); - } - - // XXX: While "" is escaped, "" isn't which I think is a bug. 
- ASSERT_EQ(out.str(), "\n=\"<value>\">"); - } - - TEST(XMLWriter, objectWithElementWithAttrsIndented) { - std::stringstream out; - { - XMLWriter t(true, out); - XMLAttrs attrs = { - { "foo", "bar" } - }; - t.openElement("foobar", attrs); - } - - ASSERT_EQ(out.str(), "\n\n\n"); - } - - TEST(XMLWriter, writeEmptyElement) { - std::stringstream out; - { - XMLWriter t(false, out); - t.writeEmptyElement("foobar"); - } - - ASSERT_EQ(out.str(), "\n"); - } - - TEST(XMLWriter, writeEmptyElementWithAttributes) { - std::stringstream out; - { - XMLWriter t(false, out); - XMLAttrs attrs = { - { "foo", "bar" } - }; - t.writeEmptyElement("foobar", attrs); - - } - - ASSERT_EQ(out.str(), "\n"); +TEST(XMLWriter, emptyObject) +{ + std::stringstream out; + { + XMLWriter t(false, out); } + ASSERT_EQ(out.str(), "\n"); } + +TEST(XMLWriter, objectWithEmptyElement) +{ + std::stringstream out; + { + XMLWriter t(false, out); + t.openElement("foobar"); + } + + ASSERT_EQ(out.str(), "\n"); +} + +TEST(XMLWriter, objectWithElementWithAttrs) +{ + std::stringstream out; + { + XMLWriter t(false, out); + XMLAttrs attrs = {{"foo", "bar"}}; + t.openElement("foobar", attrs); + } + + ASSERT_EQ(out.str(), "\n"); +} + +TEST(XMLWriter, objectWithElementWithEmptyAttrs) +{ + std::stringstream out; + { + XMLWriter t(false, out); + XMLAttrs attrs = {}; + t.openElement("foobar", attrs); + } + + ASSERT_EQ(out.str(), "\n"); +} + +TEST(XMLWriter, objectWithElementWithAttrsEscaping) +{ + std::stringstream out; + { + XMLWriter t(false, out); + XMLAttrs attrs = {{"", ""}}; + t.openElement("foobar", attrs); + } + + // XXX: While "" is escaped, "" isn't which I think is a bug. + ASSERT_EQ(out.str(), "\n=\"<value>\">"); +} + +TEST(XMLWriter, objectWithElementWithAttrsIndented) +{ + std::stringstream out; + { + XMLWriter t(true, out); + XMLAttrs attrs = {{"foo", "bar"}}; + t.openElement("foobar", attrs); + } + + ASSERT_EQ(out.str(), "\n\n\n"); +} + +TEST(XMLWriter, writeEmptyElement) +{ + std::stringstream out; + { + XMLWriter t(false, out); + t.writeEmptyElement("foobar"); + } + + ASSERT_EQ(out.str(), "\n"); +} + +TEST(XMLWriter, writeEmptyElementWithAttributes) +{ + std::stringstream out; + { + XMLWriter t(false, out); + XMLAttrs attrs = {{"foo", "bar"}}; + t.writeEmptyElement("foobar", attrs); + } + + ASSERT_EQ(out.str(), "\n"); +} + +} // namespace nix diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 9069e4b49..b978ac4db 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -16,12 +16,13 @@ namespace nix { struct ArchiveSettings : Config { - Setting useCaseHack{this, - #ifdef __APPLE__ - true, - #else - false, - #endif + Setting useCaseHack{ + this, +#ifdef __APPLE__ + true, +#else + false, +#endif "use-case-hack", "Whether to enable a macOS-specific hack for dealing with file name case collisions."}; }; @@ -32,18 +33,12 @@ static GlobalConfig::Register rArchiveSettings(&archiveSettings); PathFilter defaultPathFilter = [](const Path &) { return true; }; - -void SourceAccessor::dumpPath( - const CanonPath & path, - Sink & sink, - PathFilter & filter) +void SourceAccessor::dumpPath(const CanonPath & path, Sink & sink, PathFilter & filter) { - auto dumpContents = [&](const CanonPath & path) - { + auto dumpContents = [&](const CanonPath & path) { sink << "contents"; std::optional size; - readFile(path, sink, [&](uint64_t _size) - { + readFile(path, sink, [&](uint64_t _size) { size = _size; sink << _size; }); @@ -82,9 +77,8 @@ void SourceAccessor::dumpPath( name.erase(pos); } if (!unhacked.emplace(name, 
i.first).second) - throw Error("file name collision between '%s' and '%s'", - (path / unhacked[name]), - (path / i.first)); + throw Error( + "file name collision between '%s' and '%s'", (path / unhacked[name]), (path / i.first)); } else unhacked.emplace(i.first, i.first); @@ -99,7 +93,8 @@ void SourceAccessor::dumpPath( else if (st.type == tSymlink) sink << "type" << "symlink" << "target" << readLink(path); - else throw Error("file '%s' has an unsupported type", path); + else + throw Error("file '%s' has an unsupported type", path); sink << ")"; }; @@ -108,7 +103,6 @@ void SourceAccessor::dumpPath( dump(path); } - time_t dumpPathAndGetMtime(const Path & path, Sink & sink, PathFilter & filter) { auto path2 = PosixSourceAccessor::createAtRoot(path); @@ -121,20 +115,17 @@ void dumpPath(const Path & path, Sink & sink, PathFilter & filter) dumpPathAndGetMtime(path, sink, filter); } - void dumpString(std::string_view s, Sink & sink) { sink << narVersionMagic1 << "(" << "type" << "regular" << "contents" << s << ")"; } - template -static SerialisationError badArchive(std::string_view s, const Args & ... args) +static SerialisationError badArchive(std::string_view s, const Args &... args) { return SerialisationError("bad archive: " + s, args...); } - static void parseContents(CreateRegularFileSink & sink, Source & source) { uint64_t size = readLongLong(source); @@ -147,7 +138,8 @@ static void parseContents(CreateRegularFileSink & sink, Source & source) while (left) { checkInterrupt(); auto n = buf.size(); - if ((uint64_t)n > left) n = left; + if ((uint64_t) n > left) + n = left; source(buf.data(), n); sink({buf.data(), n}); left -= n; @@ -156,16 +148,14 @@ static void parseContents(CreateRegularFileSink & sink, Source & source) readPadding(size, source); } - struct CaseInsensitiveCompare { - bool operator() (const std::string & a, const std::string & b) const + bool operator()(const std::string & a, const std::string & b) const { return strcasecmp(a.c_str(), b.c_str()) < 0; } }; - static void parse(FileSystemObjectSink & sink, Source & source, const CanonPath & path) { auto getString = [&]() { @@ -191,7 +181,8 @@ static void parse(FileSystemObjectSink & sink, Source & source, const CanonPath if (tag == "executable") { auto s2 = getString(); - if (s2 != "") throw badArchive("executable marker has non-empty value"); + if (s2 != "") + throw badArchive("executable marker has non-empty value"); crf.isExecutable(); tag = getString(); } @@ -213,7 +204,8 @@ static void parse(FileSystemObjectSink & sink, Source & source, const CanonPath while (1) { auto tag = getString(); - if (tag == ")") break; + if (tag == ")") + break; if (tag != "entry") throw badArchive("expected tag 'entry' or ')', got '%s'", tag); @@ -223,7 +215,8 @@ static void parse(FileSystemObjectSink & sink, Source & source, const CanonPath expectTag("name"); auto name = getString(); - if (name.empty() || name == "." || name == ".." || name.find('/') != std::string::npos || name.find((char) 0) != std::string::npos) + if (name.empty() || name == "." || name == ".." 
|| name.find('/') != std::string::npos + || name.find((char) 0) != std::string::npos) throw badArchive("NAR contains invalid file name '%1%'", name); if (name <= prevName) throw badArchive("NAR directory is not sorted"); @@ -236,7 +229,10 @@ static void parse(FileSystemObjectSink & sink, Source & source, const CanonPath name += std::to_string(++i->second); auto j = names.find(name); if (j != names.end()) - throw badArchive("NAR contains file name '%s' that collides with case-hacked file name '%s'", prevName, j->first); + throw badArchive( + "NAR contains file name '%s' that collides with case-hacked file name '%s'", + prevName, + j->first); } else names[name] = 0; } @@ -258,10 +254,10 @@ static void parse(FileSystemObjectSink & sink, Source & source, const CanonPath expectTag(")"); } - else throw badArchive("unknown file type '%s'", type); + else + throw badArchive("unknown file type '%s'", type); } - void parseDump(FileSystemObjectSink & sink, Source & source) { std::string version; @@ -276,7 +272,6 @@ void parseDump(FileSystemObjectSink & sink, Source & source) parse(sink, source, CanonPath::root); } - void restorePath(const std::filesystem::path & path, Source & source, bool startFsync) { RestoreSink sink{startFsync}; @@ -284,7 +279,6 @@ void restorePath(const std::filesystem::path & path, Source & source, bool start parseDump(sink, source); } - void copyNAR(Source & source, Sink & sink) { // FIXME: if 'source' is the output of dumpPath() followed by EOF, @@ -292,10 +286,9 @@ void copyNAR(Source & source, Sink & sink) NullFileSystemObjectSink parseSink; /* just parse the NAR */ - TeeSource wrapper { source, sink }; + TeeSource wrapper{source, sink}; parseDump(parseSink, wrapper); } - -} +} // namespace nix diff --git a/src/libutil/args.cc b/src/libutil/args.cc index d8d004e6f..2e6d85afd 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -10,7 +10,7 @@ #include #include #ifndef _WIN32 -# include +# include #endif namespace nix { @@ -24,14 +24,16 @@ void Args::addFlag(Flag && flag_) longFlags[flag->longName] = flag; for (auto & alias : flag->aliases) longFlags[alias] = flag; - if (flag->shortName) shortFlags[flag->shortName] = flag; + if (flag->shortName) + shortFlags[flag->shortName] = flag; } void Args::removeFlag(const std::string & longName) { auto flag = longFlags.find(longName); assert(flag != longFlags.end()); - if (flag->second->shortName) shortFlags.erase(flag->second->shortName); + if (flag->second->shortName) + shortFlags.erase(flag->second->shortName); longFlags.erase(flag); } @@ -51,10 +53,7 @@ void Completions::add(std::string completion, std::string description) if (needs_ellipsis) description.append(" [...]"); } - completions.insert(Completion { - .completion = completion, - .description = description - }); + completions.insert(Completion{.completion = completion, .description = description}); } auto Completion::operator<=>(const Completion & other) const noexcept = default; @@ -74,7 +73,8 @@ RootArgs & Args::getRoot() std::optional RootArgs::needsCompletion(std::string_view s) { - if (!completions) return {}; + if (!completions) + return {}; auto i = s.find(completionMarker); if (i != std::string::npos) return std::string(s.begin(), i); @@ -86,7 +86,8 @@ std::optional RootArgs::needsCompletion(std::string_view s) * * Except we can't recursively reference the Parser typedef, so we have to write a class. 
*/ -struct Parser { +struct Parser +{ std::string_view remaining; /** @@ -94,12 +95,14 @@ struct Parser { */ virtual void operator()(std::shared_ptr & state, Strings & r) = 0; - Parser(std::string_view s) : remaining(s) {}; + Parser(std::string_view s) + : remaining(s) {}; - virtual ~Parser() { }; + virtual ~Parser() {}; }; -struct ParseQuoted : public Parser { +struct ParseQuoted : public Parser +{ /** * @brief Accumulated string * @@ -107,13 +110,14 @@ struct ParseQuoted : public Parser { */ std::string acc; - ParseQuoted(std::string_view s) : Parser(s) {}; + ParseQuoted(std::string_view s) + : Parser(s) {}; virtual void operator()(std::shared_ptr & state, Strings & r) override; }; - -struct ParseUnquoted : public Parser { +struct ParseUnquoted : public Parser +{ /** * @brief Accumulated string * @@ -122,9 +126,11 @@ struct ParseUnquoted : public Parser { */ std::string acc; - ParseUnquoted(std::string_view s) : Parser(s) {}; + ParseUnquoted(std::string_view s) + : Parser(s) {}; - virtual void operator()(std::shared_ptr & state, Strings & r) override { + virtual void operator()(std::shared_ptr & state, Strings & r) override + { if (remaining.empty()) { if (!acc.empty()) r.push_back(acc); @@ -132,111 +138,116 @@ struct ParseUnquoted : public Parser { return; } switch (remaining[0]) { - case ' ': case '\t': case '\n': case '\r': - if (!acc.empty()) - r.push_back(acc); - state = std::make_shared(ParseUnquoted(remaining.substr(1))); + case ' ': + case '\t': + case '\n': + case '\r': + if (!acc.empty()) + r.push_back(acc); + state = std::make_shared(ParseUnquoted(remaining.substr(1))); + return; + case '`': + if (remaining.size() > 1 && remaining[1] == '`') { + state = std::make_shared(ParseQuoted(remaining.substr(2))); return; - case '`': - if (remaining.size() > 1 && remaining[1] == '`') { - state = std::make_shared(ParseQuoted(remaining.substr(2))); - return; - } - else - throw Error("single backtick is not a supported syntax in the nix shebang."); + } else + throw Error("single backtick is not a supported syntax in the nix shebang."); - // reserved characters - // meaning to be determined, or may be reserved indefinitely so that - // #!nix syntax looks unambiguous - case '$': - case '*': - case '~': - case '<': - case '>': - case '|': - case ';': - case '(': - case ')': - case '[': - case ']': - case '{': - case '}': - case '\'': - case '"': - case '\\': - throw Error("unsupported unquoted character in nix shebang: " + std::string(1, remaining[0]) + ". Use double backticks to escape?"); + // reserved characters + // meaning to be determined, or may be reserved indefinitely so that + // #!nix syntax looks unambiguous + case '$': + case '*': + case '~': + case '<': + case '>': + case '|': + case ';': + case '(': + case ')': + case '[': + case ']': + case '{': + case '}': + case '\'': + case '"': + case '\\': + throw Error( + "unsupported unquoted character in nix shebang: " + std::string(1, remaining[0]) + + ". Use double backticks to escape?"); - case '#': - if (acc.empty()) { - throw Error ("unquoted nix shebang argument cannot start with #. Use double backticks to escape?"); - } else { - acc += remaining[0]; - remaining = remaining.substr(1); - return; - } - - default: + case '#': + if (acc.empty()) { + throw Error("unquoted nix shebang argument cannot start with #. 
Use double backticks to escape?"); + } else { acc += remaining[0]; remaining = remaining.substr(1); return; + } + + default: + acc += remaining[0]; + remaining = remaining.substr(1); + return; } assert(false); } }; -void ParseQuoted::operator()(std::shared_ptr &state, Strings & r) { +void ParseQuoted::operator()(std::shared_ptr & state, Strings & r) +{ if (remaining.empty()) { throw Error("unterminated quoted string in nix shebang"); } switch (remaining[0]) { - case ' ': - if ((remaining.size() == 3 && remaining[1] == '`' && remaining[2] == '`') - || (remaining.size() > 3 && remaining[1] == '`' && remaining[2] == '`' && remaining[3] != '`')) { - // exactly two backticks mark the end of a quoted string, but a preceding space is ignored if present. - state = std::make_shared(ParseUnquoted(remaining.substr(3))); - r.push_back(acc); - return; - } - else { - // just a normal space - acc += remaining[0]; - remaining = remaining.substr(1); - return; - } - case '`': - // exactly two backticks mark the end of a quoted string - if ((remaining.size() == 2 && remaining[1] == '`') - || (remaining.size() > 2 && remaining[1] == '`' && remaining[2] != '`')) { - state = std::make_shared(ParseUnquoted(remaining.substr(2))); - r.push_back(acc); - return; - } - - // a sequence of at least 3 backticks is one escape-backtick which is ignored, followed by any number of backticks, which are verbatim - else if (remaining.size() >= 3 && remaining[1] == '`' && remaining[2] == '`') { - // ignore "escape" backtick - remaining = remaining.substr(1); - // add the rest - while (remaining.size() > 0 && remaining[0] == '`') { - acc += '`'; - remaining = remaining.substr(1); - } - return; - } - else { - acc += remaining[0]; - remaining = remaining.substr(1); - return; - } - default: + case ' ': + if ((remaining.size() == 3 && remaining[1] == '`' && remaining[2] == '`') + || (remaining.size() > 3 && remaining[1] == '`' && remaining[2] == '`' && remaining[3] != '`')) { + // exactly two backticks mark the end of a quoted string, but a preceding space is ignored if present. 
+ state = std::make_shared(ParseUnquoted(remaining.substr(3))); + r.push_back(acc); + return; + } else { + // just a normal space acc += remaining[0]; remaining = remaining.substr(1); return; + } + case '`': + // exactly two backticks mark the end of a quoted string + if ((remaining.size() == 2 && remaining[1] == '`') + || (remaining.size() > 2 && remaining[1] == '`' && remaining[2] != '`')) { + state = std::make_shared(ParseUnquoted(remaining.substr(2))); + r.push_back(acc); + return; + } + + // a sequence of at least 3 backticks is one escape-backtick which is ignored, followed by any number of + // backticks, which are verbatim + else if (remaining.size() >= 3 && remaining[1] == '`' && remaining[2] == '`') { + // ignore "escape" backtick + remaining = remaining.substr(1); + // add the rest + while (remaining.size() > 0 && remaining[0] == '`') { + acc += '`'; + remaining = remaining.substr(1); + } + return; + } else { + acc += remaining[0]; + remaining = remaining.substr(1); + return; + } + default: + acc += remaining[0]; + remaining = remaining.substr(1); + return; } assert(false); } -Strings parseShebangContent(std::string_view s) { +Strings parseShebangContent(std::string_view s) +{ Strings result; std::shared_ptr parserState(std::make_shared(ParseUnquoted(s))); @@ -268,22 +279,22 @@ void RootArgs::parseCmdline(const Strings & _cmdline, bool allowShebang) // if we have at least one argument, it's the name of an // executable file, and it starts with "#!". Strings savedArgs; - if (allowShebang){ + if (allowShebang) { auto script = *cmdline.begin(); try { std::ifstream stream(script); - char shebang[3]={0,0,0}; - stream.get(shebang,3); - if (strncmp(shebang,"#!",2) == 0){ - for (auto pos = std::next(cmdline.begin()); pos != cmdline.end();pos++) + char shebang[3] = {0, 0, 0}; + stream.get(shebang, 3); + if (strncmp(shebang, "#!", 2) == 0) { + for (auto pos = std::next(cmdline.begin()); pos != cmdline.end(); pos++) savedArgs.push_back(*pos); cmdline.clear(); std::string line; - std::getline(stream,line); + std::getline(stream, line); static const std::string commentChars("#/\\%@*-("); std::string shebangContent; - while (std::getline(stream,line) && !line.empty() && commentChars.find(line[0]) != std::string::npos){ + while (std::getline(stream, line) && !line.empty() && commentChars.find(line[0]) != std::string::npos) { line = chomp(line); std::smatch match; @@ -297,12 +308,13 @@ void RootArgs::parseCmdline(const Strings & _cmdline, bool allowShebang) } cmdline.push_back(script); commandBaseDir = dirOf(script); - for (auto pos = savedArgs.begin(); pos != savedArgs.end();pos++) + for (auto pos = savedArgs.begin(); pos != savedArgs.end(); pos++) cmdline.push_back(*pos); } - } catch (SystemError &) { } + } catch (SystemError &) { + } } - for (auto pos = cmdline.begin(); pos != cmdline.end(); ) { + for (auto pos = cmdline.begin(); pos != cmdline.end();) { auto arg = *pos; @@ -310,7 +322,8 @@ void RootArgs::parseCmdline(const Strings & _cmdline, bool allowShebang) `-j3` -> `-j 3`). 
*/ if (!dashDash && arg.length() > 2 && arg[0] == '-' && arg[1] != '-' && isalpha(arg[1])) { *pos = (std::string) "-" + arg[1]; - auto next = pos; ++next; + auto next = pos; + ++next; for (unsigned int j = 2; j < arg.length(); j++) if (isalpha(arg[j])) cmdline.insert(next, (std::string) "-" + arg[j]); @@ -324,12 +337,10 @@ void RootArgs::parseCmdline(const Strings & _cmdline, bool allowShebang) if (!dashDash && arg == "--") { dashDash = true; ++pos; - } - else if (!dashDash && std::string(arg, 0, 1) == "-") { + } else if (!dashDash && std::string(arg, 0, 1) == "-") { if (!processFlag(pos, cmdline.end())) throw UsageError("unrecognised flag '%1%'", arg); - } - else { + } else { pos = rewriteArgs(cmdline, pos); pendingArgs.push_back(*pos++); if (processArgs(pendingArgs, false)) @@ -377,12 +388,12 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end) std::vector args; bool anyCompleted = false; - for (size_t n = 0 ; n < flag.handler.arity; ++n) { + for (size_t n = 0; n < flag.handler.arity; ++n) { if (pos == end) { - if (flag.handler.arity == ArityAny || anyCompleted) break; + if (flag.handler.arity == ArityAny || anyCompleted) + break; throw UsageError( - "flag '%s' requires %d argument(s), but only %d were given", - name, flag.handler.arity, n); + "flag '%s' requires %d argument(s), but only %d were given", name, flag.handler.arity, n); } if (auto prefix = rootArgs.needsCompletion(*pos)) { anyCompleted = true; @@ -404,9 +415,7 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end) if (std::string(*pos, 0, 2) == "--") { if (auto prefix = rootArgs.needsCompletion(*pos)) { for (auto & [name, flag] : longFlags) { - if (!hiddenCategories.count(flag->category) - && hasPrefix(name, std::string(*prefix, 2))) - { + if (!hiddenCategories.count(flag->category) && hasPrefix(name, std::string(*prefix, 2))) { if (auto & f = flag->experimentalFeature) rootArgs.flagExperimentalFeatures.insert(*f); rootArgs.completions->add("--" + name, flag->description); @@ -415,14 +424,16 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end) return false; } auto i = longFlags.find(std::string(*pos, 2)); - if (i == longFlags.end()) return false; + if (i == longFlags.end()) + return false; return process("--" + i->first, *i->second); } if (std::string(*pos, 0, 1) == "-" && pos->size() == 2) { auto c = (*pos)[1]; auto i = shortFlags.find(c); - if (i == shortFlags.end()) return false; + if (i == shortFlags.end()) + return false; return process(std::string("-") + c, *i->second); } @@ -452,12 +463,11 @@ bool Args::processArgs(const Strings & args, bool finish) bool res = false; - if ((exp.handler.arity == ArityAny && finish) || - (exp.handler.arity != ArityAny && args.size() == exp.handler.arity)) - { + if ((exp.handler.arity == ArityAny && finish) + || (exp.handler.arity != ArityAny && args.size() == exp.handler.arity)) { std::vector ss; bool anyCompleted = false; - for (const auto &[n, s] : enumerate(args)) { + for (const auto & [n, s] : enumerate(args)) { if (auto prefix = rootArgs.needsCompletion(s)) { anyCompleted = true; ss.push_back(*prefix); @@ -479,11 +489,7 @@ bool Args::processArgs(const Strings & args, bool finish) except that it will only adjust the next and prev pointers of the list elements, meaning the actual contents don't move in memory. This is critical to prevent invalidating internal pointers! 
*/ - processedArgs.splice( - processedArgs.end(), - expectedArgs, - expectedArgs.begin(), - ++expectedArgs.begin()); + processedArgs.splice(processedArgs.end(), expectedArgs, expectedArgs.begin(), ++expectedArgs.begin()); res = true; } @@ -501,7 +507,8 @@ nlohmann::json Args::toJSON() for (auto & [name, flag] : longFlags) { auto j = nlohmann::json::object(); j["hiddenCategory"] = hiddenCategories.count(flag->category) > 0; - if (flag->aliases.count(name)) continue; + if (flag->aliases.count(name)) + continue; if (flag->shortName) j["shortName"] = std::string(1, flag->shortName); if (flag->description != "") @@ -531,32 +538,34 @@ nlohmann::json Args::toJSON() res["flags"] = std::move(flags); res["args"] = std::move(args); auto s = doc(); - if (s != "") res.emplace("doc", stripIndentation(s)); + if (s != "") + res.emplace("doc", stripIndentation(s)); return res; } static void _completePath(AddCompletions & completions, std::string_view prefix, bool onlyDirs) { completions.setType(Completions::Type::Filenames); - #ifndef _WIN32 // TODO implement globbing completions on Windows +#ifndef _WIN32 // TODO implement globbing completions on Windows glob_t globbuf; int flags = GLOB_NOESCAPE; - #ifdef GLOB_ONLYDIR +# ifdef GLOB_ONLYDIR if (onlyDirs) flags |= GLOB_ONLYDIR; - #endif +# endif // using expandTilde here instead of GLOB_TILDE(_CHECK) so that ~ expands to /home/user/ if (glob((expandTilde(prefix) + "*").c_str(), flags, nullptr, &globbuf) == 0) { for (size_t i = 0; i < globbuf.gl_pathc; ++i) { if (onlyDirs) { auto st = stat(globbuf.gl_pathv[i]); - if (!S_ISDIR(st.st_mode)) continue; + if (!S_ISDIR(st.st_mode)) + continue; } completions.add(globbuf.gl_pathv[i]); } } globfree(&globbuf); - #endif +#endif } void Args::completePath(AddCompletions & completions, size_t, std::string_view prefix) @@ -569,53 +578,56 @@ void Args::completeDir(AddCompletions & completions, size_t, std::string_view pr _completePath(completions, prefix, true); } -Strings argvToStrings(int argc, char * * argv) +Strings argvToStrings(int argc, char ** argv) { Strings args; - argc--; argv++; - while (argc--) args.push_back(*argv++); + argc--; + argv++; + while (argc--) + args.push_back(*argv++); return args; } -std::optional Command::experimentalFeature () +std::optional Command::experimentalFeature() { - return { Xp::NixCommand }; + return {Xp::NixCommand}; } MultiCommand::MultiCommand(std::string_view commandName, const Commands & commands_) : commands(commands_) , commandName(commandName) { - expectArgs({ - .label = "subcommand", - .optional = true, - .handler = {[=,this](std::string s) { - assert(!command); - auto i = commands.find(s); - if (i == commands.end()) { - StringSet commandNames; - for (auto & [name, _] : commands) - commandNames.insert(name); - auto suggestions = Suggestions::bestMatches(commandNames, s); - throw UsageError(suggestions, "'%s' is not a recognised command", s); - } - command = {s, i->second()}; - command->second->parent = this; - }}, - .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { - for (auto & [name, command] : commands) - if (hasPrefix(name, prefix)) - completions.add(name); - }} - }); + expectArgs( + {.label = "subcommand", + .optional = true, + .handler = {[=, this](std::string s) { + assert(!command); + auto i = commands.find(s); + if (i == commands.end()) { + StringSet commandNames; + for (auto & [name, _] : commands) + commandNames.insert(name); + auto suggestions = Suggestions::bestMatches(commandNames, s); + throw UsageError(suggestions, "'%s' is not 
a recognised command", s); + } + command = {s, i->second()}; + command->second->parent = this; + }}, + .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { + for (auto & [name, command] : commands) + if (hasPrefix(name, prefix)) + completions.add(name); + }}}); categories[Command::catDefault] = "Available commands"; } bool MultiCommand::processFlag(Strings::iterator & pos, Strings::iterator end) { - if (Args::processFlag(pos, end)) return true; - if (command && command->second->processFlag(pos, end)) return true; + if (Args::processFlag(pos, end)) + return true; + if (command && command->second->processFlag(pos, end)) + return true; return false; } @@ -652,14 +664,15 @@ Strings::iterator MultiCommand::rewriteArgs(Strings & args, Strings::iterator po if (command) return command->second->rewriteArgs(args, pos); - if (aliasUsed || pos == args.end()) return pos; + if (aliasUsed || pos == args.end()) + return pos; auto arg = *pos; auto i = aliases.find(arg); - if (i == aliases.end()) return pos; + if (i == aliases.end()) + return pos; auto & info = i->second; if (info.status == AliasStatus::Deprecated) { - warn("'%s' is a deprecated alias for '%s'", - arg, concatStringsSep(" ", info.replacement)); + warn("'%s' is a deprecated alias for '%s'", arg, concatStringsSep(" ", info.replacement)); } pos = args.erase(pos); for (auto j = info.replacement.rbegin(); j != info.replacement.rend(); ++j) @@ -668,4 +681,4 @@ Strings::iterator MultiCommand::rewriteArgs(Strings & args, Strings::iterator po return pos; } -} +} // namespace nix diff --git a/src/libutil/canon-path.cc b/src/libutil/canon-path.cc index 33ac700f0..07a3a6193 100644 --- a/src/libutil/canon-path.cc +++ b/src/libutil/canon-path.cc @@ -9,19 +9,18 @@ CanonPath CanonPath::root = CanonPath("/"); static std::string absPathPure(std::string_view path) { - return canonPathInner(path, [](auto &, auto &){}); + return canonPathInner(path, [](auto &, auto &) {}); } CanonPath::CanonPath(std::string_view raw) : path(absPathPure(concatStrings("/", raw))) -{ } +{ +} CanonPath::CanonPath(std::string_view raw, const CanonPath & root) - : path(absPathPure( - raw.size() > 0 && raw[0] == '/' - ? raw - : concatStrings(root.abs(), "/", raw))) -{ } + : path(absPathPure(raw.size() > 0 && raw[0] == '/' ? 
raw : concatStrings(root.abs(), "/", raw))) +{ +} CanonPath::CanonPath(const std::vector & elems) : path("/") @@ -32,7 +31,8 @@ CanonPath::CanonPath(const std::vector & elems) std::optional CanonPath::parent() const { - if (isRoot()) return std::nullopt; + if (isRoot()) + return std::nullopt; return CanonPath(unchecked_t(), path.substr(0, std::max((size_t) 1, path.rfind('/')))); } @@ -45,30 +45,31 @@ void CanonPath::pop() bool CanonPath::isWithin(const CanonPath & parent) const { return !( - path.size() < parent.path.size() - || path.substr(0, parent.path.size()) != parent.path - || (parent.path.size() > 1 && path.size() > parent.path.size() - && path[parent.path.size()] != '/')); + path.size() < parent.path.size() || path.substr(0, parent.path.size()) != parent.path + || (parent.path.size() > 1 && path.size() > parent.path.size() && path[parent.path.size()] != '/')); } CanonPath CanonPath::removePrefix(const CanonPath & prefix) const { assert(isWithin(prefix)); - if (prefix.isRoot()) return *this; - if (path.size() == prefix.path.size()) return root; + if (prefix.isRoot()) + return *this; + if (path.size() == prefix.path.size()) + return root; return CanonPath(unchecked_t(), path.substr(prefix.path.size())); } void CanonPath::extend(const CanonPath & x) { - if (x.isRoot()) return; + if (x.isRoot()) + return; if (isRoot()) path += x.rel(); else path += x.abs(); } -CanonPath CanonPath::operator / (const CanonPath & x) const +CanonPath CanonPath::operator/(const CanonPath & x) const { auto res = *this; res.extend(x); @@ -79,11 +80,12 @@ void CanonPath::push(std::string_view c) { assert(c.find('/') == c.npos); assert(c != "." && c != ".."); - if (!isRoot()) path += '/'; + if (!isRoot()) + path += '/'; path += c; } -CanonPath CanonPath::operator / (std::string_view c) const +CanonPath CanonPath::operator/(std::string_view c) const { auto res = *this; res.push(c); @@ -111,7 +113,7 @@ bool CanonPath::isAllowed(const std::set & allowed) const return false; } -std::ostream & operator << (std::ostream & stream, const CanonPath & path) +std::ostream & operator<<(std::ostream & stream, const CanonPath & path) { stream << path.abs(); return stream; @@ -122,7 +124,8 @@ std::string CanonPath::makeRelative(const CanonPath & path) const auto p1 = begin(); auto p2 = path.begin(); - for (; p1 != end() && p2 != path.end() && *p1 == *p2; ++p1, ++p2) ; + for (; p1 != end() && p2 != path.end() && *p1 == *p2; ++p1, ++p2) + ; if (p1 == end() && p2 == path.end()) return "."; @@ -132,15 +135,17 @@ std::string CanonPath::makeRelative(const CanonPath & path) const std::string res; while (p1 != end()) { ++p1; - if (!res.empty()) res += '/'; + if (!res.empty()) + res += '/'; res += ".."; } if (p2 != path.end()) { - if (!res.empty()) res += '/'; + if (!res.empty()) + res += '/'; res += p2.remaining; } return res; } } -} +} // namespace nix diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 0e38620d4..af04b719e 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -39,12 +39,15 @@ struct ArchiveDecompressionSource : Source std::unique_ptr archive = 0; Source & src; std::optional compressionMethod; + ArchiveDecompressionSource(Source & src, std::optional compressionMethod = std::nullopt) : src(src) , compressionMethod(std::move(compressionMethod)) { } + ~ArchiveDecompressionSource() override {} + size_t read(char * data, size_t len) override { struct archive_entry * ae; @@ -139,16 +142,19 @@ private: struct NoneSink : CompressionSink { Sink & nextSink; + NoneSink(Sink & 
nextSink, int level = COMPRESSION_LEVEL_DEFAULT) : nextSink(nextSink) { if (level != COMPRESSION_LEVEL_DEFAULT) warn("requested compression level '%d' not supported by compression method 'none'", level); } + void finish() override { flush(); } + void writeUnbuffered(std::string_view data) override { nextSink(data); @@ -307,4 +313,4 @@ std::string compress(const std::string & method, std::string_view in, const bool return std::move(ssink.s); } -} +} // namespace nix diff --git a/src/libutil/compute-levels.cc b/src/libutil/compute-levels.cc index dd221bd70..5bd81a893 100644 --- a/src/libutil/compute-levels.cc +++ b/src/libutil/compute-levels.cc @@ -11,15 +11,16 @@ namespace nix { #if HAVE_LIBCPUID -StringSet computeLevels() { +StringSet computeLevels() +{ StringSet levels; struct cpu_id_t data; const std::map feature_strings = { - { FEATURE_LEVEL_X86_64_V1, "x86_64-v1" }, - { FEATURE_LEVEL_X86_64_V2, "x86_64-v2" }, - { FEATURE_LEVEL_X86_64_V3, "x86_64-v3" }, - { FEATURE_LEVEL_X86_64_V4, "x86_64-v4" }, + {FEATURE_LEVEL_X86_64_V1, "x86_64-v1"}, + {FEATURE_LEVEL_X86_64_V2, "x86_64-v2"}, + {FEATURE_LEVEL_X86_64_V3, "x86_64-v3"}, + {FEATURE_LEVEL_X86_64_V4, "x86_64-v4"}, }; if (cpu_identify(NULL, &data) < 0) @@ -34,10 +35,11 @@ StringSet computeLevels() { #else -StringSet computeLevels() { +StringSet computeLevels() +{ return StringSet{}; } #endif // HAVE_LIBCPUID -} +} // namespace nix diff --git a/src/libutil/config-global.cc b/src/libutil/config-global.cc index 94d715443..3b1bc5af9 100644 --- a/src/libutil/config-global.cc +++ b/src/libutil/config-global.cc @@ -62,4 +62,4 @@ ExperimentalFeatureSettings experimentalFeatureSettings; static GlobalConfig::Register rSettings(&experimentalFeatureSettings); -} +} // namespace nix diff --git a/src/libutil/configuration.cc b/src/libutil/configuration.cc index 314ae34db..4db863e1f 100644 --- a/src/libutil/configuration.cc +++ b/src/libutil/configuration.cc @@ -16,7 +16,8 @@ namespace nix { Config::Config(StringMap initials) : AbstractConfig(std::move(initials)) -{ } +{ +} bool Config::set(const std::string & name, const std::string & value) { @@ -54,8 +55,7 @@ void Config::addSetting(AbstractSetting * setting) for (auto & alias : setting->aliases) { if (auto i = unknownSettings.find(alias); i != unknownSettings.end()) { if (set) - warn("setting '%s' is set, but it's an alias of '%s' which is also set", - alias, setting->name); + warn("setting '%s' is set, but it's an alias of '%s' which is also set", alias, setting->name); else { setting->set(std::move(i->second)); setting->overridden = true; @@ -68,7 +68,8 @@ void Config::addSetting(AbstractSetting * setting) AbstractConfig::AbstractConfig(StringMap initials) : unknownSettings(std::move(initials)) -{ } +{ +} void AbstractConfig::warnUnknownSettings() { @@ -87,21 +88,24 @@ void AbstractConfig::reapplyUnknownSettings() void Config::getSettings(std::map & res, bool overriddenOnly) { for (const auto & opt : _settings) - if (!opt.second.isAlias - && (!overriddenOnly || opt.second.setting->overridden) + if (!opt.second.isAlias && (!overriddenOnly || opt.second.setting->overridden) && experimentalFeatureSettings.isEnabled(opt.second.setting->experimentalFeature)) res.emplace(opt.first, SettingInfo{opt.second.setting->to_string(), opt.second.setting->description}); } - /** - * Parse configuration in `contents`, and also the configuration files included from there, with their location specified relative to `path`. 
+ * Parse configuration in `contents`, and also the configuration files included from there, with their location
+ * specified relative to `path`.
  *
  * `contents` and `path` represent the file that is being parsed.
  * The result is only an intermediate list of key-value pairs of strings.
  * More parsing according to the settings-specific semantics is being done by `loadConfFile` in `libstore/globals.cc`.
-*/
-static void parseConfigFiles(const std::string & contents, const std::string & path, std::vector<std::pair<std::string, std::string>> & parsedContents) {
+ */
+static void parseConfigFiles(
+    const std::string & contents,
+    const std::string & path,
+    std::vector<std::pair<std::string, std::string>> & parsedContents)
+{
     unsigned int pos = 0;

     while (pos < contents.size()) {
@@ -114,7 +118,8 @@
             line = std::string(line, 0, hash);

         auto tokens = tokenizeString<std::vector<std::string>>(line);
-        if (tokens.empty()) continue;
+        if (tokens.empty())
+            continue;

         if (tokens.size() < 2)
             throw UsageError("syntax error in configuration line '%1%' in '%2%'", line, path);
@@ -160,7 +165,8 @@
     };
 }

-void AbstractConfig::applyConfig(const std::string & contents, const std::string & path) {
+void AbstractConfig::applyConfig(const std::string & contents, const std::string & path)
+{
     std::vector<std::pair<std::string, std::string>> parsedContents;

     parseConfigFiles(contents, path, parsedContents);
@@ -176,8 +182,7 @@
     // but at the time of writing it's not worth building that for just one thing
     for (const auto & [name, value] : parsedContents) {
         if (name != "experimental-features" && name != "extra-experimental-features") {
-            if ((name == "nix-path" || name == "extra-nix-path")
-                && getEnv("NIX_PATH").has_value()) {
+            if ((name == "nix-path" || name == "extra-nix-path") && getEnv("NIX_PATH").has_value()) {
                 continue;
             }
             set(name, value);
@@ -253,37 +258,42 @@ std::map<std::string, nlohmann::json> AbstractSetting::toJSONObject() const
     return obj;
 }

-void AbstractSetting::convertToArg(Args & args, const std::string & category)
+void AbstractSetting::convertToArg(Args & args, const std::string & category) {}
+
+bool AbstractSetting::isOverridden() const
 {
+    return overridden;
 }
-
-bool AbstractSetting::isOverridden() const { return overridden; }
-
-template<> std::string BaseSetting<std::string>::parse(const std::string & str) const
+template<>
+std::string BaseSetting<std::string>::parse(const std::string & str) const
 {
     return str;
 }

-template<> std::string BaseSetting<std::string>::to_string() const
+template<>
+std::string BaseSetting<std::string>::to_string() const
 {
     return value;
 }

-template<> std::optional<std::string> BaseSetting<std::optional<std::string>>::parse(const std::string & str) const
+template<>
+std::optional<std::string> BaseSetting<std::optional<std::string>>::parse(const std::string & str) const
 {
     if (str == "")
         return std::nullopt;
     else
-        return { str };
+        return {str};
 }

-template<> std::string BaseSetting<std::optional<std::string>>::to_string() const
+template<>
+std::string BaseSetting<std::optional<std::string>>::to_string() const
 {
     return value ? *value : "";
 }

-template<> bool BaseSetting<bool>::parse(const std::string & str) const
+template<>
+bool BaseSetting<bool>::parse(const std::string & str) const
 {
     if (str == "true" || str == "yes" || str == "1")
         return true;
@@ -293,12 +303,14 @@ template<> bool BaseSetting<bool>::parse(const std::string & str) const
     throw UsageError("Boolean setting '%s' has invalid value '%s'", name, str);
 }

-template<> std::string BaseSetting<bool>::to_string() const
+template<>
+std::string BaseSetting<bool>::to_string() const
 {
     return value ? "true" : "false";
 }

-template<> void BaseSetting<bool>::convertToArg(Args & args, const std::string & category)
+template<>
+void BaseSetting<bool>::convertToArg(Args & args, const std::string & category)
 {
     args.addFlag({
         .longName = name,
@@ -318,40 +330,48 @@ template<> void BaseSetting<bool>::convertToArg(Args & args, const std::string &
     });
 }

-template<> Strings BaseSetting<Strings>::parse(const std::string & str) const
+template<>
+Strings BaseSetting<Strings>::parse(const std::string & str) const
 {
     return tokenizeString(str);
 }

-template<> void BaseSetting<Strings>::appendOrSet(Strings newValue, bool append)
+template<>
+void BaseSetting<Strings>::appendOrSet(Strings newValue, bool append)
 {
-    if (!append) value.clear();
-    value.insert(value.end(), std::make_move_iterator(newValue.begin()),
-        std::make_move_iterator(newValue.end()));
+    if (!append)
+        value.clear();
+    value.insert(value.end(), std::make_move_iterator(newValue.begin()), std::make_move_iterator(newValue.end()));
 }

-template<> std::string BaseSetting<Strings>::to_string() const
+template<>
+std::string BaseSetting<Strings>::to_string() const
 {
     return concatStringsSep(" ", value);
 }

-template<> StringSet BaseSetting<StringSet>::parse(const std::string & str) const
+template<>
+StringSet BaseSetting<StringSet>::parse(const std::string & str) const
 {
     return tokenizeString<StringSet>(str);
 }

-template<> void BaseSetting<StringSet>::appendOrSet(StringSet newValue, bool append)
+template<>
+void BaseSetting<StringSet>::appendOrSet(StringSet newValue, bool append)
 {
-    if (!append) value.clear();
+    if (!append)
+        value.clear();
     value.insert(std::make_move_iterator(newValue.begin()), std::make_move_iterator(newValue.end()));
 }

-template<> std::string BaseSetting<StringSet>::to_string() const
+template<>
+std::string BaseSetting<StringSet>::to_string() const
 {
     return concatStringsSep(" ", value);
 }

-template<> std::set<ExperimentalFeature> BaseSetting<std::set<ExperimentalFeature>>::parse(const std::string & str) const
+template<>
+std::set<ExperimentalFeature> BaseSetting<std::set<ExperimentalFeature>>::parse(const std::string & str) const
 {
     std::set<ExperimentalFeature> res;
     for (auto & s : tokenizeString(str)) {
@@ -365,13 +385,16 @@ template<> std::set<ExperimentalFeature> BaseSetting<std::set<Experimental
-template<> void BaseSetting<std::set<ExperimentalFeature>>::appendOrSet(std::set<ExperimentalFeature> newValue, bool append)
+template<>
+void BaseSetting<std::set<ExperimentalFeature>>::appendOrSet(std::set<ExperimentalFeature> newValue, bool append)
 {
-    if (!append) value.clear();
+    if (!append)
+        value.clear();
     value.insert(std::make_move_iterator(newValue.begin()), std::make_move_iterator(newValue.end()));
 }

-template<> std::string BaseSetting<std::set<ExperimentalFeature>>::to_string() const
+template<>
+std::string BaseSetting<std::set<ExperimentalFeature>>::to_string() const
 {
     StringSet stringifiedXpFeatures;
     for (const auto & feature : value)
@@ -379,7 +402,8 @@ template<> std::string BaseSetting<std::set<ExperimentalFeature>>::to_string() c
     return concatStringsSep(" ", stringifiedXpFeatures);
 }

-template<> StringMap BaseSetting<StringMap>::parse(const std::string & str) const
+template<>
+StringMap BaseSetting<StringMap>::parse(const std::string & str) const
 {
     StringMap res;
     for (const auto & s : tokenizeString(str)) {
@@ -390,17 +414,23 @@ template<> StringMap BaseSetting<StringMap>::parse(const std::string & str) cons
     return res;
 }

-template<> void BaseSetting<StringMap>::appendOrSet(StringMap newValue, bool append)
+template<>
+void BaseSetting<StringMap>::appendOrSet(StringMap newValue, bool append)
 {
-    if (!append) value.clear();
+    if (!append)
+        value.clear();
     value.insert(std::make_move_iterator(newValue.begin()), std::make_move_iterator(newValue.end()));
 }

-template<> std::string BaseSetting<StringMap>::to_string() const
+template<>
+std::string BaseSetting<StringMap>::to_string() const
 {
-    return std::transform_reduce(value.cbegin(), value.cend(), std::string{},
-        [](const auto & l, const auto &r) { return l + " " + r; },
-        [](const auto & kvpair){ return kvpair.first + "=" + kvpair.second;
}); + return std::transform_reduce( + value.cbegin(), + value.cend(), + std::string{}, + [](const auto & l, const auto & r) { return l + " " + r; }, + [](const auto & kvpair) { return kvpair.first + "=" + kvpair.second; }); } template class BaseSetting; @@ -424,7 +454,8 @@ static Path parsePath(const AbstractSetting & s, const std::string & str) return canonPath(str); } -PathSetting::PathSetting(Config * options, +PathSetting::PathSetting( + Config * options, const Path & def, const std::string & name, const std::string & description, @@ -439,8 +470,8 @@ Path PathSetting::parse(const std::string & str) const return parsePath(*this, str); } - -OptionalPathSetting::OptionalPathSetting(Config * options, +OptionalPathSetting::OptionalPathSetting( + Config * options, const std::optional & def, const std::string & name, const std::string & description, @@ -450,7 +481,6 @@ OptionalPathSetting::OptionalPathSetting(Config * options, options->addSetting(this); } - std::optional OptionalPathSetting::parse(const std::string & str) const { if (str == "") @@ -459,7 +489,7 @@ std::optional OptionalPathSetting::parse(const std::string & str) const return parsePath(*this, str); } -void OptionalPathSetting::operator =(const std::optional & v) +void OptionalPathSetting::operator=(const std::optional & v) { this->assign(v); } @@ -483,7 +513,8 @@ bool ExperimentalFeatureSettings::isEnabled(const std::optional & feature) const { - if (feature) require(*feature); + if (feature) + require(*feature); } -} +} // namespace nix diff --git a/src/libutil/current-process.cc b/src/libutil/current-process.cc index 1afefbcb2..c7d3b78d0 100644 --- a/src/libutil/current-process.cc +++ b/src/libutil/current-process.cc @@ -10,28 +10,29 @@ #include #ifdef __APPLE__ -# include +# include #endif #ifdef __linux__ -# include -# include "nix/util/cgroup.hh" -# include "nix/util/linux-namespaces.hh" +# include +# include "nix/util/cgroup.hh" +# include "nix/util/linux-namespaces.hh" #endif #ifdef __FreeBSD__ -# include -# include +# include +# include #endif namespace nix { unsigned int getMaxCPU() { - #ifdef __linux__ +#ifdef __linux__ try { auto cgroupFS = getCgroupFS(); - if (!cgroupFS) return 0; + if (!cgroupFS) + return 0; auto cpuFile = *cgroupFS + "/" + getCurrentCgroup() + "/cpu.max"; @@ -45,17 +46,17 @@ unsigned int getMaxCPU() auto quota = cpuMaxParts[0]; auto period = cpuMaxParts[1]; if (quota != "max") - return std::ceil(std::stoi(quota) / std::stof(period)); - } catch (Error &) { ignoreExceptionInDestructor(lvlDebug); } - #endif + return std::ceil(std::stoi(quota) / std::stof(period)); + } catch (Error &) { + ignoreExceptionInDestructor(lvlDebug); + } +#endif return 0; } - ////////////////////////////////////////////////////////////////////// - #ifndef _WIN32 size_t savedStackSize = 0; @@ -73,9 +74,8 @@ void setStackSize(size_t stackSize) savedStackSize, stackSize, limit.rlim_max, - std::strerror(errno) - ).str() - ); + std::strerror(errno)) + .str()); } } } @@ -83,16 +83,16 @@ void setStackSize(size_t stackSize) void restoreProcessContext(bool restoreMounts) { - #ifndef _WIN32 +#ifndef _WIN32 unix::restoreSignals(); - #endif +#endif if (restoreMounts) { - #ifdef __linux__ +#ifdef __linux__ restoreMountNamespace(); - #endif +#endif } - #ifndef _WIN32 +#ifndef _WIN32 if (savedStackSize) { struct rlimit limit; if (getrlimit(RLIMIT_STACK, &limit) == 0) { @@ -100,27 +100,24 @@ void restoreProcessContext(bool restoreMounts) setrlimit(RLIMIT_STACK, &limit); } } - #endif +#endif } - 
////////////////////////////////////////////////////////////////////// - std::optional getSelfExe() { - static auto cached = []() -> std::optional - { - #if defined(__linux__) || defined(__GNU__) + static auto cached = []() -> std::optional { +#if defined(__linux__) || defined(__GNU__) return readLink("/proc/self/exe"); - #elif defined(__APPLE__) +#elif defined(__APPLE__) char buf[1024]; uint32_t size = sizeof(buf); if (_NSGetExecutablePath(buf, &size) == 0) return buf; else return std::nullopt; - #elif defined(__FreeBSD__) +#elif defined(__FreeBSD__) int sysctlName[] = { CTL_KERN, KERN_PROC, @@ -129,7 +126,7 @@ std::optional getSelfExe() }; size_t pathLen = 0; if (sysctl(sysctlName, sizeof(sysctlName) / sizeof(sysctlName[0]), nullptr, &pathLen, nullptr, 0) < 0) { - return std::nullopt; + return std::nullopt; } std::vector path(pathLen); @@ -138,11 +135,11 @@ std::optional getSelfExe() } return Path(path.begin(), path.end()); - #else +#else return std::nullopt; - #endif +#endif }(); return cached; } -} +} // namespace nix diff --git a/src/libutil/english.cc b/src/libutil/english.cc index e697b8c30..421682eee 100644 --- a/src/libutil/english.cc +++ b/src/libutil/english.cc @@ -2,11 +2,8 @@ namespace nix { -std::ostream & pluralize( - std::ostream & output, - unsigned int count, - const std::string_view single, - const std::string_view plural) +std::ostream & +pluralize(std::ostream & output, unsigned int count, const std::string_view single, const std::string_view plural) { if (count == 1) output << "1 " << single; @@ -15,4 +12,4 @@ std::ostream & pluralize( return output; } -} +} // namespace nix diff --git a/src/libutil/environment-variables.cc b/src/libutil/environment-variables.cc index adae17734..f2f24f7be 100644 --- a/src/libutil/environment-variables.cc +++ b/src/libutil/environment-variables.cc @@ -48,4 +48,4 @@ void replaceEnv(const StringMap & newEnv) setEnv(newEnvVar.first.c_str(), newEnvVar.second.c_str()); } -} +} // namespace nix diff --git a/src/libutil/error.cc b/src/libutil/error.cc index 049555ea3..b50b1f3be 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -15,13 +15,14 @@ namespace nix { void BaseError::addTrace(std::shared_ptr && e, HintFmt hint, TracePrint print) { - err.traces.push_front(Trace { .pos = std::move(e), .hint = hint, .print = print }); + err.traces.push_front(Trace{.pos = std::move(e), .hint = hint, .print = print}); } void throwExceptionSelfCheck() { // This is meant to be caught in initLibUtil() - throw Error("C++ exception handling is broken. This would appear to be a problem with the way Nix was compiled and/or linked and/or loaded."); + throw Error( + "C++ exception handling is broken. This would appear to be a problem with the way Nix was compiled and/or linked and/or loaded."); } // c++ std::exception descendants must have a 'const char* what()' function. @@ -40,7 +41,7 @@ const std::string & BaseError::calcWhat() const std::optional ErrorInfo::programName = std::nullopt; -std::ostream & operator <<(std::ostream & os, const HintFmt & hf) +std::ostream & operator<<(std::ostream & os, const HintFmt & hf) { return os << hf.str(); } @@ -48,7 +49,7 @@ std::ostream & operator <<(std::ostream & os, const HintFmt & hf) /** * An arbitrarily defined value comparison for the purpose of using traces in the key of a sorted container. 
*/ -inline std::strong_ordering operator<=>(const Trace& lhs, const Trace& rhs) +inline std::strong_ordering operator<=>(const Trace & lhs, const Trace & rhs) { // `std::shared_ptr` does not have value semantics for its comparison // functions, so we need to check for nulls and compare the dereferenced @@ -66,27 +67,16 @@ inline std::strong_ordering operator<=>(const Trace& lhs, const Trace& rhs) } // print lines of code to the ostream, indicating the error column. -void printCodeLines(std::ostream & out, - const std::string & prefix, - const Pos & errPos, - const LinesOfCode & loc) +void printCodeLines(std::ostream & out, const std::string & prefix, const Pos & errPos, const LinesOfCode & loc) { // previous line of code. if (loc.prevLineOfCode.has_value()) { - out << std::endl - << fmt("%1% %|2$5d|| %3%", - prefix, - (errPos.line - 1), - *loc.prevLineOfCode); + out << std::endl << fmt("%1% %|2$5d|| %3%", prefix, (errPos.line - 1), *loc.prevLineOfCode); } if (loc.errLineOfCode.has_value()) { // line of code containing the error. - out << std::endl - << fmt("%1% %|2$5d|| %3%", - prefix, - (errPos.line), - *loc.errLineOfCode); + out << std::endl << fmt("%1% %|2$5d|| %3%", prefix, (errPos.line), *loc.errLineOfCode); // error arrows for the column range. if (errPos.column > 0) { int start = errPos.column; @@ -97,21 +87,13 @@ void printCodeLines(std::ostream & out, std::string arrows("^"); - out << std::endl - << fmt("%1% |%2%" ANSI_RED "%3%" ANSI_NORMAL, - prefix, - spaces, - arrows); + out << std::endl << fmt("%1% |%2%" ANSI_RED "%3%" ANSI_NORMAL, prefix, spaces, arrows); } } // next line of code. if (loc.nextLineOfCode.has_value()) { - out << std::endl - << fmt("%1% %|2$5d|| %3%", - prefix, - (errPos.line + 1), - *loc.nextLineOfCode); + out << std::endl << fmt("%1% %|2$5d|| %3%", prefix, (errPos.line + 1), *loc.nextLineOfCode); } } @@ -122,10 +104,12 @@ static std::string indent(std::string_view indentFirst, std::string_view indentR while (!s.empty()) { auto end = s.find('\n'); - if (!first) res += "\n"; + if (!first) + res += "\n"; res += chomp(std::string(first ? indentFirst : indentRest) + std::string(s.substr(0, end))); first = false; - if (end == s.npos) break; + if (end == s.npos) + break; s = s.substr(end + 1); } @@ -146,7 +130,8 @@ static bool printUnknownLocations = getEnv("_NIX_EVAL_SHOW_UNKNOWN_LOCATIONS").h * * @return true if a position was printed. 
*/ -static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std::shared_ptr & pos) { +static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std::shared_ptr & pos) +{ bool hasPos = pos && *pos; if (hasPos) { oss << indent << ANSI_BLUE << "at " ANSI_WARNING << *pos << ANSI_NORMAL << ":"; @@ -161,11 +146,7 @@ static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std return hasPos; } -static void printTrace( - std::ostream & output, - const std::string_view & indent, - size_t & count, - const Trace & trace) +static void printTrace(std::ostream & output, const std::string_view & indent, size_t & count, const Trace & trace) { output << "\n" << "… " << trace.hint.str() << "\n"; @@ -188,7 +169,8 @@ void printSkippedTracesMaybe( printTrace(output, indent, count, trace); } } else { - output << "\n" << ANSI_WARNING "(" << skippedTraces.size() << " duplicate frames omitted)" ANSI_NORMAL << "\n"; + output << "\n" + << ANSI_WARNING "(" << skippedTraces.size() << " duplicate frames omitted)" ANSI_NORMAL << "\n"; // Clear the set of "seen" traces after printing a chunk of // `duplicate frames omitted`. // @@ -228,43 +210,43 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s { std::string prefix; switch (einfo.level) { - case Verbosity::lvlError: { - prefix = ANSI_RED "error"; - break; - } - case Verbosity::lvlNotice: { - prefix = ANSI_RED "note"; - break; - } - case Verbosity::lvlWarn: { - if (einfo.isFromExpr) - prefix = ANSI_WARNING "evaluation warning"; - else - prefix = ANSI_WARNING "warning"; - break; - } - case Verbosity::lvlInfo: { - prefix = ANSI_GREEN "info"; - break; - } - case Verbosity::lvlTalkative: { - prefix = ANSI_GREEN "talk"; - break; - } - case Verbosity::lvlChatty: { - prefix = ANSI_GREEN "chat"; - break; - } - case Verbosity::lvlVomit: { - prefix = ANSI_GREEN "vomit"; - break; - } - case Verbosity::lvlDebug: { - prefix = ANSI_WARNING "debug"; - break; - } - default: - assert(false); + case Verbosity::lvlError: { + prefix = ANSI_RED "error"; + break; + } + case Verbosity::lvlNotice: { + prefix = ANSI_RED "note"; + break; + } + case Verbosity::lvlWarn: { + if (einfo.isFromExpr) + prefix = ANSI_WARNING "evaluation warning"; + else + prefix = ANSI_WARNING "warning"; + break; + } + case Verbosity::lvlInfo: { + prefix = ANSI_GREEN "info"; + break; + } + case Verbosity::lvlTalkative: { + prefix = ANSI_GREEN "talk"; + break; + } + case Verbosity::lvlChatty: { + prefix = ANSI_GREEN "chat"; + break; + } + case Verbosity::lvlVomit: { + prefix = ANSI_GREEN "vomit"; + break; + } + case Verbosity::lvlDebug: { + prefix = ANSI_WARNING "debug"; + break; + } + default: + assert(false); } // FIXME: show the program name as part of the trace? 
@@ -383,7 +365,8 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s bool truncate = false; for (const auto & trace : einfo.traces) { - if (trace.hint.str().empty()) continue; + if (trace.hint.str().empty()) + continue; if (!showTrace && count > 3) { truncate = true; @@ -406,11 +389,13 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s } } - printSkippedTracesMaybe(oss, ellipsisIndent, count, skippedTraces, tracesSeen); if (truncate) { - oss << "\n" << ANSI_WARNING "(stack trace truncated; use '--show-trace' to show the full, detailed trace)" ANSI_NORMAL << "\n"; + oss << "\n" + << ANSI_WARNING + "(stack trace truncated; use '--show-trace' to show the full, detailed trace)" ANSI_NORMAL + << "\n"; } oss << "\n" << prefix; @@ -422,9 +407,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s auto suggestions = einfo.suggestions.trim(); if (!suggestions.suggestions.empty()) { - oss << "Did you mean " << - suggestions.trim() << - "?" << std::endl; + oss << "Did you mean " << suggestions.trim() << "?" << std::endl; } out << indent(prefix, std::string(filterANSIEscapes(prefix, true).size(), ' '), chomp(oss.str())); @@ -440,7 +423,8 @@ static void writeErr(std::string_view buf) while (!buf.empty()) { auto n = write(STDERR_FILENO, buf.data(), buf.size()); if (n < 0) { - if (errno == EINTR) continue; + if (errno == EINTR) + continue; abort(); } buf = buf.substr(n); @@ -449,7 +433,7 @@ static void writeErr(std::string_view buf) void panic(std::string_view msg) { - writeErr("\n\n" ANSI_RED "terminating due to unexpected unrecoverable internal error: " ANSI_NORMAL ); + writeErr("\n\n" ANSI_RED "terminating due to unexpected unrecoverable internal error: " ANSI_NORMAL); writeErr(msg); writeErr("\n"); abort(); @@ -464,4 +448,4 @@ void panic(const char * file, int line, const char * func) panic(std::string_view(buf, std::min(static_cast(sizeof(buf)), n))); } -} +} // namespace nix diff --git a/src/libutil/exit.cc b/src/libutil/exit.cc index 3c59e46af..313368ce4 100644 --- a/src/libutil/exit.cc +++ b/src/libutil/exit.cc @@ -4,4 +4,4 @@ namespace nix { Exit::~Exit() {} -} +} // namespace nix diff --git a/src/libutil/experimental-features.cc b/src/libutil/experimental-features.cc index 88f3783f5..60d6bf74d 100644 --- a/src/libutil/experimental-features.cc +++ b/src/libutil/experimental-features.cc @@ -317,7 +317,7 @@ constexpr std::array xpFeatureDetails static_assert( []() constexpr { for (auto [index, feature] : enumerate(xpFeatureDetails)) - if (index != (size_t)feature.tag) + if (index != (size_t) feature.tag) return false; return true; }(), @@ -342,8 +342,8 @@ const std::optional parseExperimentalFeature(const std::str std::string_view showExperimentalFeature(const ExperimentalFeature tag) { - assert((size_t)tag < xpFeatureDetails.size()); - return xpFeatureDetails[(size_t)tag].name; + assert((size_t) tag < xpFeatureDetails.size()); + return xpFeatureDetails[(size_t) tag].name; } nlohmann::json documentExperimentalFeatures() @@ -352,7 +352,8 @@ nlohmann::json documentExperimentalFeatures() for (auto & xpFeature : xpFeatureDetails) { std::stringstream docOss; docOss << stripIndentation(xpFeature.description); - docOss << fmt("\nRefer to [%1% tracking issue](%2%) for feature tracking.", xpFeature.name, xpFeature.trackingUrl); + docOss << fmt( + "\nRefer to [%1% tracking issue](%2%) for feature tracking.", xpFeature.name, xpFeature.trackingUrl); res[std::string{xpFeature.name}] = trim(docOss.str()); } return 
(nlohmann::json) res; @@ -368,11 +369,14 @@ std::set parseFeatures(const StringSet & rawFeatures) } MissingExperimentalFeature::MissingExperimentalFeature(ExperimentalFeature feature) - : Error("experimental Nix feature '%1%' is disabled; add '--extra-experimental-features %1%' to enable it", showExperimentalFeature(feature)) + : Error( + "experimental Nix feature '%1%' is disabled; add '--extra-experimental-features %1%' to enable it", + showExperimentalFeature(feature)) , missingFeature(feature) -{} +{ +} -std::ostream & operator <<(std::ostream & str, const ExperimentalFeature & feature) +std::ostream & operator<<(std::ostream & str, const ExperimentalFeature & feature) { return str << showExperimentalFeature(feature); } @@ -393,4 +397,4 @@ void from_json(const nlohmann::json & j, ExperimentalFeature & feature) throw Error("Unknown experimental feature '%s' in JSON input", input); } -} +} // namespace nix diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc index d95781691..be381abfd 100644 --- a/src/libutil/file-content-address.cc +++ b/src/libutil/file-content-address.cc @@ -25,7 +25,6 @@ FileSerialisationMethod parseFileSerialisationMethod(std::string_view input) throw UsageError("Unknown file serialiation method '%s', expect `flat` or `nar`", input); } - FileIngestionMethod parseFileIngestionMethod(std::string_view input) { if (input == "git") { @@ -39,7 +38,6 @@ FileIngestionMethod parseFileIngestionMethod(std::string_view input) } } - std::string_view renderFileSerialisationMethod(FileSerialisationMethod method) { switch (method) { @@ -52,14 +50,12 @@ std::string_view renderFileSerialisationMethod(FileSerialisationMethod method) } } - std::string_view renderFileIngestionMethod(FileIngestionMethod method) { switch (method) { case FileIngestionMethod::Flat: case FileIngestionMethod::NixArchive: - return renderFileSerialisationMethod( - static_cast(method)); + return renderFileSerialisationMethod(static_cast(method)); case FileIngestionMethod::Git: return "git"; default: @@ -67,12 +63,7 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method) } } - -void dumpPath( - const SourcePath & path, - Sink & sink, - FileSerialisationMethod method, - PathFilter & filter) +void dumpPath(const SourcePath & path, Sink & sink, FileSerialisationMethod method, PathFilter & filter) { switch (method) { case FileSerialisationMethod::Flat: @@ -84,12 +75,7 @@ void dumpPath( } } - -void restorePath( - const Path & path, - Source & source, - FileSerialisationMethod method, - bool startFsync) +void restorePath(const Path & path, Source & source, FileSerialisationMethod method, bool startFsync) { switch (method) { case FileSerialisationMethod::Flat: @@ -101,22 +87,15 @@ void restorePath( } } - -HashResult hashPath( - const SourcePath & path, - FileSerialisationMethod method, HashAlgorithm ha, - PathFilter & filter) +HashResult hashPath(const SourcePath & path, FileSerialisationMethod method, HashAlgorithm ha, PathFilter & filter) { - HashSink sink { ha }; + HashSink sink{ha}; dumpPath(path, sink, method, filter); return sink.finish(); } - -std::pair> hashPath( - const SourcePath & path, - FileIngestionMethod method, HashAlgorithm ht, - PathFilter & filter) +std::pair> +hashPath(const SourcePath & path, FileIngestionMethod method, HashAlgorithm ht, PathFilter & filter) { switch (method) { case FileIngestionMethod::Flat: @@ -130,4 +109,4 @@ std::pair> hashPath( assert(false); } -} +} // namespace nix diff --git a/src/libutil/file-descriptor.cc 
b/src/libutil/file-descriptor.cc index 9e0827442..6e07e6e88 100644 --- a/src/libutil/file-descriptor.cc +++ b/src/libutil/file-descriptor.cc @@ -4,9 +4,9 @@ #include #include #ifdef _WIN32 -# include -# include -# include "nix/util/windows-error.hh" +# include +# include +# include "nix/util/windows-error.hh" #endif namespace nix { @@ -17,7 +17,6 @@ void writeLine(Descriptor fd, std::string s) writeFull(fd, s); } - std::string drainFD(Descriptor fd, bool block, const size_t reserveSize) { // the parser needs two extra bytes to append terminating characters, other users will @@ -33,24 +32,27 @@ std::string drainFD(Descriptor fd, bool block, const size_t reserveSize) return std::move(sink.s); } - ////////////////////////////////////////////////////////////////////// +AutoCloseFD::AutoCloseFD() + : fd{INVALID_DESCRIPTOR} +{ +} -AutoCloseFD::AutoCloseFD() : fd{INVALID_DESCRIPTOR} {} - - -AutoCloseFD::AutoCloseFD(Descriptor fd) : fd{fd} {} +AutoCloseFD::AutoCloseFD(Descriptor fd) + : fd{fd} +{ +} // NOTE: This can be noexcept since we are just copying a value and resetting // the file descriptor in the rhs. -AutoCloseFD::AutoCloseFD(AutoCloseFD && that) noexcept : fd{that.fd} +AutoCloseFD::AutoCloseFD(AutoCloseFD && that) noexcept + : fd{that.fd} { that.fd = INVALID_DESCRIPTOR; } - -AutoCloseFD & AutoCloseFD::operator =(AutoCloseFD && that) +AutoCloseFD & AutoCloseFD::operator=(AutoCloseFD && that) { close(); fd = that.fd; @@ -58,7 +60,6 @@ AutoCloseFD & AutoCloseFD::operator =(AutoCloseFD && that) return *this; } - AutoCloseFD::~AutoCloseFD() { try { @@ -68,23 +69,21 @@ AutoCloseFD::~AutoCloseFD() } } - Descriptor AutoCloseFD::get() const { return fd; } - void AutoCloseFD::close() { if (fd != INVALID_DESCRIPTOR) { - if( + if ( #ifdef _WIN32 - ::CloseHandle(fd) + ::CloseHandle(fd) #else - ::close(fd) + ::close(fd) #endif - == -1) + == -1) /* This should never happen. */ throw NativeSysError("closing file descriptor %1%", fd); fd = INVALID_DESCRIPTOR; @@ -109,25 +108,21 @@ void AutoCloseFD::fsync() const } } - - void AutoCloseFD::startFsync() const { #ifdef __linux__ - if (fd != -1) { - /* Ignore failure, since fsync must be run later anyway. This is just a performance optimization. */ - ::sync_file_range(fd, 0, 0, SYNC_FILE_RANGE_WRITE); - } + if (fd != -1) { + /* Ignore failure, since fsync must be run later anyway. This is just a performance optimization. 
*/ + ::sync_file_range(fd, 0, 0, SYNC_FILE_RANGE_WRITE); + } #endif } - AutoCloseFD::operator bool() const { return fd != INVALID_DESCRIPTOR; } - Descriptor AutoCloseFD::release() { Descriptor oldFD = fd; @@ -135,14 +130,12 @@ Descriptor AutoCloseFD::release() return oldFD; } - ////////////////////////////////////////////////////////////////////// - void Pipe::close() { readSide.close(); writeSide.close(); } -} +} // namespace nix diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc index 79e6cf354..fba92dc8e 100644 --- a/src/libutil/file-system.cc +++ b/src/libutil/file-system.cc @@ -24,28 +24,30 @@ #include #ifdef __FreeBSD__ -# include -# include +# include +# include #endif #ifdef _WIN32 -# include +# include #endif namespace nix { -DirectoryIterator::DirectoryIterator(const std::filesystem::path& p) { +DirectoryIterator::DirectoryIterator(const std::filesystem::path & p) +{ try { // **Attempt to create the underlying directory_iterator** it_ = std::filesystem::directory_iterator(p); - } catch (const std::filesystem::filesystem_error& e) { + } catch (const std::filesystem::filesystem_error & e) { // **Catch filesystem_error and throw SysError** // Adapt the error message as needed for SysError throw SysError("cannot read directory %s", p); } } -DirectoryIterator& DirectoryIterator::operator++() { +DirectoryIterator & DirectoryIterator::operator++() +{ // **Attempt to increment the underlying iterator** std::error_code ec; it_.increment(ec); @@ -64,10 +66,9 @@ DirectoryIterator& DirectoryIterator::operator++() { bool isAbsolute(PathView path) { - return std::filesystem::path { path }.is_absolute(); + return std::filesystem::path{path}.is_absolute(); } - Path absPath(PathView path, std::optional dir, bool resolveSymlinks) { std::string scratch; @@ -82,7 +83,7 @@ Path absPath(PathView path, std::optional dir, bool resolveSymlinks) #ifdef __GNU__ /* GNU (aka. GNU/Hurd) doesn't have any limitation on path lengths and doesn't define `PATH_MAX'. 
*/ - char *buf = getcwd(NULL, 0); + char * buf = getcwd(NULL, 0); if (buf == NULL) #else char buf[PATH_MAX]; @@ -113,7 +114,7 @@ Path canonPath(PathView path, bool resolveSymlinks) throw Error("not an absolute path: '%1%'", path); // For Windows - auto rootName = std::filesystem::path { path }.root_name(); + auto rootName = std::filesystem::path{path}.root_name(); /* This just exists because we cannot set the target of `remaining` (the callback parameter) directly to a newly-constructed string, @@ -125,9 +126,7 @@ Path canonPath(PathView path, bool resolveSymlinks) unsigned int followCount = 0, maxFollow = 1024; auto ret = canonPathInner>( - path, - [&followCount, &temp, maxFollow, resolveSymlinks] - (std::string & result, std::string_view & remaining) { + path, [&followCount, &temp, maxFollow, resolveSymlinks](std::string & result, std::string_view & remaining) { if (resolveSymlinks && std::filesystem::is_symlink(result)) { if (++followCount >= maxFollow) throw Error("infinite symlink recursion in path '%1%'", remaining); @@ -151,7 +150,6 @@ Path canonPath(PathView path, bool resolveSymlinks) return ret; } - Path dirOf(const PathView path) { Path::size_type pos = OsPathTrait::rfindPathSep(path); @@ -160,7 +158,6 @@ Path dirOf(const PathView path) return std::filesystem::path{path}.parent_path().string(); } - std::string_view baseNameOf(std::string_view path) { if (path.empty()) @@ -179,7 +176,6 @@ std::string_view baseNameOf(std::string_view path) return path.substr(pos, last - pos + 1); } - bool isInDir(const std::filesystem::path & path, const std::filesystem::path & dir) { /* Note that while the standard doesn't guarantee this, the @@ -190,13 +186,11 @@ bool isInDir(const std::filesystem::path & path, const std::filesystem::path & d return !rel.empty() && rel.native()[0] != OS_STR('.'); } - bool isDirOrInDir(const std::filesystem::path & path, const std::filesystem::path & dir) { return path == dir || isInDir(path, dir); } - struct stat stat(const Path & path) { struct stat st; @@ -206,9 +200,9 @@ struct stat stat(const Path & path) } #ifdef _WIN32 -# define STAT stat +# define STAT stat #else -# define STAT lstat +# define STAT lstat #endif struct stat lstat(const Path & path) @@ -219,12 +213,10 @@ struct stat lstat(const Path & path) return st; } - std::optional maybeLstat(const Path & path) { std::optional st{std::in_place}; - if (STAT(path.c_str(), &*st)) - { + if (STAT(path.c_str(), &*st)) { if (errno == ENOENT || errno == ENOTDIR) st.reset(); else @@ -233,7 +225,6 @@ std::optional maybeLstat(const Path & path) return st; } - bool pathExists(const std::filesystem::path & path) { return maybeLstat(path.string()).has_value(); @@ -245,27 +236,28 @@ bool pathAccessible(const std::filesystem::path & path) return pathExists(path.string()); } catch (SysError & e) { // swallow EPERM - if (e.errNo == EPERM) return false; + if (e.errNo == EPERM) + return false; throw; } } - Path readLink(const Path & path) { checkInterrupt(); return std::filesystem::read_symlink(path).string(); } - std::string readFile(const Path & path) { - AutoCloseFD fd = toDescriptor(open(path.c_str(), O_RDONLY + AutoCloseFD fd = toDescriptor(open( + path.c_str(), + O_RDONLY // TODO #ifndef _WIN32 - | O_CLOEXEC + | O_CLOEXEC #endif - )); + )); if (!fd) throw SysError("opening file '%1%'", path); return readFile(fd.get()); @@ -273,7 +265,7 @@ std::string readFile(const Path & path) std::string readFile(const std::filesystem::path & path) { - return readFile(os_string_to_string(PathViewNG { path })); + return 
readFile(os_string_to_string(PathViewNG{path})); } void readFile(const Path & path, Sink & sink, bool memory_map) @@ -292,26 +284,30 @@ void readFile(const Path & path, Sink & sink, bool memory_map) } // Stream the file instead if memory-mapping fails or is disabled. - AutoCloseFD fd = toDescriptor(open(path.c_str(), O_RDONLY + AutoCloseFD fd = toDescriptor(open( + path.c_str(), + O_RDONLY // TODO #ifndef _WIN32 - | O_CLOEXEC + | O_CLOEXEC #endif - )); + )); if (!fd) throw SysError("opening file '%s'", path); drainFD(fd.get(), sink); } - void writeFile(const Path & path, std::string_view s, mode_t mode, FsSync sync) { - AutoCloseFD fd = toDescriptor(open(path.c_str(), O_WRONLY | O_TRUNC | O_CREAT + AutoCloseFD fd = toDescriptor(open( + path.c_str(), + O_WRONLY | O_TRUNC | O_CREAT // TODO #ifndef _WIN32 - | O_CLOEXEC + | O_CLOEXEC #endif - , mode)); + , + mode)); if (!fd) throw SysError("opening file '%1%'", path); @@ -338,12 +334,15 @@ void writeFile(AutoCloseFD & fd, const Path & origPath, std::string_view s, mode void writeFile(const Path & path, Source & source, mode_t mode, FsSync sync) { - AutoCloseFD fd = toDescriptor(open(path.c_str(), O_WRONLY | O_TRUNC | O_CREAT + AutoCloseFD fd = toDescriptor(open( + path.c_str(), + O_WRONLY | O_TRUNC | O_CREAT // TODO #ifndef _WIN32 - | O_CLOEXEC + | O_CLOEXEC #endif - , mode)); + , + mode)); if (!fd) throw SysError("opening file '%1%'", path); @@ -354,7 +353,9 @@ void writeFile(const Path & path, Source & source, mode_t mode, FsSync sync) try { auto n = source.read(buf.data(), buf.size()); writeFull(fd.get(), {buf.data(), n}); - } catch (EndOfFile &) { break; } + } catch (EndOfFile &) { + break; + } } } catch (Error & e) { e.addTrace({}, "writing file '%1%'", path); @@ -377,11 +378,11 @@ void syncParent(const Path & path) } #ifdef __FreeBSD__ -#define MOUNTEDPATHS_PARAM , std::set &mountedPaths -#define MOUNTEDPATHS_ARG , mountedPaths +# define MOUNTEDPATHS_PARAM , std::set & mountedPaths +# define MOUNTEDPATHS_ARG , mountedPaths #else -#define MOUNTEDPATHS_PARAM -#define MOUNTEDPATHS_ARG +# define MOUNTEDPATHS_PARAM +# define MOUNTEDPATHS_ARG #endif void recursiveSync(const Path & path) @@ -428,27 +429,30 @@ void recursiveSync(const Path & path) } } - -static void _deletePath(Descriptor parentfd, const std::filesystem::path & path, uint64_t & bytesFreed, std::exception_ptr & ex MOUNTEDPATHS_PARAM) +static void _deletePath( + Descriptor parentfd, + const std::filesystem::path & path, + uint64_t & bytesFreed, + std::exception_ptr & ex MOUNTEDPATHS_PARAM) { #ifndef _WIN32 checkInterrupt(); -#ifdef __FreeBSD__ +# ifdef __FreeBSD__ // In case of emergency (unmount fails for some reason) not recurse into mountpoints. // This prevents us from tearing up the nullfs-mounted nix store. if (mountedPaths.find(path) != mountedPaths.end()) { return; } -#endif +# endif std::string name(path.filename()); assert(name != "." && name != ".." && !name.empty()); struct stat st; - if (fstatat(parentfd, name.c_str(), &st, - AT_SYMLINK_NOFOLLOW) == -1) { - if (errno == ENOENT) return; + if (fstatat(parentfd, name.c_str(), &st, AT_SYMLINK_NOFOLLOW) == -1) { + if (errno == ENOENT) + return; throw SysError("getting status of %1%", path); } @@ -456,23 +460,23 @@ static void _deletePath(Descriptor parentfd, const std::filesystem::path & path, /* We are about to delete a file. Will it likely free space? */ switch (st.st_nlink) { - /* Yes: last link. 
*/ - case 1: - bytesFreed += st.st_size; - break; - /* Maybe: yes, if 'auto-optimise-store' or manual optimisation - was performed. Instead of checking for real let's assume - it's an optimised file and space will be freed. + /* Yes: last link. */ + case 1: + bytesFreed += st.st_size; + break; + /* Maybe: yes, if 'auto-optimise-store' or manual optimisation + was performed. Instead of checking for real let's assume + it's an optimised file and space will be freed. - In worst case we will double count on freed space for files - with exactly two hardlinks for unoptimised packages. - */ - case 2: - bytesFreed += st.st_size; - break; - /* No: 3+ links. */ - default: - break; + In worst case we will double count on freed space for files + with exactly two hardlinks for unoptimised packages. + */ + case 2: + bytesFreed += st.st_size; + break; + /* No: 3+ links. */ + default: + break; } } @@ -495,15 +499,18 @@ static void _deletePath(Descriptor parentfd, const std::filesystem::path & path, while (errno = 0, dirent = readdir(dir.get())) { /* sic */ checkInterrupt(); std::string childName = dirent->d_name; - if (childName == "." || childName == "..") continue; + if (childName == "." || childName == "..") + continue; _deletePath(dirfd(dir.get()), path / childName, bytesFreed, ex MOUNTEDPATHS_ARG); } - if (errno) throw SysError("reading directory %1%", path); + if (errno) + throw SysError("reading directory %1%", path); } int flags = S_ISDIR(st.st_mode) ? AT_REMOVEDIR : 0; if (unlinkat(parentfd, name.c_str(), flags) == -1) { - if (errno == ENOENT) return; + if (errno == ENOENT) + return; try { throw SysError("cannot unlink %1%", path); } catch (...) { @@ -526,7 +533,8 @@ static void _deletePath(const std::filesystem::path & path, uint64_t & bytesFree AutoCloseFD dirfd = toDescriptor(open(path.parent_path().string().c_str(), O_RDONLY)); if (!dirfd) { - if (errno == ENOENT) return; + if (errno == ENOENT) + return; throw SysError("opening directory %s", path.parent_path()); } @@ -538,7 +546,6 @@ static void _deletePath(const std::filesystem::path & path, uint64_t & bytesFree std::rethrow_exception(ex); } - void deletePath(const std::filesystem::path & path) { uint64_t dummy; @@ -547,30 +554,32 @@ void deletePath(const std::filesystem::path & path) void createDir(const Path & path, mode_t mode) { - if (mkdir(path.c_str() + if (mkdir( + path.c_str() #ifndef _WIN32 - , mode + , + mode #endif - ) == -1) + ) + == -1) throw SysError("creating directory '%1%'", path); } void createDirs(const std::filesystem::path & path) { try { - std::filesystem::create_directories(path); + std::filesystem::create_directories(path); } catch (std::filesystem::filesystem_error & e) { throw SysError("creating directory '%1%'", path.string()); } } - void deletePath(const std::filesystem::path & path, uint64_t & bytesFreed) { - //Activity act(*logger, lvlDebug, "recursively deleting path '%1%'", path); + // Activity act(*logger, lvlDebug, "recursively deleting path '%1%'", path); #ifdef __FreeBSD__ std::set mountedPaths; - struct statfs *mntbuf; + struct statfs * mntbuf; int count; if ((count = getmntinfo(&mntbuf, MNT_WAIT)) < 0) { throw SysError("getmntinfo"); @@ -584,12 +593,15 @@ void deletePath(const std::filesystem::path & path, uint64_t & bytesFreed) _deletePath(path, bytesFreed MOUNTEDPATHS_ARG); } - ////////////////////////////////////////////////////////////////////// -AutoDelete::AutoDelete() : del{false} {} +AutoDelete::AutoDelete() + : del{false} +{ +} -AutoDelete::AutoDelete(const std::filesystem::path & p, bool 
recursive) : _path(p) +AutoDelete::AutoDelete(const std::filesystem::path & p, bool recursive) + : _path(p) { del = true; this->recursive = recursive; @@ -615,7 +627,8 @@ void AutoDelete::cancel() del = false; } -void AutoDelete::reset(const std::filesystem::path & p, bool recursive) { +void AutoDelete::reset(const std::filesystem::path & p, bool recursive) +{ _path = p; this->recursive = recursive; del = true; @@ -624,9 +637,16 @@ void AutoDelete::reset(const std::filesystem::path & p, bool recursive) { ////////////////////////////////////////////////////////////////////// #ifdef __FreeBSD__ -AutoUnmount::AutoUnmount() : del{false} {} +AutoUnmount::AutoUnmount() + : del{false} +{ +} -AutoUnmount::AutoUnmount(Path &p) : path(p), del(true) {} +AutoUnmount::AutoUnmount(Path & p) + : path(p) + , del(true) +{ +} AutoUnmount::~AutoUnmount() { @@ -649,7 +669,8 @@ void AutoUnmount::cancel() ////////////////////////////////////////////////////////////////////// -std::string defaultTempDir() { +std::string defaultTempDir() +{ return getEnvNonEmpty("TMPDIR").value_or("/tmp"); } @@ -658,11 +679,14 @@ Path createTempDir(const Path & tmpRoot, const Path & prefix, mode_t mode) while (1) { checkInterrupt(); Path tmpDir = makeTempPath(tmpRoot, prefix); - if (mkdir(tmpDir.c_str() + if (mkdir( + tmpDir.c_str() #ifndef _WIN32 // TODO abstract mkdir perms for Windows - , mode + , + mode #endif - ) == 0) { + ) + == 0) { #ifdef __FreeBSD__ /* Explicitly set the group of the directory. This is to work around around problems caused by BSD's group @@ -682,7 +706,6 @@ Path createTempDir(const Path & tmpRoot, const Path & prefix, mode_t mode) } } - std::pair createTempFile(const Path & prefix) { Path tmpl(defaultTempDir() + "/" + prefix + ".XXXXXX"); @@ -717,24 +740,25 @@ void createSymlink(const Path & target, const Path & link) void replaceSymlink(const std::filesystem::path & target, const std::filesystem::path & link) { for (unsigned int n = 0; true; n++) { - auto tmp = link.parent_path() /std::filesystem::path{fmt(".%d_%s", n, link.filename().string())}; + auto tmp = link.parent_path() / std::filesystem::path{fmt(".%d_%s", n, link.filename().string())}; tmp = tmp.lexically_normal(); try { std::filesystem::create_symlink(target, tmp); } catch (std::filesystem::filesystem_error & e) { - if (e.code() == std::errc::file_exists) continue; + if (e.code() == std::errc::file_exists) + continue; throw SysError("creating symlink %1% -> %2%", tmp, target); } try { std::filesystem::rename(tmp, link); } catch (std::filesystem::filesystem_error & e) { - if (e.code() == std::errc::file_exists) continue; + if (e.code() == std::errc::file_exists) + continue; throw SysError("renaming %1% to %2%", tmp, link); } - break; } } @@ -746,15 +770,19 @@ void setWriteTime(const std::filesystem::path & path, const struct stat & st) void copyFile(const std::filesystem::path & from, const std::filesystem::path & to, bool andDelete) { - auto fromStatus =std::filesystem::symlink_status(from); + auto fromStatus = std::filesystem::symlink_status(from); // Mark the directory as writable so that we can delete its children - if (andDelete &&std::filesystem::is_directory(fromStatus)) { - std::filesystem::permissions(from, std::filesystem::perms::owner_write, std::filesystem::perm_options::add | std::filesystem::perm_options::nofollow); + if (andDelete && std::filesystem::is_directory(fromStatus)) { + std::filesystem::permissions( + from, + std::filesystem::perms::owner_write, + std::filesystem::perm_options::add | 
std::filesystem::perm_options::nofollow); } - if (std::filesystem::is_symlink(fromStatus) ||std::filesystem::is_regular_file(fromStatus)) { - std::filesystem::copy(from, to, std::filesystem::copy_options::copy_symlinks | std::filesystem::copy_options::overwrite_existing); + if (std::filesystem::is_symlink(fromStatus) || std::filesystem::is_regular_file(fromStatus)) { + std::filesystem::copy( + from, to, std::filesystem::copy_options::copy_symlinks | std::filesystem::copy_options::overwrite_existing); } else if (std::filesystem::is_directory(fromStatus)) { std::filesystem::create_directory(to); for (auto & entry : DirectoryIterator(from)) { @@ -767,7 +795,10 @@ void copyFile(const std::filesystem::path & from, const std::filesystem::path & setWriteTime(to, lstat(from.string().c_str())); if (andDelete) { if (!std::filesystem::is_symlink(fromStatus)) - std::filesystem::permissions(from, std::filesystem::perms::owner_write, std::filesystem::perm_options::add | std::filesystem::perm_options::nofollow); + std::filesystem::permissions( + from, + std::filesystem::perms::owner_write, + std::filesystem::perm_options::add | std::filesystem::perm_options::nofollow); std::filesystem::remove(from); } } @@ -781,9 +812,8 @@ void moveFile(const Path & oldName, const Path & newName) auto newPath = std::filesystem::path(newName); // For the move to be as atomic as possible, copy to a temporary // directory - std::filesystem::path temp = createTempDir( - os_string_to_string(PathViewNG { newPath.parent_path() }), - "rename-tmp"); + std::filesystem::path temp = + createTempDir(os_string_to_string(PathViewNG{newPath.parent_path()}), "rename-tmp"); Finally removeTemp = [&]() { std::filesystem::remove(temp); }; auto tempCopyTarget = temp / "copy-target"; if (e.code().value() == EXDEV) { @@ -791,31 +821,34 @@ void moveFile(const Path & oldName, const Path & newName) warn("can’t rename %s as %s, copying instead", oldName, newName); copyFile(oldPath, tempCopyTarget, true); std::filesystem::rename( - os_string_to_string(PathViewNG { tempCopyTarget }), - os_string_to_string(PathViewNG { newPath })); + os_string_to_string(PathViewNG{tempCopyTarget}), os_string_to_string(PathViewNG{newPath})); } } } ////////////////////////////////////////////////////////////////////// -bool isExecutableFileAmbient(const std::filesystem::path & exe) { +bool isExecutableFileAmbient(const std::filesystem::path & exe) +{ // Check file type, because directory being executable means // something completely different. // `is_regular_file` follows symlinks before checking. 
return std::filesystem::is_regular_file(exe) - && access(exe.string().c_str(), + && access( + exe.string().c_str(), #ifdef WIN32 - 0 // TODO do better + 0 // TODO do better #else - X_OK + X_OK #endif - ) == 0; + ) + == 0; } std::filesystem::path makeParentCanonical(const std::filesystem::path & rawPath) { - std::filesystem::path path(absPath(rawPath));; + std::filesystem::path path(absPath(rawPath)); + ; try { auto parent = path.parent_path(); if (parent == path) { diff --git a/src/libutil/freebsd/freebsd-jail.cc b/src/libutil/freebsd/freebsd-jail.cc index 575f9287e..90fbe0cd6 100644 --- a/src/libutil/freebsd/freebsd-jail.cc +++ b/src/libutil/freebsd/freebsd-jail.cc @@ -48,5 +48,5 @@ void AutoRemoveJail::reset(int j) ////////////////////////////////////////////////////////////////////// -} +} // namespace nix #endif diff --git a/src/libutil/freebsd/include/nix/util/freebsd-jail.hh b/src/libutil/freebsd/include/nix/util/freebsd-jail.hh index cb5abc511..33a86a398 100644 --- a/src/libutil/freebsd/include/nix/util/freebsd-jail.hh +++ b/src/libutil/freebsd/include/nix/util/freebsd-jail.hh @@ -17,4 +17,4 @@ public: void reset(int j); }; -} +} // namespace nix diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc index 7b8fc3b2a..6efd5e0c7 100644 --- a/src/libutil/fs-sink.cc +++ b/src/libutil/fs-sink.cc @@ -5,47 +5,38 @@ #include "nix/util/fs-sink.hh" #ifdef _WIN32 -# include -# include "nix/util/file-path.hh" -# include "nix/util/windows-error.hh" +# include +# include "nix/util/file-path.hh" +# include "nix/util/windows-error.hh" #endif #include "util-config-private.hh" namespace nix { -void copyRecursive( - SourceAccessor & accessor, const CanonPath & from, - FileSystemObjectSink & sink, const CanonPath & to) +void copyRecursive(SourceAccessor & accessor, const CanonPath & from, FileSystemObjectSink & sink, const CanonPath & to) { auto stat = accessor.lstat(from); switch (stat.type) { - case SourceAccessor::tSymlink: - { + case SourceAccessor::tSymlink: { sink.createSymlink(to, accessor.readLink(from)); break; } - case SourceAccessor::tRegular: - { + case SourceAccessor::tRegular: { sink.createRegularFile(to, [&](CreateRegularFileSink & crf) { if (stat.isExecutable) crf.isExecutable(); - accessor.readFile(from, crf, [&](uint64_t size) { - crf.preallocateContents(size); - }); + accessor.readFile(from, crf, [&](uint64_t size) { crf.preallocateContents(size); }); }); break; } - case SourceAccessor::tDirectory: - { + case SourceAccessor::tDirectory: { sink.createDirectory(to); for (auto & [name, _] : accessor.readDirectory(from)) { - copyRecursive( - accessor, from / name, - sink, to / name); + copyRecursive(accessor, from / name, sink, to / name); break; } break; @@ -61,11 +52,10 @@ void copyRecursive( } } - struct RestoreSinkSettings : Config { - Setting preallocateContents{this, false, "preallocate-contents", - "Whether to preallocate files when writing objects with known size."}; + Setting preallocateContents{ + this, false, "preallocate-contents", "Whether to preallocate files when writing objects with known size."}; }; static RestoreSinkSettings restoreSinkSettings; @@ -87,7 +77,8 @@ void RestoreSink::createDirectory(const CanonPath & path) throw Error("path '%s' already exists", p.string()); }; -struct RestoreRegularFile : CreateRegularFileSink { +struct RestoreRegularFile : CreateRegularFileSink +{ AutoCloseFD fd; bool startFsync = false; @@ -101,7 +92,7 @@ struct RestoreRegularFile : CreateRegularFileSink { fd.startFsync(); } - void operator () (std::string_view data) override; + 
void operator()(std::string_view data) override; void isExecutable() override; void preallocateContents(uint64_t size) override; }; @@ -114,12 +105,20 @@ void RestoreSink::createRegularFile(const CanonPath & path, std::function func) { - struct CRF : CreateRegularFileSink { + struct CRF : CreateRegularFileSink + { RegularFileSink & back; - CRF(RegularFileSink & back) : back(back) {} - void operator () (std::string_view data) override + + CRF(RegularFileSink & back) + : back(back) + { + } + + void operator()(std::string_view data) override { back.sink(data); } + void isExecutable() override {} - } crf { *this }; + } crf{*this}; + func(crf); } - -void NullFileSystemObjectSink::createRegularFile(const CanonPath & path, std::function func) +void NullFileSystemObjectSink::createRegularFile( + const CanonPath & path, std::function func) { - struct : CreateRegularFileSink { - void operator () (std::string_view data) override {} + struct : CreateRegularFileSink + { + void operator()(std::string_view data) override {} + void isExecutable() override {} } crf; + // Even though `NullFileSystemObjectSink` doesn't do anything, it's important // that we call the function, to e.g. advance the parser using this // sink. func(crf); } -} +} // namespace nix diff --git a/src/libutil/git.cc b/src/libutil/git.cc index edeef71b7..e87d5550b 100644 --- a/src/libutil/git.cc +++ b/src/libutil/git.cc @@ -17,32 +17,31 @@ namespace nix::git { using namespace nix; using namespace std::string_literals; -std::optional decodeMode(RawMode m) { +std::optional decodeMode(RawMode m) +{ switch (m) { - case (RawMode) Mode::Directory: - case (RawMode) Mode::Executable: - case (RawMode) Mode::Regular: - case (RawMode) Mode::Symlink: - return (Mode) m; - default: - return std::nullopt; + case (RawMode) Mode::Directory: + case (RawMode) Mode::Executable: + case (RawMode) Mode::Regular: + case (RawMode) Mode::Symlink: + return (Mode) m; + default: + return std::nullopt; } } - static std::string getStringUntil(Source & source, char byte) { std::string s; - char n[1] = { 0 }; - source(std::string_view { n, 1 }); + char n[1] = {0}; + source(std::string_view{n, 1}); while (*n != byte) { s += *n; - source(std::string_view { n, 1 }); + source(std::string_view{n, 1}); } return s; } - static std::string getString(Source & source, int n) { std::string v; @@ -75,7 +74,7 @@ void parseBlob( while (left) { checkInterrupt(); - buf.resize(std::min((unsigned long long)buf.capacity(), left)); + buf.resize(std::min((unsigned long long) buf.capacity(), left)); source(buf); crf(buf); left -= buf.size(); @@ -93,16 +92,13 @@ void parseBlob( doRegularFile(true); break; - case BlobMode::Symlink: - { + case BlobMode::Symlink: { std::string target; target.resize(size, '0'); target.reserve(size); for (size_t n = 0; n < target.size();) { checkInterrupt(); - n += source.read( - const_cast(target.c_str()) + n, - target.size() - n); + n += source.read(const_cast(target.c_str()) + n, target.size() - n); } sink.createSymlink(sinkPath, target); @@ -147,16 +143,16 @@ void parseTree( Hash hash(HashAlgorithm::SHA1); std::copy(hashs.begin(), hashs.end(), hash.hash); - hook(CanonPath{name}, TreeEntry { - .mode = mode, - .hash = hash, - }); + hook( + CanonPath{name}, + TreeEntry{ + .mode = mode, + .hash = hash, + }); } } -ObjectType parseObjectType( - Source & source, - const ExperimentalFeatureSettings & xpSettings) +ObjectType parseObjectType(Source & source, const ExperimentalFeatureSettings & xpSettings) { xpSettings.require(Xp::GitHashing); @@ -166,7 +162,8 @@ 
ObjectType parseObjectType( return ObjectType::Blob; } else if (type == "tree ") { return ObjectType::Tree; - } else throw Error("input doesn't look like a Git object"); + } else + throw Error("input doesn't look like a Git object"); } void parse( @@ -193,23 +190,26 @@ void parse( }; } - std::optional convertMode(SourceAccessor::Type type) { switch (type) { - case SourceAccessor::tSymlink: return Mode::Symlink; - case SourceAccessor::tRegular: return Mode::Regular; - case SourceAccessor::tDirectory: return Mode::Directory; + case SourceAccessor::tSymlink: + return Mode::Symlink; + case SourceAccessor::tRegular: + return Mode::Regular; + case SourceAccessor::tDirectory: + return Mode::Directory; case SourceAccessor::tChar: case SourceAccessor::tBlock: case SourceAccessor::tSocket: - case SourceAccessor::tFifo: return std::nullopt; + case SourceAccessor::tFifo: + return std::nullopt; case SourceAccessor::tUnknown: - default: unreachable(); + default: + unreachable(); } } - void restore(FileSystemObjectSink & sink, Source & source, std::function hook) { parse(sink, CanonPath::root, source, BlobMode::Regular, [&](CanonPath name, TreeEntry entry) { @@ -217,35 +217,30 @@ void restore(FileSystemObjectSink & sink, Source & source, std::functionlstat(from); auto gotOpt = convertMode(stat.type); if (!gotOpt) - throw Error("file '%s' (git hash %s) has an unsupported type", + throw Error( + "file '%s' (git hash %s) has an unsupported type", from, entry.hash.to_string(HashFormat::Base16, false)); auto & got = *gotOpt; if (got != entry.mode) - throw Error("git mode of file '%s' (git hash %s) is %o but expected %o", + throw Error( + "git mode of file '%s' (git hash %s) is %o but expected %o", from, entry.hash.to_string(HashFormat::Base16, false), (RawMode) got, (RawMode) entry.mode); - copyRecursive( - *accessor, from, - sink, name); + copyRecursive(*accessor, from, sink, name); }); } - -void dumpBlobPrefix( - uint64_t size, Sink & sink, - const ExperimentalFeatureSettings & xpSettings) +void dumpBlobPrefix(uint64_t size, Sink & sink, const ExperimentalFeatureSettings & xpSettings) { xpSettings.require(Xp::GitHashing); auto s = fmt("blob %d\0"s, std::to_string(size)); sink(s); } - -void dumpTree(const Tree & entries, Sink & sink, - const ExperimentalFeatureSettings & xpSettings) +void dumpTree(const Tree & entries, Sink & sink, const ExperimentalFeatureSettings & xpSettings) { xpSettings.require(Xp::GitHashing); @@ -270,7 +265,6 @@ void dumpTree(const Tree & entries, Sink & sink, sink(v1); } - Mode dump( const SourcePath & path, Sink & sink, @@ -281,22 +275,17 @@ Mode dump( auto st = path.lstat(); switch (st.type) { - case SourceAccessor::tRegular: - { - path.readFile(sink, [&](uint64_t size) { - dumpBlobPrefix(size, sink, xpSettings); - }); - return st.isExecutable - ? Mode::Executable - : Mode::Regular; + case SourceAccessor::tRegular: { + path.readFile(sink, [&](uint64_t size) { dumpBlobPrefix(size, sink, xpSettings); }); + return st.isExecutable ? 
Mode::Executable : Mode::Regular; } - case SourceAccessor::tDirectory: - { + case SourceAccessor::tDirectory: { Tree entries; for (auto & [name, _] : path.readDirectory()) { auto child = path / name; - if (!filter(child.path.abs())) continue; + if (!filter(child.path.abs())) + continue; auto entry = hook(child); @@ -310,8 +299,7 @@ Mode dump( return Mode::Directory; } - case SourceAccessor::tSymlink: - { + case SourceAccessor::tSymlink: { auto target = path.readLink(); dumpBlobPrefix(target.size(), sink, xpSettings); sink(target); @@ -328,11 +316,7 @@ Mode dump( } } - -TreeEntry dumpHash( - HashAlgorithm ha, - const SourcePath & path, - PathFilter & filter) +TreeEntry dumpHash(HashAlgorithm ha, const SourcePath & path, PathFilter & filter) { std::function hook; hook = [&](const SourcePath & path) -> TreeEntry { @@ -348,7 +332,6 @@ TreeEntry dumpHash( return hook(path); } - std::optional parseLsRemoteLine(std::string_view line) { const static std::regex line_regex("^(ref: *)?([^\\s]+)(?:\\t+(.*))?$"); @@ -356,13 +339,10 @@ std::optional parseLsRemoteLine(std::string_view line) if (!std::regex_match(line.cbegin(), line.cend(), match, line_regex)) return std::nullopt; - return LsRemoteRefLine { - .kind = match[1].length() == 0 - ? LsRemoteRefLine::Kind::Object - : LsRemoteRefLine::Kind::Symbolic, + return LsRemoteRefLine{ + .kind = match[1].length() == 0 ? LsRemoteRefLine::Kind::Object : LsRemoteRefLine::Kind::Symbolic, .target = match[2], - .reference = match[3].length() == 0 ? std::nullopt : std::optional{ match[3] } - }; + .reference = match[3].length() == 0 ? std::nullopt : std::optional{match[3]}}; } -} +} // namespace nix::git diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 319eb795e..8ee725d2d 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -20,23 +20,29 @@ namespace nix { -static size_t regularHashSize(HashAlgorithm type) { +static size_t regularHashSize(HashAlgorithm type) +{ switch (type) { - case HashAlgorithm::BLAKE3: return blake3HashSize; - case HashAlgorithm::MD5: return md5HashSize; - case HashAlgorithm::SHA1: return sha1HashSize; - case HashAlgorithm::SHA256: return sha256HashSize; - case HashAlgorithm::SHA512: return sha512HashSize; + case HashAlgorithm::BLAKE3: + return blake3HashSize; + case HashAlgorithm::MD5: + return md5HashSize; + case HashAlgorithm::SHA1: + return sha1HashSize; + case HashAlgorithm::SHA256: + return sha256HashSize; + case HashAlgorithm::SHA512: + return sha512HashSize; } unreachable(); } +const StringSet hashAlgorithms = {"blake3", "md5", "sha1", "sha256", "sha512"}; -const StringSet hashAlgorithms = {"blake3", "md5", "sha1", "sha256", "sha512" }; +const StringSet hashFormats = {"base64", "nix32", "base16", "sri"}; -const StringSet hashFormats = {"base64", "nix32", "base16", "sri" }; - -Hash::Hash(HashAlgorithm algo, const ExperimentalFeatureSettings & xpSettings) : algo(algo) +Hash::Hash(HashAlgorithm algo, const ExperimentalFeatureSettings & xpSettings) + : algo(algo) { if (algo == HashAlgorithm::BLAKE3) { xpSettings.require(Xp::BLAKE3Hashes); @@ -46,30 +52,31 @@ Hash::Hash(HashAlgorithm algo, const ExperimentalFeatureSettings & xpSettings) : memset(hash, 0, maxHashSize); } - -bool Hash::operator == (const Hash & h2) const noexcept +bool Hash::operator==(const Hash & h2) const noexcept { - if (hashSize != h2.hashSize) return false; + if (hashSize != h2.hashSize) + return false; for (unsigned int i = 0; i < hashSize; i++) - if (hash[i] != h2.hash[i]) return false; + if (hash[i] != h2.hash[i]) + return false; return true; } 
- -std::strong_ordering Hash::operator <=> (const Hash & h) const noexcept +std::strong_ordering Hash::operator<=>(const Hash & h) const noexcept { - if (auto cmp = hashSize <=> h.hashSize; cmp != 0) return cmp; + if (auto cmp = hashSize <=> h.hashSize; cmp != 0) + return cmp; for (unsigned int i = 0; i < hashSize; i++) { - if (auto cmp = hash[i] <=> h.hash[i]; cmp != 0) return cmp; + if (auto cmp = hash[i] <=> h.hash[i]; cmp != 0) + return cmp; } - if (auto cmp = algo <=> h.algo; cmp != 0) return cmp; + if (auto cmp = algo <=> h.algo; cmp != 0) + return cmp; return std::strong_ordering::equivalent; } - const std::string base16Chars = "0123456789abcdef"; - static std::string printHash16(const Hash & hash) { std::string buf; @@ -81,11 +88,9 @@ static std::string printHash16(const Hash & hash) return buf; } - // omitted: E O U T const std::string nix32Chars = "0123456789abcdfghijklmnpqrsvwxyz"; - static std::string printHash32(const Hash & hash) { assert(hash.hashSize); @@ -99,23 +104,19 @@ static std::string printHash32(const Hash & hash) unsigned int b = n * 5; unsigned int i = b / 8; unsigned int j = b % 8; - unsigned char c = - (hash.hash[i] >> j) - | (i >= hash.hashSize - 1 ? 0 : hash.hash[i + 1] << (8 - j)); + unsigned char c = (hash.hash[i] >> j) | (i >= hash.hashSize - 1 ? 0 : hash.hash[i + 1] << (8 - j)); s.push_back(nix32Chars[c & 0x1f]); } return s; } - std::string printHash16or32(const Hash & hash) { assert(static_cast(hash.algo)); return hash.to_string(hash.algo == HashAlgorithm::MD5 ? HashFormat::Base16 : HashFormat::Nix32, false); } - std::string Hash::to_string(HashFormat hashFormat, bool includeAlgo) const { std::string s; @@ -215,16 +216,17 @@ Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI) if (!isSRI && rest.size() == base16Len()) { auto parseHexDigit = [&](char c) { - if (c >= '0' && c <= '9') return c - '0'; - if (c >= 'A' && c <= 'F') return c - 'A' + 10; - if (c >= 'a' && c <= 'f') return c - 'a' + 10; + if (c >= '0' && c <= '9') + return c - '0'; + if (c >= 'A' && c <= 'F') + return c - 'A' + 10; + if (c >= 'a' && c <= 'f') + return c - 'a' + 10; throw BadHash("invalid base-16 hash '%s'", rest); }; for (unsigned int i = 0; i < hashSize; i++) { - hash[i] = - parseHexDigit(rest[i * 2]) << 4 - | parseHexDigit(rest[i * 2 + 1]); + hash[i] = parseHexDigit(rest[i * 2]) << 4 | parseHexDigit(rest[i * 2 + 1]); } } @@ -234,7 +236,8 @@ Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI) char c = rest[rest.size() - n - 1]; unsigned char digit; for (digit = 0; digit < nix32Chars.size(); ++digit) /* !!! 
slow */ - if (nix32Chars[digit] == c) break; + if (nix32Chars[digit] == c) + break; if (digit >= 32) throw BadHash("invalid base-32 hash '%s'", rest); unsigned int b = n * 5; @@ -287,7 +290,6 @@ Hash newHashAllowEmpty(std::string_view hashStr, std::optional ha return Hash::parseAny(hashStr, ha); } - union Ctx { blake3_hasher blake3; @@ -297,14 +299,18 @@ union Ctx SHA512_CTX sha512; }; - static void start(HashAlgorithm ha, Ctx & ctx) { - if (ha == HashAlgorithm::BLAKE3) blake3_hasher_init(&ctx.blake3); - else if (ha == HashAlgorithm::MD5) MD5_Init(&ctx.md5); - else if (ha == HashAlgorithm::SHA1) SHA1_Init(&ctx.sha1); - else if (ha == HashAlgorithm::SHA256) SHA256_Init(&ctx.sha256); - else if (ha == HashAlgorithm::SHA512) SHA512_Init(&ctx.sha512); + if (ha == HashAlgorithm::BLAKE3) + blake3_hasher_init(&ctx.blake3); + else if (ha == HashAlgorithm::MD5) + MD5_Init(&ctx.md5); + else if (ha == HashAlgorithm::SHA1) + SHA1_Init(&ctx.sha1); + else if (ha == HashAlgorithm::SHA256) + SHA256_Init(&ctx.sha256); + else if (ha == HashAlgorithm::SHA512) + SHA512_Init(&ctx.sha512); } // BLAKE3 data size threshold beyond which parallel hashing with TBB is likely faster. @@ -328,28 +334,35 @@ void blake3_hasher_update_with_heuristics(blake3_hasher * blake3, std::string_vi } } -static void update(HashAlgorithm ha, Ctx & ctx, - std::string_view data) +static void update(HashAlgorithm ha, Ctx & ctx, std::string_view data) { - if (ha == HashAlgorithm::BLAKE3) blake3_hasher_update_with_heuristics(&ctx.blake3, data); - else if (ha == HashAlgorithm::MD5) MD5_Update(&ctx.md5, data.data(), data.size()); - else if (ha == HashAlgorithm::SHA1) SHA1_Update(&ctx.sha1, data.data(), data.size()); - else if (ha == HashAlgorithm::SHA256) SHA256_Update(&ctx.sha256, data.data(), data.size()); - else if (ha == HashAlgorithm::SHA512) SHA512_Update(&ctx.sha512, data.data(), data.size()); + if (ha == HashAlgorithm::BLAKE3) + blake3_hasher_update_with_heuristics(&ctx.blake3, data); + else if (ha == HashAlgorithm::MD5) + MD5_Update(&ctx.md5, data.data(), data.size()); + else if (ha == HashAlgorithm::SHA1) + SHA1_Update(&ctx.sha1, data.data(), data.size()); + else if (ha == HashAlgorithm::SHA256) + SHA256_Update(&ctx.sha256, data.data(), data.size()); + else if (ha == HashAlgorithm::SHA512) + SHA512_Update(&ctx.sha512, data.data(), data.size()); } - static void finish(HashAlgorithm ha, Ctx & ctx, unsigned char * hash) { - if (ha == HashAlgorithm::BLAKE3) blake3_hasher_finalize(&ctx.blake3, hash, BLAKE3_OUT_LEN); - else if (ha == HashAlgorithm::MD5) MD5_Final(hash, &ctx.md5); - else if (ha == HashAlgorithm::SHA1) SHA1_Final(hash, &ctx.sha1); - else if (ha == HashAlgorithm::SHA256) SHA256_Final(hash, &ctx.sha256); - else if (ha == HashAlgorithm::SHA512) SHA512_Final(hash, &ctx.sha512); + if (ha == HashAlgorithm::BLAKE3) + blake3_hasher_finalize(&ctx.blake3, hash, BLAKE3_OUT_LEN); + else if (ha == HashAlgorithm::MD5) + MD5_Final(hash, &ctx.md5); + else if (ha == HashAlgorithm::SHA1) + SHA1_Final(hash, &ctx.sha1); + else if (ha == HashAlgorithm::SHA256) + SHA256_Final(hash, &ctx.sha256); + else if (ha == HashAlgorithm::SHA512) + SHA512_Final(hash, &ctx.sha512); } -Hash hashString( - HashAlgorithm ha, std::string_view s, const ExperimentalFeatureSettings & xpSettings) +Hash hashString(HashAlgorithm ha, std::string_view s, const ExperimentalFeatureSettings & xpSettings) { Ctx ctx; Hash hash(ha, xpSettings); @@ -366,8 +379,8 @@ Hash hashFile(HashAlgorithm ha, const Path & path) return sink.finish().first; } - 
-HashSink::HashSink(HashAlgorithm ha) : ha(ha) +HashSink::HashSink(HashAlgorithm ha) + : ha(ha) { ctx = new Ctx; bytes = 0; @@ -403,7 +416,6 @@ HashResult HashSink::currentHash() return HashResult(hash, bytes); } - Hash compressHash(const Hash & hash, unsigned int newSize) { Hash h(hash.algo); @@ -413,17 +425,20 @@ Hash compressHash(const Hash & hash, unsigned int newSize) return h; } - std::optional parseHashFormatOpt(std::string_view hashFormatName) { - if (hashFormatName == "base16") return HashFormat::Base16; - if (hashFormatName == "nix32") return HashFormat::Nix32; + if (hashFormatName == "base16") + return HashFormat::Base16; + if (hashFormatName == "nix32") + return HashFormat::Nix32; if (hashFormatName == "base32") { warn(R"("base32" is a deprecated alias for hash format "nix32".)"); return HashFormat::Nix32; } - if (hashFormatName == "base64") return HashFormat::Base64; - if (hashFormatName == "sri") return HashFormat::SRI; + if (hashFormatName == "base64") + return HashFormat::Base64; + if (hashFormatName == "sri") + return HashFormat::SRI; return std::nullopt; } @@ -455,11 +470,16 @@ std::string_view printHashFormat(HashFormat HashFormat) std::optional parseHashAlgoOpt(std::string_view s) { - if (s == "blake3") return HashAlgorithm::BLAKE3; - if (s == "md5") return HashAlgorithm::MD5; - if (s == "sha1") return HashAlgorithm::SHA1; - if (s == "sha256") return HashAlgorithm::SHA256; - if (s == "sha512") return HashAlgorithm::SHA512; + if (s == "blake3") + return HashAlgorithm::BLAKE3; + if (s == "md5") + return HashAlgorithm::MD5; + if (s == "sha1") + return HashAlgorithm::SHA1; + if (s == "sha256") + return HashAlgorithm::SHA256; + if (s == "sha512") + return HashAlgorithm::SHA512; return std::nullopt; } @@ -475,11 +495,16 @@ HashAlgorithm parseHashAlgo(std::string_view s) std::string_view printHashAlgo(HashAlgorithm ha) { switch (ha) { - case HashAlgorithm::BLAKE3: return "blake3"; - case HashAlgorithm::MD5: return "md5"; - case HashAlgorithm::SHA1: return "sha1"; - case HashAlgorithm::SHA256: return "sha256"; - case HashAlgorithm::SHA512: return "sha512"; + case HashAlgorithm::BLAKE3: + return "blake3"; + case HashAlgorithm::MD5: + return "md5"; + case HashAlgorithm::SHA1: + return "sha1"; + case HashAlgorithm::SHA256: + return "sha256"; + case HashAlgorithm::SHA512: + return "sha512"; default: // illegal hash type enum value internally, as opposed to external input // which should be validated with nice error message. 
@@ -487,4 +512,4 @@ std::string_view printHashAlgo(HashAlgorithm ha) } } -} +} // namespace nix diff --git a/src/libutil/hilite.cc b/src/libutil/hilite.cc index 6d4eb17a1..8b7e3ff23 100644 --- a/src/libutil/hilite.cc +++ b/src/libutil/hilite.cc @@ -2,19 +2,15 @@ namespace nix { -std::string hiliteMatches( - std::string_view s, - std::vector matches, - std::string_view prefix, - std::string_view postfix) +std::string +hiliteMatches(std::string_view s, std::vector matches, std::string_view prefix, std::string_view postfix) { // Avoid extra work on zero matches if (matches.size() == 0) return std::string(s); - std::sort(matches.begin(), matches.end(), [](const auto & a, const auto & b) { - return a.position() < b.position(); - }); + std::sort( + matches.begin(), matches.end(), [](const auto & a, const auto & b) { return a.position() < b.position(); }); std::string out; ssize_t last_end = 0; @@ -41,4 +37,4 @@ std::string hiliteMatches( return out; } -} +} // namespace nix diff --git a/src/libutil/include/nix/util/abstract-setting-to-json.hh b/src/libutil/include/nix/util/abstract-setting-to-json.hh index 2848f8afe..180aa59d2 100644 --- a/src/libutil/include/nix/util/abstract-setting-to-json.hh +++ b/src/libutil/include/nix/util/abstract-setting-to-json.hh @@ -15,4 +15,4 @@ std::map BaseSetting::toJSONObject() const obj.emplace("documentDefault", documentDefault); return obj; } -} +} // namespace nix diff --git a/src/libutil/include/nix/util/ansicolor.hh b/src/libutil/include/nix/util/ansicolor.hh index 86becafa6..2f0749e6a 100644 --- a/src/libutil/include/nix/util/ansicolor.hh +++ b/src/libutil/include/nix/util/ansicolor.hh @@ -1,4 +1,5 @@ #pragma once + /** * @file * @@ -18,4 +19,4 @@ namespace nix { #define ANSI_MAGENTA "\e[35;1m" #define ANSI_CYAN "\e[36;1m" -} +} // namespace nix diff --git a/src/libutil/include/nix/util/archive.hh b/src/libutil/include/nix/util/archive.hh index ae3274fa6..b88e1fa2d 100644 --- a/src/libutil/include/nix/util/archive.hh +++ b/src/libutil/include/nix/util/archive.hh @@ -5,10 +5,8 @@ #include "nix/util/serialise.hh" #include "nix/util/fs-sink.hh" - namespace nix { - /** * dumpPath creates a Nix archive of the specified path. * @@ -57,14 +55,12 @@ namespace nix { * `+` denotes string concatenation. * ``` */ -void dumpPath(const Path & path, Sink & sink, - PathFilter & filter = defaultPathFilter); +void dumpPath(const Path & path, Sink & sink, PathFilter & filter = defaultPathFilter); /** * Same as dumpPath(), but returns the last modified date of the path. */ -time_t dumpPathAndGetMtime(const Path & path, Sink & sink, - PathFilter & filter = defaultPathFilter); +time_t dumpPathAndGetMtime(const Path & path, Sink & sink, PathFilter & filter = defaultPathFilter); /** * Dump an archive with a single file with these contents. @@ -82,10 +78,8 @@ void restorePath(const std::filesystem::path & path, Source & source, bool start */ void copyNAR(Source & source, Sink & sink); - inline constexpr std::string_view narVersionMagic1 = "nix-archive-1"; inline constexpr std::string_view caseHackSuffix = "~nix~case~hack~"; - -} +} // namespace nix diff --git a/src/libutil/include/nix/util/args.hh b/src/libutil/include/nix/util/args.hh index f3ab0b532..5e64ae1d9 100644 --- a/src/libutil/include/nix/util/args.hh +++ b/src/libutil/include/nix/util/args.hh @@ -31,18 +31,28 @@ public: /** * Return a short one-line description of the command. 
- */ - virtual std::string description() { return ""; } + */ + virtual std::string description() + { + return ""; + } - virtual bool forceImpureByDefault() { return false; } + virtual bool forceImpureByDefault() + { + return false; + } /** * Return documentation about this command, in Markdown format. */ - virtual std::string doc() { return ""; } + virtual std::string doc() + { + return ""; + } /** - * @brief Get the [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory) for the command. + * @brief Get the [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory) for the + * command. * * @return Generally the working directory, but in case of a shebang * interpreter, returns the directory of the script. @@ -78,73 +88,79 @@ protected: Handler(std::function)> && fun) : fun(std::move(fun)) , arity(ArityAny) - { } + { + } Handler(std::function && handler) : fun([handler{std::move(handler)}](std::vector) { handler(); }) , arity(0) - { } + { + } Handler(std::function && handler) - : fun([handler{std::move(handler)}](std::vector ss) { - handler(std::move(ss[0])); - }) + : fun([handler{std::move(handler)}](std::vector ss) { handler(std::move(ss[0])); }) , arity(1) - { } + { + } Handler(std::function && handler) : fun([handler{std::move(handler)}](std::vector ss) { handler(std::move(ss[0]), std::move(ss[1])); - }) + }) , arity(2) - { } + { + } Handler(std::vector * dest) : fun([dest](std::vector ss) { *dest = ss; }) , arity(ArityAny) - { } + { + } Handler(std::string * dest) : fun([dest](std::vector ss) { *dest = ss[0]; }) , arity(1) - { } + { + } Handler(std::optional * dest) : fun([dest](std::vector ss) { *dest = ss[0]; }) , arity(1) - { } + { + } Handler(std::filesystem::path * dest) : fun([dest](std::vector ss) { *dest = ss[0]; }) , arity(1) - { } + { + } Handler(std::optional * dest) : fun([dest](std::vector ss) { *dest = ss[0]; }) , arity(1) - { } + { + } template Handler(T * dest, const T & val) : fun([dest, val](std::vector ss) { *dest = val; }) , arity(0) - { } + { + } template Handler(I * dest) - : fun([dest](std::vector ss) { - *dest = string2IntWithUnitPrefix(ss[0]); - }) + : fun([dest](std::vector ss) { *dest = string2IntWithUnitPrefix(ss[0]); }) , arity(1) - { } + { + } template Handler(std::optional * dest) - : fun([dest](std::vector ss) { - *dest = string2IntWithUnitPrefix(ss[0]); - }) + : fun([dest](std::vector ss) { *dest = string2IntWithUnitPrefix(ss[0]); }) , arity(1) - { } + { + } }; /** @@ -248,8 +264,8 @@ protected: * This list is used to extend the lifetime of the argument forms. * If this is not done, some closures that reference the command * itself will segfault. 
- */ - std::list processedArgs; + */ + std::list processedArgs; /** * Process some positional arguments @@ -261,7 +277,9 @@ protected: virtual bool processArgs(const Strings & args, bool finish); virtual Strings::iterator rewriteArgs(Strings & args, Strings::iterator pos) - { return pos; } + { + return pos; + } StringSet hiddenCategories; @@ -287,11 +305,7 @@ public: */ void expectArg(const std::string & label, std::string * dest, bool optional = false) { - expectArgs({ - .label = label, - .optional = optional, - .handler = {dest} - }); + expectArgs({.label = label, .optional = optional, .handler = {dest}}); } /** @@ -299,11 +313,7 @@ public: */ void expectArg(const std::string & label, std::filesystem::path * dest, bool optional = false) { - expectArgs({ - .label = label, - .optional = optional, - .handler = {dest} - }); + expectArgs({.label = label, .optional = optional, .handler = {dest}}); } /** @@ -311,10 +321,7 @@ public: */ void expectArgs(const std::string & label, std::vector * dest) { - expectArgs({ - .label = label, - .handler = {dest} - }); + expectArgs({.label = label, .handler = {dest}}); } static CompleterFun completePath; @@ -364,7 +371,10 @@ struct Command : virtual public Args virtual std::optional experimentalFeature(); - virtual Category category() { return catDefault; } + virtual Category category() + { + return catDefault; + } }; using Commands = std::map()>>; @@ -401,7 +411,8 @@ public: }; /** An alias, except for the original syntax, which is in the map key. */ - struct AliasInfo { + struct AliasInfo + { AliasStatus status; std::vector replacement; }; @@ -419,9 +430,10 @@ protected: bool aliasUsed = false; }; -Strings argvToStrings(int argc, char * * argv); +Strings argvToStrings(int argc, char ** argv); -struct Completion { +struct Completion +{ std::string completion; std::string description; @@ -465,4 +477,4 @@ public: Strings parseShebangContent(std::string_view s); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/args/root.hh b/src/libutil/include/nix/util/args/root.hh index cdc9be613..86b677be4 100644 --- a/src/libutil/include/nix/util/args/root.hh +++ b/src/libutil/include/nix/util/args/root.hh @@ -57,7 +57,8 @@ protected: /** * A pointer to the completion and its two arguments; a thunk; */ - struct DeferredCompletion { + struct DeferredCompletion + { const CompleterClosure & completer; size_t n; std::string prefix; @@ -82,4 +83,4 @@ private: std::optional needsCompletion(std::string_view s); }; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/callback.hh b/src/libutil/include/nix/util/callback.hh index c2cada2f6..2ed48c7a3 100644 --- a/src/libutil/include/nix/util/callback.hh +++ b/src/libutil/include/nix/util/callback.hh @@ -20,14 +20,18 @@ class Callback public: - Callback(std::function)> fun) : fun(fun) { } + Callback(std::function)> fun) + : fun(fun) + { + } // NOTE: std::function is noexcept move-constructible since C++20. 
Callback(Callback && callback) noexcept(std::is_nothrow_move_constructible_v) : fun(std::move(callback.fun)) { auto prev = callback.done.test_and_set(); - if (prev) done.test_and_set(); + if (prev) + done.test_and_set(); } void operator()(T && t) noexcept @@ -49,4 +53,4 @@ public: } }; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/canon-path.hh b/src/libutil/include/nix/util/canon-path.hh index f84347dc4..cb8b4325d 100644 --- a/src/libutil/include/nix/util/canon-path.hh +++ b/src/libutil/include/nix/util/canon-path.hh @@ -51,13 +51,16 @@ public: explicit CanonPath(const char * raw) : CanonPath(std::string_view(raw)) - { } + { + } - struct unchecked_t { }; + struct unchecked_t + {}; CanonPath(unchecked_t _, std::string path) : path(std::move(path)) - { } + { + } /** * Construct a canon path from a vector of elements. @@ -74,13 +77,19 @@ public: CanonPath(std::string_view raw, const CanonPath & root); bool isRoot() const - { return path.size() <= 1; } + { + return path.size() <= 1; + } explicit operator std::string_view() const - { return path; } + { + return path; + } const std::string & abs() const - { return path; } + { + return path; + } /** * Like abs(), but return an empty string if this path is @@ -93,10 +102,14 @@ public: } const char * c_str() const - { return path.c_str(); } + { + return path.c_str(); + } std::string_view rel() const - { return ((std::string_view) path).substr(1); } + { + return ((std::string_view) path).substr(1); + } const char * rel_c_str() const { @@ -113,18 +126,25 @@ public: Iterator(std::string_view remaining) : remaining(remaining) , slash(remaining.find('/')) - { } + { + } - bool operator != (const Iterator & x) const - { return remaining.data() != x.remaining.data(); } + bool operator!=(const Iterator & x) const + { + return remaining.data() != x.remaining.data(); + } - bool operator == (const Iterator & x) const - { return !(*this != x); } + bool operator==(const Iterator & x) const + { + return !(*this != x); + } - const std::string_view operator * () const - { return remaining.substr(0, slash); } + const std::string_view operator*() const + { + return remaining.substr(0, slash); + } - void operator ++ () + void operator++() { if (slash == remaining.npos) remaining = remaining.substr(remaining.size()); @@ -135,8 +155,15 @@ public: } }; - Iterator begin() const { return Iterator(rel()); } - Iterator end() const { return Iterator(rel().substr(path.size() - 1)); } + Iterator begin() const + { + return Iterator(rel()); + } + + Iterator end() const + { + return Iterator(rel().substr(path.size() - 1)); + } std::optional parent() const; @@ -147,21 +174,27 @@ public: std::optional dirOf() const { - if (isRoot()) return std::nullopt; + if (isRoot()) + return std::nullopt; return ((std::string_view) path).substr(0, path.rfind('/')); } std::optional baseName() const { - if (isRoot()) return std::nullopt; + if (isRoot()) + return std::nullopt; return ((std::string_view) path).substr(path.rfind('/') + 1); } - bool operator == (const CanonPath & x) const - { return path == x.path; } + bool operator==(const CanonPath & x) const + { + return path == x.path; + } - bool operator != (const CanonPath & x) const - { return path != x.path; } + bool operator!=(const CanonPath & x) const + { + return path != x.path; + } /** * Compare paths lexicographically except that path separators @@ -169,16 +202,19 @@ public: * a directory is always followed directly by its children. For * instance, 'foo' < 'foo/bar' < 'foo!'. 
*/ - auto operator <=> (const CanonPath & x) const + auto operator<=>(const CanonPath & x) const { auto i = path.begin(); auto j = x.path.begin(); - for ( ; i != path.end() && j != x.path.end(); ++i, ++j) { + for (; i != path.end() && j != x.path.end(); ++i, ++j) { auto c_i = *i; - if (c_i == '/') c_i = 0; + if (c_i == '/') + c_i = 0; auto c_j = *j; - if (c_j == '/') c_j = 0; - if (auto cmp = c_i <=> c_j; cmp != 0) return cmp; + if (c_j == '/') + c_j = 0; + if (auto cmp = c_i <=> c_j; cmp != 0) + return cmp; } return (i != path.end()) <=> (j != x.path.end()); } @@ -199,14 +235,14 @@ public: /** * Concatenate two paths. */ - CanonPath operator / (const CanonPath & x) const; + CanonPath operator/(const CanonPath & x) const; /** * Add a path component to this one. It must not contain any slashes. */ void push(std::string_view c); - CanonPath operator / (std::string_view c) const; + CanonPath operator/(std::string_view c) const; /** * Check whether access to this path is allowed, which is the case @@ -225,14 +261,14 @@ public: friend class std::hash; }; -std::ostream & operator << (std::ostream & stream, const CanonPath & path); +std::ostream & operator<<(std::ostream & stream, const CanonPath & path); -} +} // namespace nix template<> struct std::hash { - std::size_t operator ()(const nix::CanonPath & s) const noexcept + std::size_t operator()(const nix::CanonPath & s) const noexcept { return std::hash{}(s.path); } diff --git a/src/libutil/include/nix/util/checked-arithmetic.hh b/src/libutil/include/nix/util/checked-arithmetic.hh index dcc6d86af..48679622c 100644 --- a/src/libutil/include/nix/util/checked-arithmetic.hh +++ b/src/libutil/include/nix/util/checked-arithmetic.hh @@ -32,15 +32,18 @@ struct Checked T value; Checked() = default; + explicit Checked(T const value) : value{value} { } + Checked(Checked const & other) = default; Checked(Checked && other) = default; Checked & operator=(Checked const & other) = default; std::strong_ordering operator<=>(Checked const & other) const = default; + std::strong_ordering operator<=>(T const & other) const { return value <=> other; @@ -68,6 +71,7 @@ struct Checked , overflowed_{overflowed ? OverflowKind::Overflow : OverflowKind::NoOverflow} { } + Result(T value, OverflowKind overflowed) : value{value} , overflowed_{overflowed} @@ -116,6 +120,7 @@ struct Checked { return (*this) + other.value; } + Result operator+(T const other) const { T result; @@ -127,6 +132,7 @@ struct Checked { return (*this) - other.value; } + Result operator-(T const other) const { T result; @@ -138,6 +144,7 @@ struct Checked { return (*this) * other.value; } + Result operator*(T const other) const { T result; @@ -149,6 +156,7 @@ struct Checked { return (*this) / other.value; } + /** * Performs a checked division. * @@ -181,4 +189,4 @@ std::ostream & operator<<(std::ostream & ios, Checked v) return ios; } -} +} // namespace nix::checked diff --git a/src/libutil/include/nix/util/chunked-vector.hh b/src/libutil/include/nix/util/chunked-vector.hh index 2c21183ac..38e53c7f5 100644 --- a/src/libutil/include/nix/util/chunked-vector.hh +++ b/src/libutil/include/nix/util/chunked-vector.hh @@ -20,7 +20,8 @@ namespace nix { * references to its elements. */ template -class ChunkedVector { +class ChunkedVector +{ private: uint32_t size_ = 0; std::vector> chunks; @@ -45,13 +46,16 @@ public: addChunk(); } - uint32_t size() const noexcept { return size_; } + uint32_t size() const noexcept + { + return size_; + } - template + template std::pair add(Args &&... 
args) { const auto idx = size_++; - auto & chunk = [&] () -> auto & { + auto & chunk = [&]() -> auto & { if (auto & back = chunks.back(); back.size() < ChunkSize) return back; return addChunk(); @@ -78,4 +82,4 @@ public: fn(e); } }; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/closure.hh b/src/libutil/include/nix/util/closure.hh index 54b18ab3d..d55d52c87 100644 --- a/src/libutil/include/nix/util/closure.hh +++ b/src/libutil/include/nix/util/closure.hh @@ -13,11 +13,7 @@ template using GetEdgesAsync = std::function> &)>)>; template -void computeClosure( - const set startElts, - set & res, - GetEdgesAsync getEdgesAsync -) +void computeClosure(const set startElts, set & res, GetEdgesAsync getEdgesAsync) { struct State { @@ -35,8 +31,10 @@ void computeClosure( enqueue = [&](const T & current) -> void { { auto state(state_.lock()); - if (state->exc) return; - if (!state->res.insert(current).second) return; + if (state->exc) + return; + if (!state->res.insert(current).second) + return; state->pending++; } @@ -48,13 +46,16 @@ void computeClosure( { auto state(state_.lock()); assert(state->pending); - if (!--state->pending) done.notify_one(); + if (!--state->pending) + done.notify_one(); } } catch (...) { auto state(state_.lock()); - if (!state->exc) state->exc = std::current_exception(); + if (!state->exc) + state->exc = std::current_exception(); assert(state->pending); - if (!--state->pending) done.notify_one(); + if (!--state->pending) + done.notify_one(); }; }); }; @@ -64,9 +65,11 @@ void computeClosure( { auto state(state_.lock()); - while (state->pending) state.wait(done); - if (state->exc) std::rethrow_exception(state->exc); + while (state->pending) + state.wait(done); + if (state->exc) + std::rethrow_exception(state->exc); } } -} +} // namespace nix diff --git a/src/libutil/include/nix/util/comparator.hh b/src/libutil/include/nix/util/comparator.hh index c3af1758d..64ce47dc9 100644 --- a/src/libutil/include/nix/util/comparator.hh +++ b/src/libutil/include/nix/util/comparator.hh @@ -1,13 +1,14 @@ #pragma once ///@file -#define GENERATE_ONE_CMP(PRE, RET, QUAL, COMPARATOR, MY_TYPE, ...) \ - PRE RET QUAL operator COMPARATOR(const MY_TYPE & other) const noexcept { \ - __VA_OPT__(const MY_TYPE * me = this;) \ - auto fields1 = std::tie( __VA_ARGS__ ); \ - __VA_OPT__(me = &other;) \ - auto fields2 = std::tie( __VA_ARGS__ ); \ - return fields1 COMPARATOR fields2; \ +#define GENERATE_ONE_CMP(PRE, RET, QUAL, COMPARATOR, MY_TYPE, ...) \ + PRE RET QUAL operator COMPARATOR(const MY_TYPE & other) const noexcept \ + { \ + __VA_OPT__(const MY_TYPE * me = this;) \ + auto fields1 = std::tie(__VA_ARGS__); \ + __VA_OPT__(me = &other;) \ + auto fields2 = std::tie(__VA_ARGS__); \ + return fields1 COMPARATOR fields2; \ } #define GENERATE_EQUAL(prefix, qualification, my_type, args...) \ GENERATE_ONE_CMP(prefix, bool, qualification, ==, my_type, args) @@ -36,8 +37,8 @@ * ``` */ #define GENERATE_CMP(args...) \ - GENERATE_EQUAL(,,args) \ - GENERATE_SPACESHIP(,auto,,args) + GENERATE_EQUAL(, , args) \ + GENERATE_SPACESHIP(, auto, , args) /** * @param prefix This is for something before each declaration like @@ -46,5 +47,5 @@ * @param my_type the type are defining operators for. */ #define GENERATE_CMP_EXT(prefix, ret, my_type, args...) 
\ - GENERATE_EQUAL(prefix, my_type ::, my_type, args) \ + GENERATE_EQUAL(prefix, my_type ::, my_type, args) \ GENERATE_SPACESHIP(prefix, ret, my_type ::, my_type, args) diff --git a/src/libutil/include/nix/util/compression.hh b/src/libutil/include/nix/util/compression.hh index 15d869e88..351826856 100644 --- a/src/libutil/include/nix/util/compression.hh +++ b/src/libutil/include/nix/util/compression.hh @@ -29,4 +29,4 @@ MakeError(UnknownCompressionMethod, Error); MakeError(CompressionError, Error); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/config-global.hh b/src/libutil/include/nix/util/config-global.hh index 44f89e06d..4a4277c48 100644 --- a/src/libutil/include/nix/util/config-global.hh +++ b/src/libutil/include/nix/util/config-global.hh @@ -35,4 +35,4 @@ struct GlobalConfig : public AbstractConfig extern GlobalConfig globalConfig; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/config-impl.hh b/src/libutil/include/nix/util/config-impl.hh index 15e0c9554..f72917b11 100644 --- a/src/libutil/include/nix/util/config-impl.hh +++ b/src/libutil/include/nix/util/config-impl.hh @@ -17,19 +17,26 @@ namespace nix { -template<> struct BaseSetting::trait +template<> +struct BaseSetting::trait { static constexpr bool appendable = true; }; -template<> struct BaseSetting::trait + +template<> +struct BaseSetting::trait { static constexpr bool appendable = true; }; -template<> struct BaseSetting::trait + +template<> +struct BaseSetting::trait { static constexpr bool appendable = true; }; -template<> struct BaseSetting>::trait + +template<> +struct BaseSetting>::trait { static constexpr bool appendable = true; }; @@ -46,17 +53,19 @@ bool BaseSetting::isAppendable() return trait::appendable; } -template<> void BaseSetting::appendOrSet(Strings newValue, bool append); -template<> void BaseSetting::appendOrSet(StringSet newValue, bool append); -template<> void BaseSetting::appendOrSet(StringMap newValue, bool append); -template<> void BaseSetting>::appendOrSet(std::set newValue, bool append); +template<> +void BaseSetting::appendOrSet(Strings newValue, bool append); +template<> +void BaseSetting::appendOrSet(StringSet newValue, bool append); +template<> +void BaseSetting::appendOrSet(StringMap newValue, bool append); +template<> +void BaseSetting>::appendOrSet(std::set newValue, bool append); template void BaseSetting::appendOrSet(T newValue, bool append) { - static_assert( - !trait::appendable, - "using default `appendOrSet` implementation with an appendable type"); + static_assert(!trait::appendable, "using default `appendOrSet` implementation with an appendable type"); assert(!append); value = std::move(newValue); @@ -69,13 +78,15 @@ void BaseSetting::set(const std::string & str, bool append) appendOrSet(parse(str), append); else { assert(experimentalFeature); - warn("Ignoring setting '%s' because experimental feature '%s' is not enabled", + warn( + "Ignoring setting '%s' because experimental feature '%s' is not enabled", name, showExperimentalFeature(*experimentalFeature)); } } -template<> void BaseSetting::convertToArg(Args & args, const std::string & category); +template<> +void BaseSetting::convertToArg(Args & args, const std::string & category); template void BaseSetting::convertToArg(Args & args, const std::string & category) @@ -86,7 +97,10 @@ void BaseSetting::convertToArg(Args & args, const std::string & category) .description = fmt("Set the `%s` setting.", name), .category = category, .labels = {"value"}, - .handler = {[this](std::string s) { overridden = 
true; set(s); }}, + .handler = {[this](std::string s) { + overridden = true; + set(s); + }}, .experimentalFeature = experimentalFeature, }); @@ -97,14 +111,19 @@ void BaseSetting::convertToArg(Args & args, const std::string & category) .description = fmt("Append to the `%s` setting.", name), .category = category, .labels = {"value"}, - .handler = {[this](std::string s) { overridden = true; set(s, true); }}, + .handler = {[this](std::string s) { + overridden = true; + set(s, true); + }}, .experimentalFeature = experimentalFeature, }); } -#define DECLARE_CONFIG_SERIALISER(TY) \ - template<> TY BaseSetting< TY >::parse(const std::string & str) const; \ - template<> std::string BaseSetting< TY >::to_string() const; +#define DECLARE_CONFIG_SERIALISER(TY) \ + template<> \ + TY BaseSetting::parse(const std::string & str) const; \ + template<> \ + std::string BaseSetting::to_string() const; DECLARE_CONFIG_SERIALISER(std::string) DECLARE_CONFIG_SERIALISER(std::optional) @@ -134,4 +153,4 @@ std::string BaseSetting::to_string() const return std::to_string(value); } -} +} // namespace nix diff --git a/src/libutil/include/nix/util/configuration.hh b/src/libutil/include/nix/util/configuration.hh index 24b42f02c..cc7e6aff7 100644 --- a/src/libutil/include/nix/util/configuration.hh +++ b/src/libutil/include/nix/util/configuration.hh @@ -247,7 +247,8 @@ protected: public: - BaseSetting(const T & def, + BaseSetting( + const T & def, const bool documentDefault, const std::string & name, const std::string & description, @@ -257,21 +258,58 @@ public: , value(def) , defaultValue(def) , documentDefault(documentDefault) - { } + { + } + + operator const T &() const + { + return value; + } + + operator T &() + { + return value; + } + + const T & get() const + { + return value; + } + + T & get() + { + return value; + } - operator const T &() const { return value; } - operator T &() { return value; } - const T & get() const { return value; } - T & get() { return value; } template - bool operator ==(const U & v2) const { return value == v2; } + bool operator==(const U & v2) const + { + return value == v2; + } + template - bool operator !=(const U & v2) const { return value != v2; } + bool operator!=(const U & v2) const + { + return value != v2; + } + template - void operator =(const U & v) { assign(v); } - virtual void assign(const T & v) { value = v; } + void operator=(const U & v) + { + assign(v); + } + + virtual void assign(const T & v) + { + value = v; + } + template - void setDefault(const U & v) { if (!overridden) value = v; } + void setDefault(const U & v) + { + if (!overridden) + value = v; + } /** * Require any experimental feature the setting depends on @@ -307,19 +345,23 @@ public: }; template -std::ostream & operator <<(std::ostream & str, const BaseSetting & opt) +std::ostream & operator<<(std::ostream & str, const BaseSetting & opt) { return str << static_cast(opt); } template -bool operator ==(const T & v1, const BaseSetting & v2) { return v1 == static_cast(v2); } +bool operator==(const T & v1, const BaseSetting & v2) +{ + return v1 == static_cast(v2); +} template class Setting : public BaseSetting { public: - Setting(Config * options, + Setting( + Config * options, const T & def, const std::string & name, const std::string & description, @@ -331,7 +373,10 @@ public: options->addSetting(this); } - void operator =(const T & v) { this->assign(v); } + void operator=(const T & v) + { + this->assign(v); + } }; /** @@ -345,7 +390,8 @@ class PathSetting : public BaseSetting { public: - PathSetting(Config * 
options, + PathSetting( + Config * options, const Path & def, const std::string & name, const std::string & description, @@ -353,9 +399,15 @@ public: Path parse(const std::string & str) const override; - Path operator +(const char * p) const { return value + p; } + Path operator+(const char * p) const + { + return value + p; + } - void operator =(const Path & v) { this->assign(v); } + void operator=(const Path & v) + { + this->assign(v); + } }; /** @@ -367,7 +419,8 @@ class OptionalPathSetting : public BaseSetting> { public: - OptionalPathSetting(Config * options, + OptionalPathSetting( + Config * options, const std::optional & def, const std::string & name, const std::string & description, @@ -375,14 +428,16 @@ public: std::optional parse(const std::string & str) const override; - void operator =(const std::optional & v); + void operator=(const std::optional & v); }; - -struct ExperimentalFeatureSettings : Config { +struct ExperimentalFeatureSettings : Config +{ Setting> experimentalFeatures{ - this, {}, "experimental-features", + this, + {}, + "experimental-features", R"( Experimental features that are enabled. @@ -426,4 +481,4 @@ struct ExperimentalFeatureSettings : Config { // FIXME: don't use a global variable. extern ExperimentalFeatureSettings experimentalFeatureSettings; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/current-process.hh b/src/libutil/include/nix/util/current-process.hh index b2c92a34c..364493137 100644 --- a/src/libutil/include/nix/util/current-process.hh +++ b/src/libutil/include/nix/util/current-process.hh @@ -4,7 +4,7 @@ #include #ifndef _WIN32 -# include +# include #endif #include "nix/util/types.hh" @@ -38,4 +38,4 @@ void restoreProcessContext(bool restoreMounts = true); */ std::optional getSelfExe(); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/english.hh b/src/libutil/include/nix/util/english.hh index 9c6c93571..1dcff51ca 100644 --- a/src/libutil/include/nix/util/english.hh +++ b/src/libutil/include/nix/util/english.hh @@ -9,10 +9,7 @@ namespace nix { * * If `count == 1`, prints `1 {single}` to `output`, otherwise prints `{count} {plural}`. */ -std::ostream & pluralize( - std::ostream & output, - unsigned int count, - const std::string_view single, - const std::string_view plural); +std::ostream & +pluralize(std::ostream & output, unsigned int count, const std::string_view single, const std::string_view plural); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/environment-variables.hh b/src/libutil/include/nix/util/environment-variables.hh index 9b2fab4f4..f8c3b7ad0 100644 --- a/src/libutil/include/nix/util/environment-variables.hh +++ b/src/libutil/include/nix/util/environment-variables.hh @@ -66,4 +66,4 @@ void clearEnv(); */ void replaceEnv(const StringMap & newEnv); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/error.hh b/src/libutil/include/nix/util/error.hh index 7c96112ea..bd21e02d3 100644 --- a/src/libutil/include/nix/util/error.hh +++ b/src/libutil/include/nix/util/error.hh @@ -29,22 +29,13 @@ namespace nix { - -typedef enum { - lvlError = 0, - lvlWarn, - lvlNotice, - lvlInfo, - lvlTalkative, - lvlChatty, - lvlDebug, - lvlVomit -} Verbosity; +typedef enum { lvlError = 0, lvlWarn, lvlNotice, lvlInfo, lvlTalkative, lvlChatty, lvlDebug, lvlVomit } Verbosity; /** * The lines of code surrounding an error. 
*/ -struct LinesOfCode { +struct LinesOfCode +{ std::optional prevLineOfCode; std::optional errLineOfCode; std::optional nextLineOfCode; @@ -60,10 +51,7 @@ struct LinesOfCode { 4feb7d9f71? */ struct Pos; -void printCodeLines(std::ostream & out, - const std::string & prefix, - const Pos & errPos, - const LinesOfCode & loc); +void printCodeLines(std::ostream & out, const std::string & prefix, const Pos & errPos, const LinesOfCode & loc); /** * When a stack frame is printed. @@ -77,15 +65,17 @@ enum struct TracePrint { Always, }; -struct Trace { +struct Trace +{ std::shared_ptr pos; HintFmt hint; TracePrint print = TracePrint::Default; }; -inline std::strong_ordering operator<=>(const Trace& lhs, const Trace& rhs); +inline std::strong_ordering operator<=>(const Trace & lhs, const Trace & rhs); -struct ErrorInfo { +struct ErrorInfo +{ Verbosity level; HintFmt msg; std::shared_ptr pos; @@ -128,51 +118,71 @@ protected: public: BaseError(const BaseError &) = default; - BaseError& operator=(const BaseError &) = default; - BaseError& operator=(BaseError &&) = default; + BaseError & operator=(const BaseError &) = default; + BaseError & operator=(BaseError &&) = default; template - BaseError(unsigned int status, const Args & ... args) - : err { .level = lvlError, .msg = HintFmt(args...), .status = status } - { } + BaseError(unsigned int status, const Args &... args) + : err{.level = lvlError, .msg = HintFmt(args...), .status = status} + { + } template - explicit BaseError(const std::string & fs, const Args & ... args) - : err { .level = lvlError, .msg = HintFmt(fs, args...) } - { } + explicit BaseError(const std::string & fs, const Args &... args) + : err{.level = lvlError, .msg = HintFmt(fs, args...)} + { + } template - BaseError(const Suggestions & sug, const Args & ... args) - : err { .level = lvlError, .msg = HintFmt(args...), .suggestions = sug } - { } + BaseError(const Suggestions & sug, const Args &... args) + : err{.level = lvlError, .msg = HintFmt(args...), .suggestions = sug} + { + } BaseError(HintFmt hint) - : err { .level = lvlError, .msg = hint } - { } + : err{.level = lvlError, .msg = hint} + { + } BaseError(ErrorInfo && e) : err(std::move(e)) - { } + { + } BaseError(const ErrorInfo & e) : err(e) - { } + { + } /** The error message without "error: " prefixed to it. */ - std::string message() { + std::string message() + { return err.msg.str(); } - const char * what() const noexcept override { return calcWhat().c_str(); } - const std::string & msg() const { return calcWhat(); } - const ErrorInfo & info() const { calcWhat(); return err; } + const char * what() const noexcept override + { + return calcWhat().c_str(); + } + + const std::string & msg() const + { + return calcWhat(); + } + + const ErrorInfo & info() const + { + calcWhat(); + return err; + } void withExitStatus(unsigned int status) { err.status = status; } - void atPos(std::shared_ptr pos) { + void atPos(std::shared_ptr pos) + { err.pos = pos; } @@ -182,23 +192,29 @@ public: } template - void addTrace(std::shared_ptr && e, std::string_view fs, const Args & ... args) + void addTrace(std::shared_ptr && e, std::string_view fs, const Args &... 
args) { addTrace(std::move(e), HintFmt(std::string(fs), args...)); } void addTrace(std::shared_ptr && e, HintFmt hint, TracePrint print = TracePrint::Default); - bool hasTrace() const { return !err.traces.empty(); } + bool hasTrace() const + { + return !err.traces.empty(); + } - const ErrorInfo & info() { return err; }; + const ErrorInfo & info() + { + return err; + }; }; #define MakeError(newClass, superClass) \ - class newClass : public superClass \ - { \ - public: \ - using superClass::superClass; \ + class newClass : public superClass \ + { \ + public: \ + using superClass::superClass; \ } MakeError(Error, BaseError); @@ -236,8 +252,9 @@ public: * will be used to try to add additional information to the message. */ template - SysError(int errNo, const Args & ... args) - : SystemError(""), errNo(errNo) + SysError(int errNo, const Args &... args) + : SystemError("") + , errNo(errNo) { auto hf = HintFmt(args...); err.msg = HintFmt("%1%: %2%", Uncolored(hf.str()), strerror(errNo)); @@ -250,15 +267,15 @@ public: * calling this constructor! */ template - SysError(const Args & ... args) - : SysError(errno, args ...) + SysError(const Args &... args) + : SysError(errno, args...) { } }; #ifdef _WIN32 namespace windows { - class WinError; +class WinError; } #endif @@ -301,4 +318,4 @@ void panic(const char * file, int line, const char * func); */ #define unreachable() (::nix::panic(__FILE__, __LINE__, __func__)) -} +} // namespace nix diff --git a/src/libutil/include/nix/util/exec.hh b/src/libutil/include/nix/util/exec.hh index a362cef35..e4c9bf772 100644 --- a/src/libutil/include/nix/util/exec.hh +++ b/src/libutil/include/nix/util/exec.hh @@ -12,4 +12,4 @@ namespace nix { */ int execvpe(const OsChar * file0, const OsChar * const argv[], const OsChar * const envp[]); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/exit.hh b/src/libutil/include/nix/util/exit.hh index 55f33e62f..5f0f256ed 100644 --- a/src/libutil/include/nix/util/exit.hh +++ b/src/libutil/include/nix/util/exit.hh @@ -11,9 +11,18 @@ class Exit : public std::exception { public: int status; - Exit() : status(0) { } - explicit Exit(int status) : status(status) { } + + Exit() + : status(0) + { + } + + explicit Exit(int status) + : status(status) + { + } + virtual ~Exit(); }; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/experimental-features.hh b/src/libutil/include/nix/util/experimental-features.hh index 8923517ba..1eabc3461 100644 --- a/src/libutil/include/nix/util/experimental-features.hh +++ b/src/libutil/include/nix/util/experimental-features.hh @@ -15,8 +15,7 @@ namespace nix { * their string representation and documentation in the corresponding * `.cc` file as well. */ -enum struct ExperimentalFeature -{ +enum struct ExperimentalFeature { CaDerivations, ImpureDerivations, Flakes, @@ -49,8 +48,7 @@ using Xp = ExperimentalFeature; * Parse an experimental feature (enum value) from its name. Experimental * feature flag names are hyphenated and do not contain spaces. */ -const std::optional parseExperimentalFeature( - const std::string_view & name); +const std::optional parseExperimentalFeature(const std::string_view & name); /** * Show the name of an experimental feature. This is the opposite of @@ -68,9 +66,7 @@ nlohmann::json documentExperimentalFeatures(); /** * Shorthand for `str << showExperimentalFeature(feature)`. 
*/ -std::ostream & operator<<( - std::ostream & str, - const ExperimentalFeature & feature); +std::ostream & operator<<(std::ostream & str, const ExperimentalFeature & feature); /** * Parse a set of strings to the corresponding set of experimental @@ -100,4 +96,4 @@ public: void to_json(nlohmann::json &, const ExperimentalFeature &); void from_json(const nlohmann::json &, ExperimentalFeature &); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/file-content-address.hh b/src/libutil/include/nix/util/file-content-address.hh index 0922604f8..def123202 100644 --- a/src/libutil/include/nix/util/file-content-address.hh +++ b/src/libutil/include/nix/util/file-content-address.hh @@ -57,22 +57,14 @@ std::string_view renderFileSerialisationMethod(FileSerialisationMethod method); * Dump a serialization of the given file system object. */ void dumpPath( - const SourcePath & path, - Sink & sink, - FileSerialisationMethod method, - PathFilter & filter = defaultPathFilter); + const SourcePath & path, Sink & sink, FileSerialisationMethod method, PathFilter & filter = defaultPathFilter); /** * Restore a serialisation of the given file system object. * * \todo use an arbitrary `FileSystemObjectSink`. */ -void restorePath( - const Path & path, - Source & source, - FileSerialisationMethod method, - bool startFsync = false); - +void restorePath(const Path & path, Source & source, FileSerialisationMethod method, bool startFsync = false); /** * Compute the hash of the given file system object according to the @@ -85,9 +77,7 @@ void restorePath( * ``` */ HashResult hashPath( - const SourcePath & path, - FileSerialisationMethod method, HashAlgorithm ha, - PathFilter & filter = defaultPathFilter); + const SourcePath & path, FileSerialisationMethod method, HashAlgorithm ha, PathFilter & filter = defaultPathFilter); /** * An enumeration of the ways we can ingest file system @@ -153,8 +143,6 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method); * useful defined for a merkle format. */ std::pair> hashPath( - const SourcePath & path, - FileIngestionMethod method, HashAlgorithm ha, - PathFilter & filter = defaultPathFilter); + const SourcePath & path, FileIngestionMethod method, HashAlgorithm ha, PathFilter & filter = defaultPathFilter); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/file-descriptor.hh b/src/libutil/include/nix/util/file-descriptor.hh index e2bcce2a2..3dd2dd8e6 100644 --- a/src/libutil/include/nix/util/file-descriptor.hh +++ b/src/libutil/include/nix/util/file-descriptor.hh @@ -5,8 +5,8 @@ #include "nix/util/error.hh" #ifdef _WIN32 -# define WIN32_LEAN_AND_MEAN -# include +# define WIN32_LEAN_AND_MEAN +# include #endif namespace nix { @@ -93,18 +93,19 @@ void writeLine(Descriptor fd, std::string s); /** * Read a file descriptor until EOF occurs. */ -std::string drainFD(Descriptor fd, bool block = true, const size_t reserveSize=0); +std::string drainFD(Descriptor fd, bool block = true, const size_t reserveSize = 0); /** * The Windows version is always blocking. 
*/ void drainFD( - Descriptor fd - , Sink & sink + Descriptor fd, + Sink & sink #ifndef _WIN32 - , bool block = true + , + bool block = true #endif - ); +); /** * Get [Standard Input](https://en.wikipedia.org/wiki/Standard_streams#Standard_input_(stdin)) @@ -155,10 +156,10 @@ public: AutoCloseFD(); AutoCloseFD(Descriptor fd); AutoCloseFD(const AutoCloseFD & fd) = delete; - AutoCloseFD(AutoCloseFD&& fd) noexcept; + AutoCloseFD(AutoCloseFD && fd) noexcept; ~AutoCloseFD(); - AutoCloseFD& operator =(const AutoCloseFD & fd) = delete; - AutoCloseFD& operator =(AutoCloseFD&& fd); + AutoCloseFD & operator=(const AutoCloseFD & fd) = delete; + AutoCloseFD & operator=(AutoCloseFD && fd); Descriptor get() const; explicit operator bool() const; Descriptor release(); @@ -213,4 +214,4 @@ std::wstring handleToFileName(Descriptor handle); MakeError(EndOfFile, Error); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/file-path-impl.hh b/src/libutil/include/nix/util/file-path-impl.hh index 1b4dd28f1..91c1a58cd 100644 --- a/src/libutil/include/nix/util/file-path-impl.hh +++ b/src/libutil/include/nix/util/file-path-impl.hh @@ -42,7 +42,6 @@ struct UnixPathTrait } }; - /** * Windows-style path primitives. * @@ -75,22 +74,17 @@ struct WindowsPathTrait { size_t p1 = path.find('/', from); size_t p2 = path.find(preferredSep, from); - return p1 == String::npos ? p2 : - p2 == String::npos ? p1 : - std::min(p1, p2); + return p1 == String::npos ? p2 : p2 == String::npos ? p1 : std::min(p1, p2); } static size_t rfindPathSep(StringView path, size_t from = String::npos) { size_t p1 = path.rfind('/', from); size_t p2 = path.rfind(preferredSep, from); - return p1 == String::npos ? p2 : - p2 == String::npos ? p1 : - std::max(p1, p2); + return p1 == String::npos ? p2 : p2 == String::npos ? p1 : std::max(p1, p2); } }; - template using OsPathTrait = #ifdef _WIN32 @@ -100,7 +94,6 @@ using OsPathTrait = #endif ; - /** * Core pure path canonicalization algorithm. * @@ -116,9 +109,7 @@ using OsPathTrait = * "result" points to a symlink. */ template -typename PathDict::String canonPathInner( - typename PathDict::StringView remaining, - auto && hookComponent) +typename PathDict::String canonPathInner(typename PathDict::StringView remaining, auto && hookComponent) { assert(remaining != ""); @@ -131,7 +122,8 @@ typename PathDict::String canonPathInner( while (!remaining.empty() && PathDict::isPathSep(remaining[0])) remaining.remove_prefix(1); - if (remaining.empty()) break; + if (remaining.empty()) + break; auto nextComp = ({ auto nextPathSep = PathDict::findPathSep(remaining); @@ -143,9 +135,9 @@ typename PathDict::String canonPathInner( remaining.remove_prefix(1); /* If `..', delete the last component. 
*/ - else if (nextComp == "..") - { - if (!result.empty()) result.erase(PathDict::rfindPathSep(result)); + else if (nextComp == "..") { + if (!result.empty()) + result.erase(PathDict::rfindPathSep(result)); remaining.remove_prefix(2); } @@ -165,9 +157,9 @@ typename PathDict::String canonPathInner( } if (result.empty()) - result = typename PathDict::String { PathDict::preferredSep }; + result = typename PathDict::String{PathDict::preferredSep}; return result; } -} +} // namespace nix diff --git a/src/libutil/include/nix/util/file-path.hh b/src/libutil/include/nix/util/file-path.hh index deff076f1..25349eaf7 100644 --- a/src/libutil/include/nix/util/file-path.hh +++ b/src/libutil/include/nix/util/file-path.hh @@ -30,18 +30,27 @@ struct PathViewNG : OsStringView PathViewNG(const std::filesystem::path & path) : OsStringView{path.native()} - { } + { + } PathViewNG(const OsString & path) : OsStringView{path} - { } + { + } - const string_view & native() const { return *this; } - string_view & native() { return *this; } + const string_view & native() const + { + return *this; + } + + string_view & native() + { + return *this; + } }; std::optional maybePath(PathView path); std::filesystem::path pathNG(PathView path); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/file-system.hh b/src/libutil/include/nix/util/file-system.hh index c45cb55aa..98b992472 100644 --- a/src/libutil/include/nix/util/file-system.hh +++ b/src/libutil/include/nix/util/file-system.hh @@ -14,7 +14,7 @@ #include #include #ifdef _WIN32 -# include +# include #endif #include @@ -28,7 +28,7 @@ * @todo get rid of this, and stop using `stat` when we want `lstat` too. */ #ifndef S_ISLNK -# define S_ISLNK(m) false +# define S_ISLNK(m) false #endif namespace nix { @@ -48,19 +48,14 @@ bool isAbsolute(PathView path); * * In the process of being deprecated for `std::filesystem::absolute`. 
*/ -Path absPath(PathView path, - std::optional dir = {}, - bool resolveSymlinks = false); +Path absPath(PathView path, std::optional dir = {}, bool resolveSymlinks = false); -inline Path absPath(const Path & path, - std::optional dir = {}, - bool resolveSymlinks = false) +inline Path absPath(const Path & path, std::optional dir = {}, bool resolveSymlinks = false) { return absPath(PathView{path}, dir, resolveSymlinks); } -std::filesystem::path absPath(const std::filesystem::path & path, - bool resolveSymlinks = false); +std::filesystem::path absPath(const std::filesystem::path & path, bool resolveSymlinks = false); /** * Canonicalise a path by removing all `.` or `..` components and @@ -176,19 +171,22 @@ enum struct FsSync { Yes, No }; */ void writeFile(const Path & path, std::string_view s, mode_t mode = 0666, FsSync sync = FsSync::No); -static inline void writeFile(const std::filesystem::path & path, std::string_view s, mode_t mode = 0666, FsSync sync = FsSync::No) +static inline void +writeFile(const std::filesystem::path & path, std::string_view s, mode_t mode = 0666, FsSync sync = FsSync::No) { return writeFile(path.string(), s, mode, sync); } void writeFile(const Path & path, Source & source, mode_t mode = 0666, FsSync sync = FsSync::No); -static inline void writeFile(const std::filesystem::path & path, Source & source, mode_t mode = 0666, FsSync sync = FsSync::No) +static inline void +writeFile(const std::filesystem::path & path, Source & source, mode_t mode = 0666, FsSync sync = FsSync::No) { return writeFile(path.string(), source, mode, sync); } -void writeFile(AutoCloseFD & fd, const Path & origPath, std::string_view s, mode_t mode = 0666, FsSync sync = FsSync::No); +void writeFile( + AutoCloseFD & fd, const Path & origPath, std::string_view s, mode_t mode = 0666, FsSync sync = FsSync::No); /** * Flush a path's parent directory to disk. @@ -295,29 +293,41 @@ public: void reset(const std::filesystem::path & p, bool recursive = true); - const std::filesystem::path & path() const { return _path; } - PathViewNG view() const { return _path; } + const std::filesystem::path & path() const + { + return _path; + } - operator const std::filesystem::path & () const { return _path; } - operator PathViewNG () const { return _path; } + PathViewNG view() const + { + return _path; + } + + operator const std::filesystem::path &() const + { + return _path; + } + + operator PathViewNG() const + { + return _path; + } }; - struct DIRDeleter { - void operator()(DIR * dir) const { + void operator()(DIR * dir) const + { closedir(dir); } }; typedef std::unique_ptr AutoCloseDir; - /** * Create a temporary directory. */ -Path createTempDir(const Path & tmpRoot = "", const Path & prefix = "nix", - mode_t mode = 0755); +Path createTempDir(const Path & tmpRoot = "", const Path & prefix = "nix", mode_t mode = 0755); /** * Create a temporary file, returning a file handle and its path. @@ -367,59 +377,71 @@ extern PathFilter defaultPathFilter; bool chmodIfNeeded(const std::filesystem::path & path, mode_t mode, mode_t mask = S_IRWXU | S_IRWXG | S_IRWXO); /** - * @brief A directory iterator that can be used to iterate over the - * contents of a directory. It is similar to std::filesystem::directory_iterator - * but throws NixError on failure instead of std::filesystem::filesystem_error. - */ -class DirectoryIterator { + * @brief A directory iterator that can be used to iterate over the + * contents of a directory. 
It is similar to std::filesystem::directory_iterator + * but throws NixError on failure instead of std::filesystem::filesystem_error. + */ +class DirectoryIterator +{ public: // --- Iterator Traits --- using iterator_category = std::input_iterator_tag; - using value_type = std::filesystem::directory_entry; - using difference_type = std::ptrdiff_t; - using pointer = const std::filesystem::directory_entry*; - using reference = const std::filesystem::directory_entry&; + using value_type = std::filesystem::directory_entry; + using difference_type = std::ptrdiff_t; + using pointer = const std::filesystem::directory_entry *; + using reference = const std::filesystem::directory_entry &; // Default constructor (represents end iterator) DirectoryIterator() noexcept = default; // Constructor taking a path - explicit DirectoryIterator(const std::filesystem::path& p); + explicit DirectoryIterator(const std::filesystem::path & p); - reference operator*() const { + reference operator*() const + { // Accessing the value itself doesn't typically throw filesystem_error // after successful construction/increment, but underlying operations might. // If directory_entry methods called via -> could throw, add try-catch there. return *it_; } - pointer operator->() const { + pointer operator->() const + { return &(*it_); } - - DirectoryIterator& operator++(); + DirectoryIterator & operator++(); // Postfix increment operator - DirectoryIterator operator++(int) { + DirectoryIterator operator++(int) + { DirectoryIterator temp = *this; ++(*this); // Uses the prefix increment's try-catch logic return temp; } // Equality comparison - friend bool operator==(const DirectoryIterator& a, const DirectoryIterator& b) noexcept { + friend bool operator==(const DirectoryIterator & a, const DirectoryIterator & b) noexcept + { return a.it_ == b.it_; } // Inequality comparison - friend bool operator!=(const DirectoryIterator& a, const DirectoryIterator& b) noexcept { + friend bool operator!=(const DirectoryIterator & a, const DirectoryIterator & b) noexcept + { return !(a == b); } // Allow direct use in range-based for loops if iterating over an instance - DirectoryIterator begin() const { return *this; } - DirectoryIterator end() const { return DirectoryIterator{}; } + DirectoryIterator begin() const + { + return *this; + } + + DirectoryIterator end() const + { + return DirectoryIterator{}; + } private: @@ -432,11 +454,11 @@ class AutoUnmount Path path; bool del; public: - AutoUnmount(Path&); + AutoUnmount(Path &); AutoUnmount(); ~AutoUnmount(); void cancel(); }; #endif -} +} // namespace nix diff --git a/src/libutil/include/nix/util/finally.hh b/src/libutil/include/nix/util/finally.hh index 2b25010a1..a5656ad41 100644 --- a/src/libutil/include/nix/util/finally.hh +++ b/src/libutil/include/nix/util/finally.hh @@ -16,10 +16,15 @@ private: bool movedFrom = false; public: - Finally(Fn fun) : fun(std::move(fun)) { } + Finally(Fn fun) + : fun(std::move(fun)) + { + } + // Copying Finallys is definitely not a good idea and will cause them to be // called twice. - Finally(Finally &other) = delete; + Finally(Finally & other) = delete; + // NOTE: Move constructor can be nothrow if the callable type is itself nothrow // move-constructible. 
Finally(Finally && other) noexcept(std::is_nothrow_move_constructible_v) @@ -27,6 +32,7 @@ public: { other.movedFrom = true; } + ~Finally() noexcept(false) { try { diff --git a/src/libutil/include/nix/util/fmt.hh b/src/libutil/include/nix/util/fmt.hh index 5435a4ebf..f32a0b62b 100644 --- a/src/libutil/include/nix/util/fmt.hh +++ b/src/libutil/include/nix/util/fmt.hh @@ -5,7 +5,6 @@ #include #include "nix/util/ansicolor.hh" - namespace nix { /** @@ -22,10 +21,11 @@ namespace nix { */ template inline void formatHelper(F & f) -{ } +{ +} template -inline void formatHelper(F & f, const T & x, const Args & ... args) +inline void formatHelper(F & f, const T & x, const Args &... args) { // Interpolate one argument and then recurse. formatHelper(f % x, args...); @@ -36,10 +36,7 @@ inline void formatHelper(F & f, const T & x, const Args & ... args) */ inline void setExceptions(boost::format & fmt) { - fmt.exceptions( - boost::io::all_error_bits ^ - boost::io::too_many_args_bit ^ - boost::io::too_few_args_bit); + fmt.exceptions(boost::io::all_error_bits ^ boost::io::too_many_args_bit ^ boost::io::too_few_args_bit); } /** @@ -80,7 +77,7 @@ inline std::string fmt(const char * s) } template -inline std::string fmt(const std::string & fs, const Args & ... args) +inline std::string fmt(const std::string & fs, const Args &... args) { boost::format f(fs); setExceptions(f); @@ -95,14 +92,18 @@ inline std::string fmt(const std::string & fs, const Args & ... args) * either wrap the argument in `Uncolored` or add a specialization of * `HintFmt::operator%`. */ -template +template struct Magenta { - Magenta(const T &s) : value(s) {} + Magenta(const T & s) + : value(s) + { + } + const T & value; }; -template +template std::ostream & operator<<(std::ostream & out, const Magenta & y) { return out << ANSI_WARNING << y.value << ANSI_NORMAL; @@ -115,14 +116,18 @@ std::ostream & operator<<(std::ostream & out, const Magenta & y) * * By default, arguments to `HintFmt` are printed in magenta (see `Magenta`). */ -template +template struct Uncolored { - Uncolored(const T & s) : value(s) {} + Uncolored(const T & s) + : value(s) + { + } + const T & value; }; -template +template std::ostream & operator<<(std::ostream & out, const Uncolored & y) { return out << ANSI_NORMAL << y.value; @@ -144,9 +149,11 @@ public: */ HintFmt(const std::string & literal) : HintFmt("%s", Uncolored(literal)) - { } + { + } - static HintFmt fromFormatString(const std::string & format) { + static HintFmt fromFormatString(const std::string & format) + { return HintFmt(boost::format(format)); } @@ -154,16 +161,18 @@ public: * Interpolate the given arguments into the format string. */ template - HintFmt(const std::string & format, const Args & ... args) + HintFmt(const std::string & format, const Args &... args) : HintFmt(boost::format(format), args...) - { } + { + } HintFmt(const HintFmt & hf) : fmt(hf.fmt) - { } + { + } template - HintFmt(boost::format && fmt, const Args & ... args) + HintFmt(boost::format && fmt, const Args &... args) : fmt(std::move(fmt)) { setExceptions(fmt); @@ -194,4 +203,4 @@ public: std::ostream & operator<<(std::ostream & os, const HintFmt & hf); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/fs-sink.hh b/src/libutil/include/nix/util/fs-sink.hh index 1c34fba93..f96fe3ef9 100644 --- a/src/libutil/include/nix/util/fs-sink.hh +++ b/src/libutil/include/nix/util/fs-sink.hh @@ -19,10 +19,9 @@ struct CreateRegularFileSink : Sink /** * An optimization. By default, do nothing. 
*/ - virtual void preallocateContents(uint64_t size) { }; + virtual void preallocateContents(uint64_t size) {}; }; - struct FileSystemObjectSink { virtual ~FileSystemObjectSink() = default; @@ -33,9 +32,7 @@ struct FileSystemObjectSink * This function in general is no re-entrant. Only one file can be * written at a time. */ - virtual void createRegularFile( - const CanonPath & path, - std::function) = 0; + virtual void createRegularFile(const CanonPath & path, std::function) = 0; virtual void createSymlink(const CanonPath & path, const std::string & target) = 0; }; @@ -57,19 +54,18 @@ struct ExtendedFileSystemObjectSink : virtual FileSystemObjectSink * Recursively copy file system objects from the source into the sink. */ void copyRecursive( - SourceAccessor & accessor, const CanonPath & sourcePath, - FileSystemObjectSink & sink, const CanonPath & destPath); + SourceAccessor & accessor, const CanonPath & sourcePath, FileSystemObjectSink & sink, const CanonPath & destPath); /** * Ignore everything and do nothing */ struct NullFileSystemObjectSink : FileSystemObjectSink { - void createDirectory(const CanonPath & path) override { } - void createSymlink(const CanonPath & path, const std::string & target) override { } - void createRegularFile( - const CanonPath & path, - std::function) override; + void createDirectory(const CanonPath & path) override {} + + void createSymlink(const CanonPath & path, const std::string & target) override {} + + void createRegularFile(const CanonPath & path, std::function) override; }; /** @@ -82,13 +78,12 @@ struct RestoreSink : FileSystemObjectSink explicit RestoreSink(bool startFsync) : startFsync{startFsync} - { } + { + } void createDirectory(const CanonPath & path) override; - void createRegularFile( - const CanonPath & path, - std::function) override; + void createRegularFile(const CanonPath & path, std::function) override; void createSymlink(const CanonPath & path, const std::string & target) override; }; @@ -103,7 +98,10 @@ struct RegularFileSink : FileSystemObjectSink bool regular = true; Sink & sink; - RegularFileSink(Sink & sink) : sink(sink) { } + RegularFileSink(Sink & sink) + : sink(sink) + { + } void createDirectory(const CanonPath & path) override { @@ -115,9 +113,7 @@ struct RegularFileSink : FileSystemObjectSink regular = false; } - void createRegularFile( - const CanonPath & path, - std::function) override; + void createRegularFile(const CanonPath & path, std::function) override; }; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/git.hh b/src/libutil/include/nix/util/git.hh index 9bdb30bb9..97008c53a 100644 --- a/src/libutil/include/nix/util/git.hh +++ b/src/libutil/include/nix/util/git.hh @@ -16,8 +16,8 @@ namespace nix::git { enum struct ObjectType { Blob, Tree, - //Commit, - //Tag, + // Commit, + // Tag, }; using RawMode = uint32_t; @@ -39,8 +39,8 @@ struct TreeEntry Mode mode; Hash hash; - bool operator ==(const TreeEntry &) const = default; - auto operator <=>(const TreeEntry &) const = default; + bool operator==(const TreeEntry &) const = default; + auto operator<=>(const TreeEntry &) const = default; }; /** @@ -72,9 +72,8 @@ using SinkHook = void(const CanonPath & name, TreeEntry entry); * * @throws if prefix not recognized */ -ObjectType parseObjectType( - Source & source, - const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); +ObjectType +parseObjectType(Source & source, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); /** * These 3 modes are represented by blob 
objects. @@ -82,21 +81,22 @@ ObjectType parseObjectType( * Sometimes we need this information to disambiguate how a blob is * being used to better match our own "file system object" data model. */ -enum struct BlobMode : RawMode -{ +enum struct BlobMode : RawMode { Regular = static_cast(Mode::Regular), Executable = static_cast(Mode::Executable), Symlink = static_cast(Mode::Symlink), }; void parseBlob( - FileSystemObjectSink & sink, const CanonPath & sinkPath, + FileSystemObjectSink & sink, + const CanonPath & sinkPath, Source & source, BlobMode blobMode, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); void parseTree( - FileSystemObjectSink & sink, const CanonPath & sinkPath, + FileSystemObjectSink & sink, + const CanonPath & sinkPath, Source & source, std::function hook, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); @@ -109,7 +109,8 @@ void parseTree( * a blob, this is ignored. */ void parse( - FileSystemObjectSink & sink, const CanonPath & sinkPath, + FileSystemObjectSink & sink, + const CanonPath & sinkPath, Source & source, BlobMode rootModeIfBlob, std::function hook, @@ -139,15 +140,13 @@ void restore(FileSystemObjectSink & sink, Source & source, std::function reference; @@ -211,4 +205,4 @@ struct LsRemoteRefLine { */ std::optional parseLsRemoteLine(std::string_view line); -} +} // namespace nix::git diff --git a/src/libutil/include/nix/util/hash.hh b/src/libutil/include/nix/util/hash.hh index 715537456..4237d7660 100644 --- a/src/libutil/include/nix/util/hash.hh +++ b/src/libutil/include/nix/util/hash.hh @@ -8,10 +8,8 @@ namespace nix { - MakeError(BadHash, Error); - enum struct HashAlgorithm : char { MD5 = 42, SHA1, SHA256, SHA512, BLAKE3 }; const int blake3HashSize = 32; @@ -89,12 +87,12 @@ public: /** * Check whether two hashes are equal. */ - bool operator == (const Hash & h2) const noexcept; + bool operator==(const Hash & h2) const noexcept; /** * Compare how two hashes are ordered. */ - std::strong_ordering operator <=> (const Hash & h2) const noexcept; + std::strong_ordering operator<=>(const Hash & h2) const noexcept; /** * Returns the length of a base-16 representation of this hash. @@ -158,7 +156,8 @@ std::string printHash16or32(const Hash & hash); /** * Compute the hash of the given string. */ -Hash hashString(HashAlgorithm ha, std::string_view s, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); +Hash hashString( + HashAlgorithm ha, std::string_view s, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); /** * Compute the hash of the given file, hashing its contents directly. @@ -210,7 +209,6 @@ std::optional parseHashAlgoOpt(std::string_view s); */ std::string_view printHashAlgo(HashAlgorithm ha); - union Ctx; struct AbstractHashSink : virtual Sink @@ -234,5 +232,4 @@ public: HashResult currentHash(); }; - -} +} // namespace nix diff --git a/src/libutil/include/nix/util/hilite.hh b/src/libutil/include/nix/util/hilite.hh index 2d5cf7c6f..ee9985f39 100644 --- a/src/libutil/include/nix/util/hilite.hh +++ b/src/libutil/include/nix/util/hilite.hh @@ -14,10 +14,7 @@ namespace nix { * If some matches overlap, then their union will be wrapped rather * than the individual matches. 
*/ -std::string hiliteMatches( - std::string_view s, - std::vector matches, - std::string_view prefix, - std::string_view postfix); +std::string +hiliteMatches(std::string_view s, std::vector matches, std::string_view prefix, std::string_view postfix); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/json-impls.hh b/src/libutil/include/nix/util/json-impls.hh index 9dd344c50..8a6198313 100644 --- a/src/libutil/include/nix/util/json-impls.hh +++ b/src/libutil/include/nix/util/json-impls.hh @@ -4,12 +4,13 @@ #include // Following https://github.com/nlohmann/json#how-can-i-use-get-for-non-default-constructiblenon-copyable-types -#define JSON_IMPL(TYPE) \ - namespace nlohmann { \ - using namespace nix; \ - template <> \ - struct adl_serializer { \ - static TYPE from_json(const json & json); \ - static void to_json(json & json, TYPE t); \ - }; \ +#define JSON_IMPL(TYPE) \ + namespace nlohmann { \ + using namespace nix; \ + template<> \ + struct adl_serializer \ + { \ + static TYPE from_json(const json & json); \ + static void to_json(json & json, TYPE t); \ + }; \ } diff --git a/src/libutil/include/nix/util/json-utils.hh b/src/libutil/include/nix/util/json-utils.hh index 37f4d58f8..20c50f957 100644 --- a/src/libutil/include/nix/util/json-utils.hh +++ b/src/libutil/include/nix/util/json-utils.hh @@ -21,9 +21,7 @@ nlohmann::json * get(nlohmann::json & map, const std::string & key); * * Use instead of nlohmann::json::at() to avoid ugly exceptions. */ -const nlohmann::json & valueAt( - const nlohmann::json::object_t & map, - const std::string & key); +const nlohmann::json & valueAt(const nlohmann::json::object_t & map, const std::string & key); std::optional optionalValueAt(const nlohmann::json::object_t & value, const std::string & key); std::optional nullableValueAt(const nlohmann::json::object_t & value, const std::string & key); @@ -73,36 +71,45 @@ struct json_avoids_null; * Handle numbers in default impl */ template -struct json_avoids_null : std::bool_constant::value> {}; +struct json_avoids_null : std::bool_constant::value> +{}; template<> -struct json_avoids_null : std::false_type {}; +struct json_avoids_null : std::false_type +{}; template<> -struct json_avoids_null : std::true_type {}; +struct json_avoids_null : std::true_type +{}; template<> -struct json_avoids_null : std::true_type {}; +struct json_avoids_null : std::true_type +{}; template -struct json_avoids_null> : std::true_type {}; +struct json_avoids_null> : std::true_type +{}; template -struct json_avoids_null> : std::true_type {}; +struct json_avoids_null> : std::true_type +{}; template -struct json_avoids_null> : std::true_type {}; +struct json_avoids_null> : std::true_type +{}; template -struct json_avoids_null> : std::true_type {}; +struct json_avoids_null> : std::true_type +{}; /** * `ExperimentalFeature` is always rendered as a string. */ template<> -struct json_avoids_null : std::true_type {}; +struct json_avoids_null : std::true_type +{}; -} +} // namespace nix namespace nlohmann { @@ -123,12 +130,8 @@ struct adl_serializer> */ static void from_json(const json & json, std::optional & t) { - static_assert( - nix::json_avoids_null::value, - "null is already in use for underlying type's JSON"); - t = json.is_null() - ? std::nullopt - : std::make_optional(json.template get()); + static_assert(nix::json_avoids_null::value, "null is already in use for underlying type's JSON"); + t = json.is_null() ? 
std::nullopt : std::make_optional(json.template get()); } /** @@ -137,9 +140,7 @@ struct adl_serializer> */ static void to_json(json & json, const std::optional & t) { - static_assert( - nix::json_avoids_null::value, - "null is already in use for underlying type's JSON"); + static_assert(nix::json_avoids_null::value, "null is already in use for underlying type's JSON"); if (t) json = *t; else @@ -147,4 +148,4 @@ struct adl_serializer> } }; -} +} // namespace nlohmann diff --git a/src/libutil/include/nix/util/logging.hh b/src/libutil/include/nix/util/logging.hh index dabfac483..500d443e6 100644 --- a/src/libutil/include/nix/util/logging.hh +++ b/src/libutil/include/nix/util/logging.hh @@ -46,14 +46,18 @@ typedef uint64_t ActivityId; struct LoggerSettings : Config { Setting showTrace{ - this, false, "show-trace", + this, + false, + "show-trace", R"( Whether Nix should print out a stack trace in case of Nix expression evaluation errors. )"}; Setting jsonLogPath{ - this, "", "json-log-path", + this, + "", + "json-log-path", R"( A file or unix socket to which JSON records of Nix's log output are written, in the same format as `--log-format internal-json` @@ -75,23 +79,40 @@ public: { // FIXME: use std::variant. enum { tInt = 0, tString = 1 } type; + uint64_t i = 0; std::string s; - Field(const std::string & s) : type(tString), s(s) { } - Field(const char * s) : type(tString), s(s) { } - Field(const uint64_t & i) : type(tInt), i(i) { } + + Field(const std::string & s) + : type(tString) + , s(s) + { + } + + Field(const char * s) + : type(tString) + , s(s) + { + } + + Field(const uint64_t & i) + : type(tInt) + , i(i) + { + } }; typedef std::vector Fields; - virtual ~Logger() { } + virtual ~Logger() {} - virtual void stop() { }; + virtual void stop() {}; /** * Guard object to resume the logger when done. */ - struct Suspension { + struct Suspension + { Finally> _finalize; }; @@ -99,11 +120,14 @@ public: std::optional suspendIf(bool cond); - virtual void pause() { }; - virtual void resume() { }; + virtual void pause() {}; + virtual void resume() {}; // Whether the logger prints the whole build log - virtual bool isVerbose() { return false; } + virtual bool isVerbose() + { + return false; + } virtual void log(Verbosity lvl, std::string_view s) = 0; @@ -122,26 +146,32 @@ public: virtual void warn(const std::string & msg); - virtual void startActivity(ActivityId act, Verbosity lvl, ActivityType type, - const std::string & s, const Fields & fields, ActivityId parent) { }; + virtual void startActivity( + ActivityId act, + Verbosity lvl, + ActivityType type, + const std::string & s, + const Fields & fields, + ActivityId parent) {}; - virtual void stopActivity(ActivityId act) { }; + virtual void stopActivity(ActivityId act) {}; - virtual void result(ActivityId act, ResultType type, const Fields & fields) { }; + virtual void result(ActivityId act, ResultType type, const Fields & fields) {}; virtual void writeToStdout(std::string_view s); template - inline void cout(const Args & ... args) + inline void cout(const Args &... args) { writeToStdout(fmt(args...)); } virtual std::optional ask(std::string_view s) - { return {}; } + { + return {}; + } - virtual void setPrintBuildLogs(bool printBuildLogs) - { } + virtual void setPrintBuildLogs(bool printBuildLogs) {} }; /** @@ -151,8 +181,10 @@ public: */ struct nop { - template nop(T...) - { } + template + nop(T...) 
+ { + } }; ActivityId getCurActivity(); @@ -164,25 +196,34 @@ struct Activity const ActivityId id; - Activity(Logger & logger, Verbosity lvl, ActivityType type, const std::string & s = "", - const Logger::Fields & fields = {}, ActivityId parent = getCurActivity()); + Activity( + Logger & logger, + Verbosity lvl, + ActivityType type, + const std::string & s = "", + const Logger::Fields & fields = {}, + ActivityId parent = getCurActivity()); - Activity(Logger & logger, ActivityType type, - const Logger::Fields & fields = {}, ActivityId parent = getCurActivity()) - : Activity(logger, lvlError, type, "", fields, parent) { }; + Activity( + Logger & logger, ActivityType type, const Logger::Fields & fields = {}, ActivityId parent = getCurActivity()) + : Activity(logger, lvlError, type, "", fields, parent) {}; Activity(const Activity & act) = delete; ~Activity(); void progress(uint64_t done = 0, uint64_t expected = 0, uint64_t running = 0, uint64_t failed = 0) const - { result(resProgress, done, expected, running, failed); } + { + result(resProgress, done, expected, running, failed); + } void setExpected(ActivityType type2, uint64_t expected) const - { result(resSetExpected, type2, expected); } + { + result(resSetExpected, type2, expected); + } template - void result(ResultType type, const Args & ... args) const + void result(ResultType type, const Args &... args) const { Logger::Fields fields; nop{(fields.emplace_back(Logger::Field(args)), 1)...}; @@ -200,8 +241,17 @@ struct Activity struct PushActivity { const ActivityId prevAct; - PushActivity(ActivityId act) : prevAct(getCurActivity()) { setCurActivity(act); } - ~PushActivity() { setCurActivity(prevAct); } + + PushActivity(ActivityId act) + : prevAct(getCurActivity()) + { + setCurActivity(act); + } + + ~PushActivity() + { + setCurActivity(prevAct); + } }; extern std::unique_ptr logger; @@ -213,9 +263,8 @@ std::unique_ptr makeSimpleLogger(bool printBuildLogs = true); * list of loggers in `extraLoggers`. Only `mainLogger` is used for * writing to stdout and getting user input. */ -std::unique_ptr makeTeeLogger( - std::unique_ptr mainLogger, - std::vector> && extraLoggers); +std::unique_ptr +makeTeeLogger(std::unique_ptr mainLogger, std::vector> && extraLoggers); std::unique_ptr makeJSONLogger(Descriptor fd, bool includeNixPrefix = true); @@ -231,16 +280,20 @@ std::optional parseJSONMessage(const std::string & msg, std::str /** * @param source A noun phrase describing the source of the message, e.g. "the builder". */ -bool handleJSONLogMessage(nlohmann::json & json, - const Activity & act, std::map & activities, +bool handleJSONLogMessage( + nlohmann::json & json, + const Activity & act, + std::map & activities, std::string_view source, bool trusted); /** * @param source A noun phrase describing the source of the message, e.g. "the builder". */ -bool handleJSONLogMessage(const std::string & msg, - const Activity & act, std::map & activities, +bool handleJSONLogMessage( + const std::string & msg, + const Activity & act, + std::map & activities, std::string_view source, bool trusted); @@ -255,11 +308,11 @@ extern Verbosity verbosity; * intervention or that need more explanation. Use the 'print' macros for more * lightweight status messages. */ -#define logErrorInfo(level, errorInfo...) \ - do { \ - if ((level) <= nix::verbosity) { \ - logger->logEI((level), errorInfo); \ - } \ +#define logErrorInfo(level, errorInfo...) 
\ + do { \ + if ((level) <= nix::verbosity) { \ + logger->logEI((level), errorInfo); \ + } \ } while (0) #define logError(errorInfo...) logErrorInfo(lvlError, errorInfo) @@ -271,11 +324,11 @@ extern Verbosity verbosity; * arguments are evaluated lazily. */ #define printMsgUsing(loggerParam, level, args...) \ - do { \ - auto __lvl = level; \ - if (__lvl <= nix::verbosity) { \ - loggerParam->log(__lvl, fmt(args)); \ - } \ + do { \ + auto __lvl = level; \ + if (__lvl <= nix::verbosity) { \ + loggerParam->log(__lvl, fmt(args)); \ + } \ } while (0) #define printMsg(level, args...) printMsgUsing(logger, level, args) @@ -290,7 +343,7 @@ extern Verbosity verbosity; * if verbosity >= lvlWarn, print a message with a yellow 'warning:' prefix. */ template -inline void warn(const std::string & fs, const Args & ... args) +inline void warn(const std::string & fs, const Args &... args) { boost::format f(fs); formatHelper(f, args...); @@ -305,4 +358,4 @@ inline void warn(const std::string & fs, const Args & ... args) void writeToStderr(std::string_view s); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/lru-cache.hh b/src/libutil/include/nix/util/lru-cache.hh index 0834a8e74..23cfa91e1 100644 --- a/src/libutil/include/nix/util/lru-cache.hh +++ b/src/libutil/include/nix/util/lru-cache.hh @@ -141,4 +141,4 @@ public: } }; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/memory-source-accessor.hh b/src/libutil/include/nix/util/memory-source-accessor.hh index d09ba153d..a04d1d347 100644 --- a/src/libutil/include/nix/util/memory-source-accessor.hh +++ b/src/libutil/include/nix/util/memory-source-accessor.hh @@ -14,33 +14,37 @@ struct MemorySourceAccessor : virtual SourceAccessor * `MemorySourceAccessor`, this has a side benefit of nicely * defining what a "file system object" is in Nix. */ - struct File { - bool operator == (const File &) const noexcept; - std::strong_ordering operator <=> (const File &) const noexcept; + struct File + { + bool operator==(const File &) const noexcept; + std::strong_ordering operator<=>(const File &) const noexcept; - struct Regular { + struct Regular + { bool executable = false; std::string contents; - bool operator == (const Regular &) const = default; - auto operator <=> (const Regular &) const = default; + bool operator==(const Regular &) const = default; + auto operator<=>(const Regular &) const = default; }; - struct Directory { + struct Directory + { using Name = std::string; std::map> contents; - bool operator == (const Directory &) const noexcept; + bool operator==(const Directory &) const noexcept; // TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet. 
- bool operator < (const Directory &) const noexcept; + bool operator<(const Directory &) const noexcept; }; - struct Symlink { + struct Symlink + { std::string target; - bool operator == (const Symlink &) const = default; - auto operator <=> (const Symlink &) const = default; + bool operator==(const Symlink &) const = default; + auto operator<=>(const Symlink &) const = default; }; using Raw = std::variant; @@ -51,10 +55,12 @@ struct MemorySourceAccessor : virtual SourceAccessor Stat lstat() const; }; - File root { File::Directory {} }; + File root{File::Directory{}}; - bool operator == (const MemorySourceAccessor &) const noexcept = default; - bool operator < (const MemorySourceAccessor & other) const noexcept { + bool operator==(const MemorySourceAccessor &) const noexcept = default; + + bool operator<(const MemorySourceAccessor & other) const noexcept + { return root < other.root; } @@ -80,19 +86,18 @@ struct MemorySourceAccessor : virtual SourceAccessor SourcePath addFile(CanonPath path, std::string && contents); }; - -inline bool MemorySourceAccessor::File::Directory::operator == ( +inline bool MemorySourceAccessor::File::Directory::operator==( const MemorySourceAccessor::File::Directory &) const noexcept = default; -inline bool MemorySourceAccessor::File::Directory::operator < ( - const MemorySourceAccessor::File::Directory & other) const noexcept + +inline bool +MemorySourceAccessor::File::Directory::operator<(const MemorySourceAccessor::File::Directory & other) const noexcept { return contents < other.contents; } -inline bool MemorySourceAccessor::File::operator == ( - const MemorySourceAccessor::File &) const noexcept = default; -inline std::strong_ordering MemorySourceAccessor::File::operator <=> ( - const MemorySourceAccessor::File &) const noexcept = default; +inline bool MemorySourceAccessor::File::operator==(const MemorySourceAccessor::File &) const noexcept = default; +inline std::strong_ordering +MemorySourceAccessor::File::operator<=>(const MemorySourceAccessor::File &) const noexcept = default; /** * Write to a `MemorySourceAccessor` at the given path @@ -101,15 +106,16 @@ struct MemorySink : FileSystemObjectSink { MemorySourceAccessor & dst; - MemorySink(MemorySourceAccessor & dst) : dst(dst) { } + MemorySink(MemorySourceAccessor & dst) + : dst(dst) + { + } void createDirectory(const CanonPath & path) override; - void createRegularFile( - const CanonPath & path, - std::function) override; + void createRegularFile(const CanonPath & path, std::function) override; void createSymlink(const CanonPath & path, const std::string & target) override; }; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/muxable-pipe.hh b/src/libutil/include/nix/util/muxable-pipe.hh index d912627fb..f15c8e5f8 100644 --- a/src/libutil/include/nix/util/muxable-pipe.hh +++ b/src/libutil/include/nix/util/muxable-pipe.hh @@ -79,4 +79,4 @@ struct MuxablePipePollState std::function handleEOF); }; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/os-string.hh b/src/libutil/include/nix/util/os-string.hh index 3e24763fb..f0cbcbaba 100644 --- a/src/libutil/include/nix/util/os-string.hh +++ b/src/libutil/include/nix/util/os-string.hh @@ -49,4 +49,4 @@ OsString string_to_os_string(std::string_view s); # define OS_STR(s) L##s #endif -} +} // namespace nix diff --git a/src/libutil/include/nix/util/pool.hh b/src/libutil/include/nix/util/pool.hh index a63db50de..a9091c2de 100644 --- a/src/libutil/include/nix/util/pool.hh +++ b/src/libutil/include/nix/util/pool.hh @@ -29,7 +29,7 @@ 
namespace nix { * Here, the Connection object referenced by ‘conn’ is automatically * returned to the pool when ‘conn’ goes out of scope. */ -template +template class Pool { public: @@ -63,7 +63,8 @@ private: public: - Pool(size_t max = std::numeric_limits::max(), + Pool( + size_t max = std::numeric_limits::max(), const Factory & factory = []() { return make_ref(); }, const Validator & validator = [](ref r) { return true; }) : factory(factory) @@ -106,7 +107,11 @@ public: friend Pool; - Handle(Pool & pool, std::shared_ptr r) : pool(pool), r(r) { } + Handle(Pool & pool, std::shared_ptr r) + : pool(pool) + , r(r) + { + } public: // NOTE: Copying std::shared_ptr and calling a .reset() on it is always noexcept. @@ -123,7 +128,8 @@ public: ~Handle() { - if (!r) return; + if (!r) + return; { auto state_(pool.state.lock()); if (!bad) @@ -134,10 +140,20 @@ public: pool.wakeup.notify_one(); } - R * operator -> () { return &*r; } - R & operator * () { return *r; } + R * operator->() + { + return &*r; + } - void markBad() { bad = true; } + R & operator*() + { + return *r; + } + + void markBad() + { + bad = true; + } }; Handle get() @@ -197,4 +213,4 @@ public: } }; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/pos-idx.hh b/src/libutil/include/nix/util/pos-idx.hh index 0bf59301a..8e668176c 100644 --- a/src/libutil/include/nix/util/pos-idx.hh +++ b/src/libutil/include/nix/util/pos-idx.hh @@ -49,7 +49,7 @@ public: inline PosIdx noPos = {}; -} +} // namespace nix namespace std { diff --git a/src/libutil/include/nix/util/pos-table.hh b/src/libutil/include/nix/util/pos-table.hh index f64466c21..d944b1353 100644 --- a/src/libutil/include/nix/util/pos-table.hh +++ b/src/libutil/include/nix/util/pos-table.hh @@ -113,4 +113,4 @@ public: } }; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/position.hh b/src/libutil/include/nix/util/position.hh index 34cf86392..48f381439 100644 --- a/src/libutil/include/nix/util/position.hh +++ b/src/libutil/include/nix/util/position.hh @@ -21,30 +21,53 @@ struct Pos uint32_t line = 0; uint32_t column = 0; - struct Stdin { + struct Stdin + { ref source; + bool operator==(const Stdin & rhs) const noexcept - { return *source == *rhs.source; } + { + return *source == *rhs.source; + } + std::strong_ordering operator<=>(const Stdin & rhs) const noexcept - { return *source <=> *rhs.source; } + { + return *source <=> *rhs.source; + } }; - struct String { + + struct String + { ref source; + bool operator==(const String & rhs) const noexcept - { return *source == *rhs.source; } + { + return *source == *rhs.source; + } + std::strong_ordering operator<=>(const String & rhs) const noexcept - { return *source <=> *rhs.source; } + { + return *source <=> *rhs.source; + } }; typedef std::variant Origin; Origin origin = std::monostate(); - Pos() { } - Pos(uint32_t line, uint32_t column, Origin origin) - : line(line), column(column), origin(origin) { } + Pos() {} - explicit operator bool() const { return line > 0; } + Pos(uint32_t line, uint32_t column, Origin origin) + : line(line) + , column(column) + , origin(origin) + { + } + + explicit operator bool() const + { + return line > 0; + } operator std::shared_ptr() const; @@ -67,39 +90,60 @@ struct Pos */ std::optional getSourcePath() const; - struct LinesIterator { + struct LinesIterator + { using difference_type = size_t; using value_type = std::string_view; using reference = const std::string_view &; using pointer = const std::string_view *; using iterator_category = std::input_iterator_tag; - 
LinesIterator(): pastEnd(true) {} - explicit LinesIterator(std::string_view input): input(input), pastEnd(input.empty()) { + LinesIterator() + : pastEnd(true) + { + } + + explicit LinesIterator(std::string_view input) + : input(input) + , pastEnd(input.empty()) + { if (!pastEnd) bump(true); } - LinesIterator & operator++() { + LinesIterator & operator++() + { bump(false); return *this; } - LinesIterator operator++(int) { + + LinesIterator operator++(int) + { auto result = *this; ++*this; return result; } - reference operator*() const { return curLine; } - pointer operator->() const { return &curLine; } + reference operator*() const + { + return curLine; + } - bool operator!=(const LinesIterator & other) const { + pointer operator->() const + { + return &curLine; + } + + bool operator!=(const LinesIterator & other) const + { return !(*this == other); } - bool operator==(const LinesIterator & other) const { + + bool operator==(const LinesIterator & other) const + { return (pastEnd && other.pastEnd) - || (std::forward_as_tuple(input.size(), input.data()) - == std::forward_as_tuple(other.input.size(), other.input.data())); + || (std::forward_as_tuple(input.size(), input.data()) + == std::forward_as_tuple(other.input.size(), other.input.data())); } private: @@ -112,4 +156,4 @@ struct Pos std::ostream & operator<<(std::ostream & str, const Pos & pos); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/posix-source-accessor.hh b/src/libutil/include/nix/util/posix-source-accessor.hh index ea65b148f..895e2e1c1 100644 --- a/src/libutil/include/nix/util/posix-source-accessor.hh +++ b/src/libutil/include/nix/util/posix-source-accessor.hh @@ -27,10 +27,7 @@ struct PosixSourceAccessor : virtual SourceAccessor */ time_t mtime = 0; - void readFile( - const CanonPath & path, - Sink & sink, - std::function sizeCallback) override; + void readFile(const CanonPath & path, Sink & sink, std::function sizeCallback) override; bool pathExists(const CanonPath & path) override; @@ -81,4 +78,4 @@ private: std::filesystem::path makeAbsPath(const CanonPath & path); }; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/processes.hh b/src/libutil/include/nix/util/processes.hh index ab5f23e49..23dee8713 100644 --- a/src/libutil/include/nix/util/processes.hh +++ b/src/libutil/include/nix/util/processes.hh @@ -37,11 +37,11 @@ public: Pid(); #ifndef _WIN32 Pid(pid_t pid); - void operator =(pid_t pid); + void operator=(pid_t pid); operator pid_t(); #else Pid(AutoCloseFD pid); - void operator =(AutoCloseFD pid); + void operator=(AutoCloseFD pid); #endif ~Pid(); int kill(); @@ -55,7 +55,6 @@ public: #endif }; - #ifndef _WIN32 /** * Kill all processes running under the specified uid by sending them @@ -64,7 +63,6 @@ public: void killUser(uid_t uid); #endif - /** * Fork a process that runs the given function, and return the child * pid to the caller. @@ -89,9 +87,12 @@ pid_t startProcess(std::function fun, const ProcessOptions & options = P * Run a program and return its stdout in a string (i.e., like the * shell backtick operator). 
*/ -std::string runProgram(Path program, bool lookupPath = false, +std::string runProgram( + Path program, + bool lookupPath = false, const Strings & args = Strings(), - const std::optional & input = {}, bool isInteractive = false); + const std::optional & input = {}, + bool isInteractive = false); struct RunOptions { @@ -115,16 +116,17 @@ std::pair runProgram(RunOptions && options); void runProgram2(const RunOptions & options); - class ExecError : public Error { public: int status; template - ExecError(int status, const Args & ... args) - : Error(args...), status(status) - { } + ExecError(int status, const Args &... args) + : Error(args...) + , status(status) + { + } }; /** @@ -135,4 +137,4 @@ std::string statusToString(int status); bool statusOk(int status); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/ref.hh b/src/libutil/include/nix/util/ref.hh index 92688bf1e..fb27949c0 100644 --- a/src/libutil/include/nix/util/ref.hh +++ b/src/libutil/include/nix/util/ref.hh @@ -32,17 +32,17 @@ public: throw std::invalid_argument("null pointer cast to ref"); } - T* operator ->() const + T * operator->() const { return &*p; } - T& operator *() const + T & operator*() const { return *p; } - operator std::shared_ptr () const + operator std::shared_ptr() const { return p; } @@ -65,22 +65,22 @@ public: } template - operator ref () const + operator ref() const { return ref((std::shared_ptr) p); } - bool operator == (const ref & other) const + bool operator==(const ref & other) const { return p == other.p; } - bool operator != (const ref & other) const + bool operator!=(const ref & other) const { return p != other.p; } - auto operator <=> (const ref & other) const + auto operator<=>(const ref & other) const { return p <=> other.p; } @@ -88,17 +88,14 @@ public: private: template - friend ref - make_ref(Args&&... args); - + friend ref make_ref(Args &&... args); }; template -inline ref -make_ref(Args&&... args) +inline ref make_ref(Args &&... 
args) { auto p = std::make_shared(std::forward(args)...); return ref(p); } -} +} // namespace nix diff --git a/src/libutil/include/nix/util/references.hh b/src/libutil/include/nix/util/references.hh index 89a42e009..1d5648075 100644 --- a/src/libutil/include/nix/util/references.hh +++ b/src/libutil/include/nix/util/references.hh @@ -14,13 +14,17 @@ class RefScanSink : public Sink public: - RefScanSink(StringSet && hashes) : hashes(hashes) - { } + RefScanSink(StringSet && hashes) + : hashes(hashes) + { + } StringSet & getResult() - { return seen; } + { + return seen; + } - void operator () (std::string_view data) override; + void operator()(std::string_view data) override; }; struct RewritingSink : Sink @@ -36,7 +40,7 @@ struct RewritingSink : Sink RewritingSink(const std::string & from, const std::string & to, Sink & nextSink); RewritingSink(const StringMap & rewrites, Sink & nextSink); - void operator () (std::string_view data) override; + void operator()(std::string_view data) override; void flush(); }; @@ -48,9 +52,9 @@ struct HashModuloSink : AbstractHashSink HashModuloSink(HashAlgorithm ha, const std::string & modulus); - void operator () (std::string_view data) override; + void operator()(std::string_view data) override; HashResult finish() override; }; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/regex-combinators.hh b/src/libutil/include/nix/util/regex-combinators.hh index 75ccd4e6c..c86ad8204 100644 --- a/src/libutil/include/nix/util/regex-combinators.hh +++ b/src/libutil/include/nix/util/regex-combinators.hh @@ -31,4 +31,4 @@ static inline std::string list(std::string_view a) return ss.str(); } -} +} // namespace nix::regex diff --git a/src/libutil/include/nix/util/repair-flag.hh b/src/libutil/include/nix/util/repair-flag.hh index f412d6a20..ad59108f6 100644 --- a/src/libutil/include/nix/util/repair-flag.hh +++ b/src/libutil/include/nix/util/repair-flag.hh @@ -1,4 +1,5 @@ #pragma once + ///@file namespace nix { diff --git a/src/libutil/include/nix/util/serialise.hh b/src/libutil/include/nix/util/serialise.hh index 97fdddae3..16e0d0fa5 100644 --- a/src/libutil/include/nix/util/serialise.hh +++ b/src/libutil/include/nix/util/serialise.hh @@ -8,19 +8,25 @@ #include "nix/util/util.hh" #include "nix/util/file-descriptor.hh" -namespace boost::context { struct stack_context; } +namespace boost::context { +struct stack_context; +} namespace nix { - /** * Abstract destination of binary data. */ struct Sink { - virtual ~Sink() { } - virtual void operator () (std::string_view data) = 0; - virtual bool good() { return true; } + virtual ~Sink() {} + + virtual void operator()(std::string_view data) = 0; + + virtual bool good() + { + return true; + } }; /** @@ -28,17 +34,14 @@ struct Sink */ struct NullSink : Sink { - void operator () (std::string_view data) override - { } + void operator()(std::string_view data) override {} }; - struct FinishSink : virtual Sink { virtual void finish() = 0; }; - /** * A buffered abstract sink. Warning: a BufferedSink should not be * used from multiple threads concurrently. 
@@ -49,9 +52,13 @@ struct BufferedSink : virtual Sink std::unique_ptr buffer; BufferedSink(size_t bufSize = 32 * 1024) - : bufSize(bufSize), bufPos(0), buffer(nullptr) { } + : bufSize(bufSize) + , bufPos(0) + , buffer(nullptr) + { + } - void operator () (std::string_view data) override; + void operator()(std::string_view data) override; void flush(); @@ -60,21 +67,20 @@ protected: virtual void writeUnbuffered(std::string_view data) = 0; }; - /** * Abstract source of binary data. */ struct Source { - virtual ~Source() { } + virtual ~Source() {} /** * Store exactly ‘len’ bytes in the buffer pointed to by ‘data’. * It blocks until all the requested data is available, or throws * an error if it is not going to be available. */ - void operator () (char * data, size_t len); - void operator () (std::string_view data); + void operator()(char * data, size_t len); + void operator()(std::string_view data); /** * Store up to ‘len’ in the buffer pointed to by ‘data’, and @@ -83,14 +89,16 @@ struct Source */ virtual size_t read(char * data, size_t len) = 0; - virtual bool good() { return true; } + virtual bool good() + { + return true; + } void drainInto(Sink & sink); std::string drain(); }; - /** * A buffered abstract source. Warning: a BufferedSource should not be * used from multiple threads concurrently. @@ -101,7 +109,12 @@ struct BufferedSource : Source std::unique_ptr buffer; BufferedSource(size_t bufSize = 32 * 1024) - : bufSize(bufSize), bufPosIn(0), bufPosOut(0), buffer(nullptr) { } + : bufSize(bufSize) + , bufPosIn(0) + , bufPosOut(0) + , buffer(nullptr) + { + } size_t read(char * data, size_t len) override; @@ -117,7 +130,6 @@ protected: virtual size_t readUnbuffered(char * data, size_t len) = 0; }; - /** * A sink that writes data to a file descriptor. */ @@ -126,9 +138,17 @@ struct FdSink : BufferedSink Descriptor fd; size_t written = 0; - FdSink() : fd(INVALID_DESCRIPTOR) { } - FdSink(Descriptor fd) : fd(fd) { } - FdSink(FdSink&&) = default; + FdSink() + : fd(INVALID_DESCRIPTOR) + { + } + + FdSink(Descriptor fd) + : fd(fd) + { + } + + FdSink(FdSink &&) = default; FdSink & operator=(FdSink && s) { @@ -149,7 +169,6 @@ private: bool _good = true; }; - /** * A source that reads data from a file descriptor. */ @@ -159,8 +178,16 @@ struct FdSource : BufferedSource size_t read = 0; BackedStringView endOfFileError{"unexpected end-of-file"}; - FdSource() : fd(INVALID_DESCRIPTOR) { } - FdSource(Descriptor fd) : fd(fd) { } + FdSource() + : fd(INVALID_DESCRIPTOR) + { + } + + FdSource(Descriptor fd) + : fd(fd) + { + } + FdSource(FdSource &&) = default; FdSource & operator=(FdSource && s) = default; @@ -179,22 +206,24 @@ private: bool _good = true; }; - /** * A sink that writes data to a string. */ struct StringSink : Sink { std::string s; - StringSink() { } + + StringSink() {} + explicit StringSink(const size_t reservedSize) { - s.reserve(reservedSize); + s.reserve(reservedSize); }; - StringSink(std::string && s) : s(std::move(s)) { }; - void operator () (std::string_view data) override; -}; + StringSink(std::string && s) + : s(std::move(s)) {}; + void operator()(std::string_view data) override; +}; /** * A source that reads data from a string. @@ -208,28 +237,41 @@ struct StringSource : Source // from std::string -> std::string_view occurs when the string is passed // by rvalue. 
StringSource(std::string &&) = delete; - StringSource(std::string_view s) : s(s), pos(0) { } - StringSource(const std::string& str): StringSource(std::string_view(str)) {} + + StringSource(std::string_view s) + : s(s) + , pos(0) + { + } + + StringSource(const std::string & str) + : StringSource(std::string_view(str)) + { + } size_t read(char * data, size_t len) override; }; - /** * A sink that writes all incoming data to two other sinks. */ struct TeeSink : Sink { - Sink & sink1, & sink2; - TeeSink(Sink & sink1, Sink & sink2) : sink1(sink1), sink2(sink2) { } - virtual void operator () (std::string_view data) override + Sink &sink1, &sink2; + + TeeSink(Sink & sink1, Sink & sink2) + : sink1(sink1) + , sink2(sink2) + { + } + + virtual void operator()(std::string_view data) override { sink1(data); sink2(data); } }; - /** * Adapter class of a Source that saves all data read to a sink. */ @@ -237,8 +279,13 @@ struct TeeSource : Source { Source & orig; Sink & sink; + TeeSource(Source & orig, Sink & sink) - : orig(orig), sink(sink) { } + : orig(orig) + , sink(sink) + { + } + size_t read(char * data, size_t len) override { size_t n = orig.read(data, len); @@ -254,8 +301,13 @@ struct SizedSource : Source { Source & orig; size_t remain; + SizedSource(Source & orig, size_t size) - : orig(orig), remain(size) { } + : orig(orig) + , remain(size) + { + } + size_t read(char * data, size_t len) override { if (this->remain <= 0) { @@ -289,7 +341,7 @@ struct LengthSink : Sink { uint64_t length = 0; - void operator () (std::string_view data) override + void operator()(std::string_view data) override { length += data.size(); } @@ -302,8 +354,10 @@ struct LengthSource : Source { Source & next; - LengthSource(Source & next) : next(next) - { } + LengthSource(Source & next) + : next(next) + { + } uint64_t total = 0; @@ -324,15 +378,17 @@ struct LambdaSink : Sink lambda_t lambda; - LambdaSink(const lambda_t & lambda) : lambda(lambda) { } + LambdaSink(const lambda_t & lambda) + : lambda(lambda) + { + } - void operator () (std::string_view data) override + void operator()(std::string_view data) override { lambda(data); } }; - /** * Convert a function into a source. */ @@ -342,7 +398,10 @@ struct LambdaSource : Source lambda_t lambda; - LambdaSource(const lambda_t & lambda) : lambda(lambda) { } + LambdaSource(const lambda_t & lambda) + : lambda(lambda) + { + } size_t read(char * data, size_t len) override { @@ -356,11 +415,14 @@ struct LambdaSource : Source */ struct ChainSource : Source { - Source & source1, & source2; + Source &source1, &source2; bool useSecond = false; + ChainSource(Source & s1, Source & s2) - : source1(s1), source2(s2) - { } + : source1(s1) + , source2(s2) + { + } size_t read(char * data, size_t len) override; }; @@ -372,16 +434,12 @@ std::unique_ptr sourceToSink(std::function fun); * Source executes the function as a coroutine. 
*/ std::unique_ptr sinkToSource( - std::function fun, - std::function eof = []() { - throw EndOfFile("coroutine has finished"); - }); - + std::function fun, std::function eof = []() { throw EndOfFile("coroutine has finished"); }); void writePadding(size_t len, Sink & sink); void writeString(std::string_view s, Sink & sink); -inline Sink & operator << (Sink & sink, uint64_t n) +inline Sink & operator<<(Sink & sink, uint64_t n) { unsigned char buf[8]; buf[0] = n & 0xff; @@ -396,15 +454,13 @@ inline Sink & operator << (Sink & sink, uint64_t n) return sink; } -Sink & operator << (Sink & in, const Error & ex); -Sink & operator << (Sink & sink, std::string_view s); -Sink & operator << (Sink & sink, const Strings & s); -Sink & operator << (Sink & sink, const StringSet & s); - +Sink & operator<<(Sink & in, const Error & ex); +Sink & operator<<(Sink & sink, std::string_view s); +Sink & operator<<(Sink & sink, const Strings & s); +Sink & operator<<(Sink & sink, const StringSet & s); MakeError(SerialisationError, Error); - template T readNum(Source & source) { @@ -419,35 +475,33 @@ T readNum(Source & source) return (T) n; } - inline unsigned int readInt(Source & source) { return readNum(source); } - inline uint64_t readLongLong(Source & source) { return readNum(source); } - void readPadding(size_t len, Source & source); size_t readString(char * buf, size_t max, Source & source); std::string readString(Source & source, size_t max = std::numeric_limits::max()); -template T readStrings(Source & source); +template +T readStrings(Source & source); -Source & operator >> (Source & in, std::string & s); +Source & operator>>(Source & in, std::string & s); template -Source & operator >> (Source & in, T & n) +Source & operator>>(Source & in, T & n) { n = readNum(in); return in; } template -Source & operator >> (Source & in, bool & b) +Source & operator>>(Source & in, bool & b) { b = readNum(in); return in; @@ -455,7 +509,6 @@ Source & operator >> (Source & in, bool & b) Error readError(Source & source); - /** * An adapter that converts a std::basic_istream into a source. 
*/ @@ -465,7 +518,8 @@ struct StreamToSourceAdapter : Source StreamToSourceAdapter(std::shared_ptr> istream) : istream(istream) - { } + { + } size_t read(char * data, size_t len) override { @@ -480,7 +534,6 @@ struct StreamToSourceAdapter : Source } }; - /** * A source that reads a distinct format of concatenated chunks back into its * logical form, in order to guarantee a known state to the original stream, @@ -496,8 +549,10 @@ struct FramedSource : Source std::vector pending; size_t pos = 0; - FramedSource(Source & from) : from(from) - { } + FramedSource(Source & from) + : from(from) + { + } ~FramedSource() { @@ -505,7 +560,8 @@ struct FramedSource : Source if (!eof) { while (true) { auto n = readInt(from); - if (!n) break; + if (!n) + break; std::vector data(n); from(data.data(), n); } @@ -517,7 +573,8 @@ struct FramedSource : Source size_t read(char * data, size_t len) override { - if (eof) throw EndOfFile("reached end of FramedSource"); + if (eof) + throw EndOfFile("reached end of FramedSource"); if (pos >= pending.size()) { size_t len = readInt(from); @@ -549,8 +606,10 @@ struct FramedSink : nix::BufferedSink std::function checkError; FramedSink(BufferedSink & to, std::function && checkError) - : to(to), checkError(checkError) - { } + : to(to) + , checkError(checkError) + { + } ~FramedSink() { @@ -572,4 +631,4 @@ struct FramedSink : nix::BufferedSink }; }; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/signals.hh b/src/libutil/include/nix/util/signals.hh index 5a2ba8e75..8facec37f 100644 --- a/src/libutil/include/nix/util/signals.hh +++ b/src/libutil/include/nix/util/signals.hh @@ -41,10 +41,9 @@ inline void checkInterrupt(); */ MakeError(Interrupted, BaseError); - struct InterruptCallback { - virtual ~InterruptCallback() { }; + virtual ~InterruptCallback() {}; }; /** @@ -53,8 +52,7 @@ struct InterruptCallback * * @note Does nothing on Windows */ -std::unique_ptr createInterruptCallback( - std::function callback); +std::unique_ptr createInterruptCallback(std::function callback); /** * A RAII class that causes the current thread to receive SIGUSR1 when @@ -65,6 +63,6 @@ std::unique_ptr createInterruptCallback( */ struct ReceiveInterrupts; -} +} // namespace nix #include "nix/util/signals-impl.hh" diff --git a/src/libutil/include/nix/util/signature/local-keys.hh b/src/libutil/include/nix/util/signature/local-keys.hh index 85918f906..1c0579ce9 100644 --- a/src/libutil/include/nix/util/signature/local-keys.hh +++ b/src/libutil/include/nix/util/signature/local-keys.hh @@ -15,7 +15,8 @@ namespace nix { * : * ``` */ -struct BorrowedCryptoValue { +struct BorrowedCryptoValue +{ std::string_view name; std::string_view payload; @@ -45,7 +46,10 @@ protected: Key(std::string_view s, bool sensitiveValue); Key(std::string_view name, std::string && key) - : name(name), key(std::move(key)) { } + : name(name) + , key(std::move(key)) + { + } }; struct PublicKey; @@ -65,7 +69,9 @@ struct SecretKey : Key private: SecretKey(std::string_view name, std::string && key) - : Key(name, std::move(key)) { } + : Key(name, std::move(key)) + { + } }; struct PublicKey : Key @@ -89,7 +95,9 @@ struct PublicKey : Key private: PublicKey(std::string_view name, std::string && key) - : Key(name, std::move(key)) { } + : Key(name, std::move(key)) + { + } friend struct SecretKey; }; @@ -104,4 +112,4 @@ typedef std::map PublicKeys; */ bool verifyDetached(std::string_view data, std::string_view sig, const PublicKeys & publicKeys); -} +} // namespace nix diff --git 
a/src/libutil/include/nix/util/signature/signer.hh b/src/libutil/include/nix/util/signature/signer.hh index ca2905eef..074c0c6e5 100644 --- a/src/libutil/include/nix/util/signature/signer.hh +++ b/src/libutil/include/nix/util/signature/signer.hh @@ -37,7 +37,7 @@ struct Signer virtual const PublicKey & getPublicKey() = 0; }; -using Signers = std::map; +using Signers = std::map; /** * Local signer @@ -58,4 +58,4 @@ private: PublicKey publicKey; }; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/sort.hh b/src/libutil/include/nix/util/sort.hh index 0affdf3ce..2a4eb6e7c 100644 --- a/src/libutil/include/nix/util/sort.hh +++ b/src/libutil/include/nix/util/sort.hh @@ -296,4 +296,4 @@ void peeksort(Iter begin, Iter end, Comparator comp = {}) peeksortImpl(peeksortImpl, begin, end, /*leftRunEnd=*/begin, /*rightRunBegin=*/end); } -} +} // namespace nix diff --git a/src/libutil/include/nix/util/source-accessor.hh b/src/libutil/include/nix/util/source-accessor.hh index 92a9adc46..aa937da48 100644 --- a/src/libutil/include/nix/util/source-accessor.hh +++ b/src/libutil/include/nix/util/source-accessor.hh @@ -46,8 +46,7 @@ struct SourceAccessor : std::enable_shared_from_this SourceAccessor(); - virtual ~SourceAccessor() - { } + virtual ~SourceAccessor() {} /** * Return the contents of a file as a string. @@ -72,24 +71,28 @@ struct SourceAccessor : std::enable_shared_from_this * @note subclasses of `SourceAccessor` need to implement at least * one of the `readFile()` variants. */ - virtual void readFile( - const CanonPath & path, - Sink & sink, - std::function sizeCallback = [](uint64_t size){}); + virtual void + readFile(const CanonPath & path, Sink & sink, std::function sizeCallback = [](uint64_t size) {}); virtual bool pathExists(const CanonPath & path); enum Type { - tRegular, tSymlink, tDirectory, - /** - Any other node types that may be encountered on the file system, such as device nodes, sockets, named pipe, and possibly even more exotic things. + tRegular, + tSymlink, + tDirectory, + /** + Any other node types that may be encountered on the file system, such as device nodes, sockets, named pipe, + and possibly even more exotic things. - Responsible for `"unknown"` from `builtins.readFileType "/dev/null"`. + Responsible for `"unknown"` from `builtins.readFileType "/dev/null"`. - Unlike `DT_UNKNOWN`, this must not be used for deferring the lookup of types. - */ - tChar, tBlock, tSocket, tFifo, - tUnknown + Unlike `DT_UNKNOWN`, this must not be used for deferring the lookup of types. + */ + tChar, + tBlock, + tSocket, + tFifo, + tUnknown }; struct Stat @@ -133,15 +136,10 @@ struct SourceAccessor : std::enable_shared_from_this virtual std::string readLink(const CanonPath & path) = 0; - virtual void dumpPath( - const CanonPath & path, - Sink & sink, - PathFilter & filter = defaultPathFilter); + virtual void dumpPath(const CanonPath & path, Sink & sink, PathFilter & filter = defaultPathFilter); - Hash hashPath( - const CanonPath & path, - PathFilter & filter = defaultPathFilter, - HashAlgorithm ha = HashAlgorithm::SHA256); + Hash + hashPath(const CanonPath & path, PathFilter & filter = defaultPathFilter, HashAlgorithm ha = HashAlgorithm::SHA256); /** * Return a corresponding path in the root filesystem, if @@ -149,14 +147,16 @@ struct SourceAccessor : std::enable_shared_from_this * materialized in the root filesystem. 
*/ virtual std::optional getPhysicalPath(const CanonPath & path) - { return std::nullopt; } + { + return std::nullopt; + } - bool operator == (const SourceAccessor & x) const + bool operator==(const SourceAccessor & x) const { return number == x.number; } - auto operator <=> (const SourceAccessor & x) const + auto operator<=>(const SourceAccessor & x) const { return number <=> x.number; } @@ -172,9 +172,7 @@ struct SourceAccessor : std::enable_shared_from_this * @param mode might only be a temporary solution for this. * See the discussion in https://github.com/NixOS/nix/pull/9985. */ - CanonPath resolveSymlinks( - const CanonPath & path, - SymlinkResolution mode = SymlinkResolution::Full); + CanonPath resolveSymlinks(const CanonPath & path, SymlinkResolution mode = SymlinkResolution::Full); /** * A string that uniquely represents the contents of this @@ -187,7 +185,9 @@ struct SourceAccessor : std::enable_shared_from_this * tree, if available. */ virtual std::optional getLastModified() - { return std::nullopt; } + { + return std::nullopt; + } }; /** @@ -228,4 +228,4 @@ ref makeUnionSourceAccessor(std::vector> && */ ref projectSubdirSourceAccessor(ref, CanonPath subdirectory); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/source-path.hh b/src/libutil/include/nix/util/source-path.hh index c0cba0241..f7cfc8ef7 100644 --- a/src/libutil/include/nix/util/source-path.hh +++ b/src/libutil/include/nix/util/source-path.hh @@ -26,7 +26,8 @@ struct SourcePath SourcePath(ref accessor, CanonPath path = CanonPath::root) : accessor(std::move(accessor)) , path(std::move(path)) - { } + { + } std::string_view baseName() const; @@ -42,15 +43,15 @@ struct SourcePath */ std::string readFile() const; - void readFile( - Sink & sink, - std::function sizeCallback = [](uint64_t size){}) const - { return accessor->readFile(path, sink, sizeCallback); } + void readFile(Sink & sink, std::function sizeCallback = [](uint64_t size) {}) const + { + return accessor->readFile(path, sink, sizeCallback); + } /** * Return whether this `SourcePath` denotes a file (of any type) * that exists - */ + */ bool pathExists() const; /** @@ -80,9 +81,7 @@ struct SourcePath /** * Dump this `SourcePath` to `sink` as a NAR archive. */ - void dumpPath( - Sink & sink, - PathFilter & filter = defaultPathFilter) const; + void dumpPath(Sink & sink, PathFilter & filter = defaultPathFilter) const; /** * Return the location of this path in the "real" filesystem, if @@ -95,14 +94,14 @@ struct SourcePath /** * Append a `CanonPath` to this path. */ - SourcePath operator / (const CanonPath & x) const; + SourcePath operator/(const CanonPath & x) const; /** * Append a single component `c` to this path. `c` must not * contain a slash. A slash is implicitly added between this path * and `c`. */ - SourcePath operator / (std::string_view c) const; + SourcePath operator/(std::string_view c) const; bool operator==(const SourcePath & x) const noexcept; std::strong_ordering operator<=>(const SourcePath & x) const noexcept; @@ -110,8 +109,7 @@ struct SourcePath /** * Convenience wrapper around `SourceAccessor::resolveSymlinks()`. 
*/ - SourcePath resolveSymlinks( - SymlinkResolution mode = SymlinkResolution::Full) const + SourcePath resolveSymlinks(SymlinkResolution mode = SymlinkResolution::Full) const { return {accessor, accessor->resolveSymlinks(path, mode)}; } @@ -119,9 +117,9 @@ struct SourcePath friend class std::hash; }; -std::ostream & operator << (std::ostream & str, const SourcePath & path); +std::ostream & operator<<(std::ostream & str, const SourcePath & path); -} +} // namespace nix template<> struct std::hash diff --git a/src/libutil/include/nix/util/split.hh b/src/libutil/include/nix/util/split.hh index 24a73fea8..838dcdd58 100644 --- a/src/libutil/include/nix/util/split.hh +++ b/src/libutil/include/nix/util/split.hh @@ -14,23 +14,25 @@ namespace nix { * separator. Otherwise, we return `std::nullopt`, and we leave the argument * string alone. */ -static inline std::optional splitPrefixTo(std::string_view & string, char separator) { +static inline std::optional splitPrefixTo(std::string_view & string, char separator) +{ auto sepInstance = string.find(separator); if (sepInstance != std::string_view::npos) { auto prefix = string.substr(0, sepInstance); - string.remove_prefix(sepInstance+1); + string.remove_prefix(sepInstance + 1); return prefix; } return std::nullopt; } -static inline bool splitPrefix(std::string_view & string, std::string_view prefix) { +static inline bool splitPrefix(std::string_view & string, std::string_view prefix) +{ bool res = hasPrefix(string, prefix); if (res) string.remove_prefix(prefix.length()); return res; } -} +} // namespace nix diff --git a/src/libutil/include/nix/util/strings.hh b/src/libutil/include/nix/util/strings.hh index 4c77516a3..b4ef66bfe 100644 --- a/src/libutil/include/nix/util/strings.hh +++ b/src/libutil/include/nix/util/strings.hh @@ -132,4 +132,4 @@ public: } }; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/suggestions.hh b/src/libutil/include/nix/util/suggestions.hh index 6a76eb9d9..6b20f37ab 100644 --- a/src/libutil/include/nix/util/suggestions.hh +++ b/src/libutil/include/nix/util/suggestions.hh @@ -11,7 +11,8 @@ int levenshteinDistance(std::string_view first, std::string_view second); /** * A potential suggestion for the cli interface. 
*/ -class Suggestion { +class Suggestion +{ public: /// The smaller the better int distance; @@ -19,27 +20,22 @@ public: std::string to_string() const; - bool operator ==(const Suggestion &) const = default; - auto operator <=>(const Suggestion &) const = default; + bool operator==(const Suggestion &) const = default; + auto operator<=>(const Suggestion &) const = default; }; -class Suggestions { +class Suggestions +{ public: std::set suggestions; std::string to_string() const; - Suggestions trim( - int limit = 5, - int maxDistance = 2 - ) const; + Suggestions trim(int limit = 5, int maxDistance = 2) const; - static Suggestions bestMatches ( - const StringSet & allMatches, - std::string_view query - ); + static Suggestions bestMatches(const StringSet & allMatches, std::string_view query); - Suggestions& operator+=(const Suggestions & other); + Suggestions & operator+=(const Suggestions & other); }; std::ostream & operator<<(std::ostream & str, const Suggestion &); @@ -49,18 +45,19 @@ std::ostream & operator<<(std::ostream & str, const Suggestions &); * Either a value of type `T`, or some suggestions */ template -class OrSuggestions { +class OrSuggestions +{ public: using Raw = std::variant; Raw raw; - T* operator ->() + T * operator->() { return &**this; } - T& operator *() + T & operator*() { return std::get(raw); } @@ -100,7 +97,6 @@ public: else return noSuggestions; } - }; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/sync.hh b/src/libutil/include/nix/util/sync.hh index 4b9d546d2..262fc328b 100644 --- a/src/libutil/include/nix/util/sync.hh +++ b/src/libutil/include/nix/util/sync.hh @@ -36,10 +36,22 @@ private: public: - SyncBase() { } - SyncBase(const T & data) : data(data) { } - SyncBase(T && data) noexcept : data(std::move(data)) { } - SyncBase(SyncBase && other) noexcept : data(std::move(*other.lock())) { } + SyncBase() {} + + SyncBase(const T & data) + : data(data) + { + } + + SyncBase(T && data) noexcept + : data(std::move(data)) + { + } + + SyncBase(SyncBase && other) noexcept + : data(std::move(*other.lock())) + { + } template class Lock @@ -48,11 +60,22 @@ public: SyncBase * s; L lk; friend SyncBase; - Lock(SyncBase * s) : s(s), lk(s->mutex) { } + + Lock(SyncBase * s) + : s(s) + , lk(s->mutex) + { + } public: - Lock(Lock && l) : s(l.s) { unreachable(); } + Lock(Lock && l) + : s(l.s) + { + unreachable(); + } + Lock(const Lock & l) = delete; - ~Lock() { } + + ~Lock() {} void wait(std::condition_variable & cv) { @@ -61,25 +84,22 @@ public: } template - std::cv_status wait_for(std::condition_variable & cv, - const std::chrono::duration & duration) + std::cv_status wait_for(std::condition_variable & cv, const std::chrono::duration & duration) { assert(s); return cv.wait_for(lk, duration); } template - bool wait_for(std::condition_variable & cv, - const std::chrono::duration & duration, - Predicate pred) + bool wait_for(std::condition_variable & cv, const std::chrono::duration & duration, Predicate pred) { assert(s); return cv.wait_for(lk, duration, pred); } template - std::cv_status wait_until(std::condition_variable & cv, - const std::chrono::time_point & duration) + std::cv_status + wait_until(std::condition_variable & cv, const std::chrono::time_point & duration) { assert(s); return cv.wait_until(lk, duration); @@ -88,32 +108,53 @@ public: struct WriteLock : Lock { - T * operator -> () { return &WriteLock::s->data; } - T & operator * () { return WriteLock::s->data; } + T * operator->() + { + return &WriteLock::s->data; + } + + T & operator*() + { + return 
WriteLock::s->data; + } }; /** * Acquire write (exclusive) access to the inner value. */ - WriteLock lock() { return WriteLock(this); } + WriteLock lock() + { + return WriteLock(this); + } struct ReadLock : Lock { - const T * operator -> () { return &ReadLock::s->data; } - const T & operator * () { return ReadLock::s->data; } + const T * operator->() + { + return &ReadLock::s->data; + } + + const T & operator*() + { + return ReadLock::s->data; + } }; /** * Acquire read access to the inner value. When using * `std::shared_mutex`, this will use a shared lock. */ - ReadLock readLock() const { return ReadLock(const_cast(this)); } + ReadLock readLock() const + { + return ReadLock(const_cast(this)); + } }; template using Sync = SyncBase, std::unique_lock>; template -using SharedSync = SyncBase, std::shared_lock>; +using SharedSync = + SyncBase, std::shared_lock>; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/tarfile.hh b/src/libutil/include/nix/util/tarfile.hh index 2005d13ca..c66e05ef6 100644 --- a/src/libutil/include/nix/util/tarfile.hh +++ b/src/libutil/include/nix/util/tarfile.hh @@ -43,4 +43,4 @@ void unpackTarfile(const std::filesystem::path & tarFile, const std::filesystem: time_t unpackTarfileToSink(TarArchive & archive, ExtendedFileSystemObjectSink & parseSink); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/terminal.hh b/src/libutil/include/nix/util/terminal.hh index 7ff05a487..f19de268c 100644 --- a/src/libutil/include/nix/util/terminal.hh +++ b/src/libutil/include/nix/util/terminal.hh @@ -18,9 +18,8 @@ bool isTTY(); * included in the character count. Also, tabs are expanded to * spaces. */ -std::string filterANSIEscapes(std::string_view s, - bool filterAll = false, - unsigned int width = std::numeric_limits::max()); +std::string filterANSIEscapes( + std::string_view s, bool filterAll = false, unsigned int width = std::numeric_limits::max()); /** * Recalculate the window size, updating a global variable. 
@@ -37,4 +36,4 @@ void updateWindowSize(); */ std::pair getWindowSize(); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/thread-pool.hh b/src/libutil/include/nix/util/thread-pool.hh index 92009e396..811c03d88 100644 --- a/src/libutil/include/nix/util/thread-pool.hh +++ b/src/libutil/include/nix/util/thread-pool.hh @@ -87,7 +87,8 @@ void processGraph( std::function(const T &)> getEdges, std::function processNode) { - struct Graph { + struct Graph + { std::set left; std::map> refs, rrefs; }; @@ -101,7 +102,6 @@ void processGraph( ThreadPool pool; worker = [&](const T & node) { - { auto graph(graph_.lock()); auto i = graph->refs.find(node); @@ -110,22 +110,21 @@ void processGraph( goto doWork; } - getRefs: - { - auto refs = getEdges(node); - refs.erase(node); + getRefs: { + auto refs = getEdges(node); + refs.erase(node); - { - auto graph(graph_.lock()); - for (auto & ref : refs) - if (graph->left.count(ref)) { - graph->refs[node].insert(ref); - graph->rrefs[ref].insert(node); - } - if (graph->refs[node].empty()) - goto doWork; - } + { + auto graph(graph_.lock()); + for (auto & ref : refs) + if (graph->left.count(ref)) { + graph->refs[node].insert(ref); + graph->rrefs[ref].insert(node); + } + if (graph->refs[node].empty()) + goto doWork; } + } return; @@ -167,4 +166,4 @@ void processGraph( throw Error("graph processing incomplete (cyclic reference?)"); } -} +} // namespace nix diff --git a/src/libutil/include/nix/util/topo-sort.hh b/src/libutil/include/nix/util/topo-sort.hh index 6ba6fda71..9f403e2e6 100644 --- a/src/libutil/include/nix/util/topo-sort.hh +++ b/src/libutil/include/nix/util/topo-sort.hh @@ -6,9 +6,10 @@ namespace nix { template -std::vector topoSort(std::set items, - std::function(const T &)> getChildren, - std::function makeCycleError) +std::vector topoSort( + std::set items, + std::function(const T &)> getChildren, + std::function makeCycleError) { std::vector sorted; decltype(items) visited, parents; @@ -16,9 +17,11 @@ std::vector topoSort(std::set items, std::function dfsVisit; dfsVisit = [&](const T & path, const T * parent) { - if (parents.count(path)) throw makeCycleError(path, *parent); + if (parents.count(path)) + throw makeCycleError(path, *parent); - if (!visited.insert(path).second) return; + if (!visited.insert(path).second) + return; parents.insert(path); auto references = getChildren(path); @@ -40,4 +43,4 @@ std::vector topoSort(std::set items, return sorted; } -} +} // namespace nix diff --git a/src/libutil/include/nix/util/types.hh b/src/libutil/include/nix/util/types.hh index edb34f5e2..f8c6c0979 100644 --- a/src/libutil/include/nix/util/types.hh +++ b/src/libutil/include/nix/util/types.hh @@ -1,7 +1,6 @@ #pragma once ///@file - #include #include #include @@ -67,7 +66,10 @@ typedef std::vector> Headers; template struct OnStartup { - OnStartup(T && t) { t(); } + OnStartup(T && t) + { + t(); + } }; /** @@ -75,18 +77,18 @@ struct OnStartup * cast to a bool in Attr. */ template -struct Explicit { +struct Explicit +{ T t; - bool operator ==(const Explicit & other) const = default; + bool operator==(const Explicit & other) const = default; - bool operator <(const Explicit & other) const + bool operator<(const Explicit & other) const { return t < other.t; } }; - /** * This wants to be a little bit like rust's Cow type. * Some parts of the evaluator benefit greatly from being able to reuse @@ -97,7 +99,8 @@ struct Explicit { * since those can easily become ambiguous to the reader and can degrade * into copying behaviour we want to avoid. 
*/ -class BackedStringView { +class BackedStringView +{ private: std::variant data; @@ -106,19 +109,38 @@ private: * a pointer. Without this we'd need to store the view object * even when we already own a string. */ - class Ptr { + class Ptr + { private: std::string_view view; public: - Ptr(std::string_view view): view(view) {} - const std::string_view * operator->() const { return &view; } + Ptr(std::string_view view) + : view(view) + { + } + + const std::string_view * operator->() const + { + return &view; + } }; public: - BackedStringView(std::string && s): data(std::move(s)) {} - BackedStringView(std::string_view sv): data(sv) {} + BackedStringView(std::string && s) + : data(std::move(s)) + { + } + + BackedStringView(std::string_view sv) + : data(sv) + { + } + template - BackedStringView(const char (& lit)[N]): data(std::string_view(lit)) {} + BackedStringView(const char (&lit)[N]) + : data(std::string_view(lit)) + { + } BackedStringView(const BackedStringView &) = delete; BackedStringView & operator=(const BackedStringView &) = delete; @@ -137,18 +159,18 @@ public: std::string toOwned() && { - return isOwned() - ? std::move(std::get(data)) - : std::string(std::get(data)); + return isOwned() ? std::move(std::get(data)) : std::string(std::get(data)); } std::string_view operator*() const { - return isOwned() - ? std::get(data) - : std::get(data); + return isOwned() ? std::get(data) : std::get(data); + } + + Ptr operator->() const + { + return Ptr(**this); } - Ptr operator->() const { return Ptr(**this); } }; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/unix-domain-socket.hh b/src/libutil/include/nix/util/unix-domain-socket.hh index 3aaaddf82..6d28b6276 100644 --- a/src/libutil/include/nix/util/unix-domain-socket.hh +++ b/src/libutil/include/nix/util/unix-domain-socket.hh @@ -87,4 +87,4 @@ void connect(Socket fd, const std::filesystem::path & path); */ AutoCloseFD connect(const std::filesystem::path & path); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/url-parts.hh b/src/libutil/include/nix/util/url-parts.hh index 1ddc6a536..bf1215b6d 100644 --- a/src/libutil/include/nix/util/url-parts.hh +++ b/src/libutil/include/nix/util/url-parts.hh @@ -33,7 +33,8 @@ extern std::regex refRegex; /// Instead of defining what a good Git Ref is, we define what a bad Git Ref is /// This is because of the definition of a ref in refs.c in https://github.com/git/git /// See tests/functional/fetchGitRefs.sh for the full definition -const static std::string badGitRefRegexS = "//|^[./]|/\\.|\\.\\.|[[:cntrl:][:space:]:?^~\[]|\\\\|\\*|\\.lock$|\\.lock/|@\\{|[/.]$|^@$|^$"; +const static std::string badGitRefRegexS = + "//|^[./]|/\\.|\\.\\.|[[:cntrl:][:space:]:?^~\[]|\\\\|\\*|\\.lock$|\\.lock/|@\\{|[/.]$|^@$|^$"; extern std::regex badGitRefRegex; /// A Git revision (a SHA-1 commit hash). @@ -43,4 +44,4 @@ extern std::regex revRegex; /// A ref or revision, or a ref followed by a revision. const static std::string refAndOrRevRegex = "(?:(" + revRegexS + ")|(?:(" + refRegexS + ")(?:/(" + revRegexS + "))?))"; -} +} // namespace nix diff --git a/src/libutil/include/nix/util/url.hh b/src/libutil/include/nix/util/url.hh index a509f06da..8980b4ce3 100644 --- a/src/libutil/include/nix/util/url.hh +++ b/src/libutil/include/nix/util/url.hh @@ -15,7 +15,7 @@ struct ParsedURL std::string to_string() const; - bool operator ==(const ParsedURL & other) const noexcept; + bool operator==(const ParsedURL & other) const noexcept; /** * Remove `.` and `..` path elements. 
@@ -23,12 +23,12 @@ struct ParsedURL ParsedURL canonicalise(); }; -std::ostream & operator << (std::ostream & os, const ParsedURL & url); +std::ostream & operator<<(std::ostream & os, const ParsedURL & url); MakeError(BadURL, Error); std::string percentDecode(std::string_view in); -std::string percentEncode(std::string_view s, std::string_view keep=""); +std::string percentEncode(std::string_view s, std::string_view keep = ""); StringMap decodeQuery(const std::string & query); @@ -44,7 +44,8 @@ ParsedURL parseURL(const std::string & url); * For example git uses `git+https` to designate remotes using a Git * protocol over http. */ -struct ParsedUrlScheme { +struct ParsedUrlScheme +{ std::optional application; std::string_view transport; }; @@ -65,4 +66,4 @@ std::string fixGitURL(const std::string & url); */ bool isValidSchemeName(std::string_view scheme); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/users.hh b/src/libutil/include/nix/util/users.hh index 1d467173c..f2c6caecf 100644 --- a/src/libutil/include/nix/util/users.hh +++ b/src/libutil/include/nix/util/users.hh @@ -4,7 +4,7 @@ #include "nix/util/types.hh" #ifndef _WIN32 -# include +# include #endif namespace nix { @@ -59,7 +59,6 @@ Path createNixStateDir(); */ std::string expandTilde(std::string_view path); - /** * Is the current user UID 0 on Unix? * @@ -67,4 +66,4 @@ std::string expandTilde(std::string_view path); */ bool isRootUser(); -} +} // namespace nix diff --git a/src/libutil/include/nix/util/util.hh b/src/libutil/include/nix/util/util.hh index 2361bf2e7..015086d39 100644 --- a/src/libutil/include/nix/util/util.hh +++ b/src/libutil/include/nix/util/util.hh @@ -5,7 +5,6 @@ #include "nix/util/error.hh" #include "nix/util/logging.hh" - #include #include #include @@ -24,10 +23,8 @@ void initLibUtil(); */ std::vector stringsToCharPtrs(const Strings & ss); - MakeError(FormatError, Error); - template auto concatStrings(Parts &&... parts) -> std::enable_if_t<(... && std::is_convertible_v), std::string> @@ -36,11 +33,11 @@ auto concatStrings(Parts &&... parts) return concatStringsSep({}, views); } - /** * Add quotes around a collection of strings. */ -template Strings quoteStrings(const C & c) +template +Strings quoteStrings(const C & c) { Strings res; for (auto & s : c) @@ -55,25 +52,18 @@ template Strings quoteStrings(const C & c) */ std::string chomp(std::string_view s); - /** * Remove whitespace from the start and end of a string. */ std::string trim(std::string_view s, std::string_view whitespace = " \n\r\t"); - /** * Replace all occurrences of a string inside another string. */ -std::string replaceStrings( - std::string s, - std::string_view from, - std::string_view to); - +std::string replaceStrings(std::string s, std::string_view from, std::string_view to); std::string rewriteStrings(std::string s, const StringMap & rewrites); - /** * Parse a string into an integer. 
*/ @@ -91,11 +81,16 @@ N string2IntWithUnitPrefix(std::string_view s) if (!s.empty()) { char u = std::toupper(*s.rbegin()); if (std::isalpha(u)) { - if (u == 'K') multiplier = 1ULL << 10; - else if (u == 'M') multiplier = 1ULL << 20; - else if (u == 'G') multiplier = 1ULL << 30; - else if (u == 'T') multiplier = 1ULL << 40; - else throw UsageError("invalid unit specifier '%1%'", u); + if (u == 'K') + multiplier = 1ULL << 10; + else if (u == 'M') + multiplier = 1ULL << 20; + else if (u == 'G') + multiplier = 1ULL << 30; + else if (u == 'T') + multiplier = 1ULL << 40; + else + throw UsageError("invalid unit specifier '%1%'", u); s.remove_suffix(1); } } @@ -117,7 +112,6 @@ std::string renderSize(uint64_t value, bool align = false); template std::optional string2Float(const std::string_view s); - /** * Convert a little-endian integer to host order. */ @@ -131,25 +125,21 @@ T readLittleEndian(unsigned char * p) return x; } - /** * @return true iff `s` starts with `prefix`. */ bool hasPrefix(std::string_view s, std::string_view prefix); - /** * @return true iff `s` ends in `suffix`. */ bool hasSuffix(std::string_view s, std::string_view suffix); - /** * Convert a string to lower case. */ std::string toLower(std::string s); - /** * Escape a string as a shell word. * @@ -160,7 +150,6 @@ std::string toLower(std::string s); */ std::string escapeShellArgAlways(const std::string_view s); - /** * Exception handling in destructors: print an error message, then * ignore the exception. @@ -182,8 +171,6 @@ void ignoreExceptionInDestructor(Verbosity lvl = lvlError); */ void ignoreExceptionExceptInterrupt(Verbosity lvl = lvlError); - - /** * Tree formatting. */ @@ -192,7 +179,6 @@ constexpr char treeLast[] = "└───"; constexpr char treeLine[] = "│ "; constexpr char treeNull[] = " "; - /** * Encode arbitrary bytes as Base64. */ @@ -203,7 +189,6 @@ std::string base64Encode(std::string_view s); */ std::string base64Decode(std::string_view s); - /** * Remove common leading whitespace from the lines in the string * 's'. For example, if every line is indented by at least 3 spaces, @@ -211,7 +196,6 @@ std::string base64Decode(std::string_view s); */ std::string stripIndentation(std::string_view s); - /** * Get the prefix of 's' up to and excluding the next line break (LF * optionally preceded by CR), and the remainder following the line @@ -219,66 +203,67 @@ std::string stripIndentation(std::string_view s); */ std::pair getLine(std::string_view s); - /** * Get a value for the specified key from an associate container. */ -template +template const typename T::mapped_type * get(const T & map, const typename T::key_type & key) { auto i = map.find(key); - if (i == map.end()) return nullptr; + if (i == map.end()) + return nullptr; return &i->second; } -template +template typename T::mapped_type * get(T & map, const typename T::key_type & key) { auto i = map.find(key); - if (i == map.end()) return nullptr; + if (i == map.end()) + return nullptr; return &i->second; } /** * Get a value for the specified key from an associate container, or a default value if the key isn't present. 
*/ -template -const typename T::mapped_type & getOr(T & map, - const typename T::key_type & key, - const typename T::mapped_type & defaultValue) +template +const typename T::mapped_type & +getOr(T & map, const typename T::key_type & key, const typename T::mapped_type & defaultValue) { auto i = map.find(key); - if (i == map.end()) return defaultValue; + if (i == map.end()) + return defaultValue; return i->second; } /** * Remove and return the first item from a container. */ -template +template std::optional remove_begin(T & c) { auto i = c.begin(); - if (i == c.end()) return {}; + if (i == c.end()) + return {}; auto v = std::move(*i); c.erase(i); return v; } - /** * Remove and return the first item from a container. */ -template +template std::optional pop(T & c) { - if (c.empty()) return {}; + if (c.empty()) + return {}; auto v = std::move(c.front()); c.pop(); return v; } - /** * Append items to a container. TODO: remove this once we can use * C++23's `append_range()`. @@ -289,11 +274,9 @@ void append(C & c, std::initializer_list l) c.insert(c.end(), l.begin(), l.end()); } - template class Callback; - /** * A RAII helper that increments a counter on construction and * decrements it on destruction. @@ -303,56 +286,89 @@ struct MaintainCount { T & counter; long delta; - MaintainCount(T & counter, long delta = 1) : counter(counter), delta(delta) { counter += delta; } - ~MaintainCount() { counter -= delta; } -}; + MaintainCount(T & counter, long delta = 1) + : counter(counter) + , delta(delta) + { + counter += delta; + } + + ~MaintainCount() + { + counter -= delta; + } +}; /** * A Rust/Python-like enumerate() iterator adapter. * * Borrowed from http://reedbeta.com/blog/python-like-enumerate-in-cpp17. */ -template ())), - typename = decltype(std::end(std::declval()))> +template< + typename T, + typename TIter = decltype(std::begin(std::declval())), + typename = decltype(std::end(std::declval()))> constexpr auto enumerate(T && iterable) { struct iterator { size_t i; TIter iter; - constexpr bool operator != (const iterator & other) const { return iter != other.iter; } - constexpr void operator ++ () { ++i; ++iter; } - constexpr auto operator * () const { return std::tie(i, *iter); } + + constexpr bool operator!=(const iterator & other) const + { + return iter != other.iter; + } + + constexpr void operator++() + { + ++i; + ++iter; + } + + constexpr auto operator*() const + { + return std::tie(i, *iter); + } }; struct iterable_wrapper { T iterable; - constexpr auto begin() { return iterator{ 0, std::begin(iterable) }; } - constexpr auto end() { return iterator{ 0, std::end(iterable) }; } + + constexpr auto begin() + { + return iterator{0, std::begin(iterable)}; + } + + constexpr auto end() + { + return iterator{0, std::end(iterable)}; + } }; - return iterable_wrapper{ std::forward(iterable) }; + return iterable_wrapper{std::forward(iterable)}; } - /** * C++17 std::visit boilerplate */ -template struct overloaded : Ts... { using Ts::operator()...; }; -template overloaded(Ts...) -> overloaded; - +template +struct overloaded : Ts... +{ + using Ts::operator()...; +}; +template +overloaded(Ts...) -> overloaded; std::string showBytes(uint64_t bytes); - /** * Provide an addition operator between strings and string_views * inexplicably omitted from the standard library. 
*/ -inline std::string operator + (const std::string & s1, std::string_view s2) +inline std::string operator+(const std::string & s1, std::string_view s2) { std::string s; s.reserve(s1.size() + s2.size()); @@ -361,13 +377,13 @@ inline std::string operator + (const std::string & s1, std::string_view s2) return s; } -inline std::string operator + (std::string && s, std::string_view s2) +inline std::string operator+(std::string && s, std::string_view s2) { s.append(s2); return std::move(s); } -inline std::string operator + (std::string_view s1, const char * s2) +inline std::string operator+(std::string_view s1, const char * s2) { auto s2Size = strlen(s2); std::string s; @@ -377,4 +393,4 @@ inline std::string operator + (std::string_view s1, const char * s2) return s; } -} +} // namespace nix diff --git a/src/libutil/include/nix/util/variant-wrapper.hh b/src/libutil/include/nix/util/variant-wrapper.hh index cedcb999c..146ae07b6 100644 --- a/src/libutil/include/nix/util/variant-wrapper.hh +++ b/src/libutil/include/nix/util/variant-wrapper.hh @@ -8,13 +8,13 @@ * Force the default versions of all constructors (copy, move, copy * assignment). */ -#define FORCE_DEFAULT_CONSTRUCTORS(CLASS_NAME) \ - CLASS_NAME(const CLASS_NAME &) = default; \ - CLASS_NAME(CLASS_NAME &) = default; \ - CLASS_NAME(CLASS_NAME &&) = default; \ - \ - CLASS_NAME & operator =(const CLASS_NAME &) = default; \ - CLASS_NAME & operator =(CLASS_NAME &) = default; +#define FORCE_DEFAULT_CONSTRUCTORS(CLASS_NAME) \ + CLASS_NAME(const CLASS_NAME &) = default; \ + CLASS_NAME(CLASS_NAME &) = default; \ + CLASS_NAME(CLASS_NAME &&) = default; \ + \ + CLASS_NAME & operator=(const CLASS_NAME &) = default; \ + CLASS_NAME & operator=(CLASS_NAME &) = default; /** * Make a wrapper constructor. All args are forwarded to the @@ -22,9 +22,10 @@ * * The moral equivalent of `using Raw::Raw;` */ -#define MAKE_WRAPPER_CONSTRUCTOR(CLASS_NAME) \ - FORCE_DEFAULT_CONSTRUCTORS(CLASS_NAME) \ - \ - CLASS_NAME(auto &&... arg) \ +#define MAKE_WRAPPER_CONSTRUCTOR(CLASS_NAME) \ + FORCE_DEFAULT_CONSTRUCTORS(CLASS_NAME) \ + \ + CLASS_NAME(auto &&... arg) \ : raw(std::forward(arg)...) 
\ - { } + { \ + } diff --git a/src/libutil/include/nix/util/xml-writer.hh b/src/libutil/include/nix/util/xml-writer.hh index ae5a6ced7..8d084ad11 100644 --- a/src/libutil/include/nix/util/xml-writer.hh +++ b/src/libutil/include/nix/util/xml-writer.hh @@ -6,13 +6,10 @@ #include #include - namespace nix { - typedef std::map> XMLAttrs; - class XMLWriter { private: @@ -31,12 +28,10 @@ public: void close(); - void openElement(std::string_view name, - const XMLAttrs & attrs = XMLAttrs()); + void openElement(std::string_view name, const XMLAttrs & attrs = XMLAttrs()); void closeElement(); - void writeEmptyElement(std::string_view name, - const XMLAttrs & attrs = XMLAttrs()); + void writeEmptyElement(std::string_view name, const XMLAttrs & attrs = XMLAttrs()); private: void writeAttrs(const XMLAttrs & attrs); @@ -44,23 +39,21 @@ private: void indent_(size_t depth); }; - class XMLOpenElement { private: XMLWriter & writer; public: - XMLOpenElement(XMLWriter & writer, std::string_view name, - const XMLAttrs & attrs = XMLAttrs()) + XMLOpenElement(XMLWriter & writer, std::string_view name, const XMLAttrs & attrs = XMLAttrs()) : writer(writer) { writer.openElement(name, attrs); } + ~XMLOpenElement() { writer.closeElement(); } }; - -} +} // namespace nix diff --git a/src/libutil/json-utils.cc b/src/libutil/json-utils.cc index 34da83a2c..74b3b27cc 100644 --- a/src/libutil/json-utils.cc +++ b/src/libutil/json-utils.cc @@ -10,20 +10,20 @@ namespace nix { const nlohmann::json * get(const nlohmann::json & map, const std::string & key) { auto i = map.find(key); - if (i == map.end()) return nullptr; + if (i == map.end()) + return nullptr; return &*i; } nlohmann::json * get(nlohmann::json & map, const std::string & key) { auto i = map.find(key); - if (i == map.end()) return nullptr; + if (i == map.end()) + return nullptr; return &*i; } -const nlohmann::json & valueAt( - const nlohmann::json::object_t & map, - const std::string & key) +const nlohmann::json & valueAt(const nlohmann::json::object_t & map, const std::string & key) { if (!map.contains(key)) throw Error("Expected JSON object to contain key '%s' but it doesn't: %s", key, nlohmann::json(map).dump()); @@ -36,7 +36,7 @@ std::optional optionalValueAt(const nlohmann::json::object_t & m if (!map.contains(key)) return std::nullopt; - return std::optional { map.at(key) }; + return std::optional{map.at(key)}; } std::optional nullableValueAt(const nlohmann::json::object_t & map, const std::string & key) @@ -46,7 +46,7 @@ std::optional nullableValueAt(const nlohmann::json::object_t & m if (value.is_null()) return std::nullopt; - return std::optional { std::move(value) }; + return std::optional{std::move(value)}; } const nlohmann::json * getNullable(const nlohmann::json & value) @@ -63,16 +63,14 @@ const nlohmann::json * getNullable(const nlohmann::json & value) * functions. It is too cumbersome and easy to forget to expect regular * JSON code to use it directly. 
*/ -static const nlohmann::json & ensureType( - const nlohmann::json & value, - nlohmann::json::value_type expectedType - ) +static const nlohmann::json & ensureType(const nlohmann::json & value, nlohmann::json::value_type expectedType) { if (value.type() != expectedType) throw Error( "Expected JSON value to be of type '%s' but it is of type '%s': %s", nlohmann::json(expectedType).type_name(), - value.type_name(), value.dump()); + value.type_name(), + value.dump()); return value; } @@ -102,8 +100,7 @@ const nlohmann::json::number_unsigned_t & getUnsigned(const nlohmann::json & val typeName = value.is_number_float() ? "floating point number" : "signed integral number"; } throw Error( - "Expected JSON value to be an unsigned integral number but it is of type '%s': %s", - typeName, value.dump()); + "Expected JSON value to be an unsigned integral number but it is of type '%s': %s", typeName, value.dump()); } const nlohmann::json::boolean_t & getBoolean(const nlohmann::json & value) @@ -146,4 +143,4 @@ StringSet getStringSet(const nlohmann::json & value) return stringSet; } -} +} // namespace nix diff --git a/src/libutil/linux/cgroup.cc b/src/libutil/linux/cgroup.cc index c82fdc11c..20d19ae7d 100644 --- a/src/libutil/linux/cgroup.cc +++ b/src/libutil/linux/cgroup.cc @@ -19,7 +19,8 @@ std::optional getCgroupFS() { static auto res = [&]() -> std::optional { auto fp = fopen("/proc/mounts", "r"); - if (!fp) return std::nullopt; + if (!fp) + return std::nullopt; Finally delFP = [&]() { fclose(fp); }; while (auto ent = getmntent(fp)) if (std::string_view(ent->mnt_type) == "cgroup2") @@ -50,7 +51,8 @@ StringMap getCgroups(const Path & cgroupFile) static CgroupStats destroyCgroup(const std::filesystem::path & cgroup, bool returnStats) { - if (!pathExists(cgroup)) return {}; + if (!pathExists(cgroup)) + return {}; auto procsFile = cgroup / "cgroup.procs"; @@ -67,7 +69,8 @@ static CgroupStats destroyCgroup(const std::filesystem::path & cgroup, bool retu this cgroup. 
*/ for (auto & entry : DirectoryIterator{cgroup}) { checkInterrupt(); - if (entry.symlink_status().type() != std::filesystem::file_type::directory) continue; + if (entry.symlink_status().type() != std::filesystem::file_type::directory) + continue; destroyCgroup(cgroup / entry.path().filename(), false); } @@ -78,7 +81,8 @@ static CgroupStats destroyCgroup(const std::filesystem::path & cgroup, bool retu while (true) { auto pids = tokenizeString>(readFile(procsFile)); - if (pids.empty()) break; + if (pids.empty()) + break; if (round > 20) throw Error("cannot kill cgroup '%s'", cgroup); @@ -93,8 +97,7 @@ static CgroupStats destroyCgroup(const std::filesystem::path & cgroup, bool retu try { auto cmdline = readFile(fmt("/proc/%d/cmdline", pid)); using namespace std::string_literals; - warn("killing stray builder process %d (%s)...", - pid, trim(replaceStrings(cmdline, "\0"s, " "))); + warn("killing stray builder process %d (%s)...", pid, trim(replaceStrings(cmdline, "\0"s, " "))); } catch (SystemError &) { } } @@ -120,17 +123,18 @@ static CgroupStats destroyCgroup(const std::filesystem::path & cgroup, bool retu std::string_view userPrefix = "user_usec "; if (hasPrefix(line, userPrefix)) { auto n = string2Int(line.substr(userPrefix.size())); - if (n) stats.cpuUser = std::chrono::microseconds(*n); + if (n) + stats.cpuUser = std::chrono::microseconds(*n); } std::string_view systemPrefix = "system_usec "; if (hasPrefix(line, systemPrefix)) { auto n = string2Int(line.substr(systemPrefix.size())); - if (n) stats.cpuSystem = std::chrono::microseconds(*n); + if (n) + stats.cpuSystem = std::chrono::microseconds(*n); } } } - } if (rmdir(cgroup.c_str()) == -1) @@ -163,4 +167,4 @@ std::string getRootCgroup() return rootCgroup; } -} +} // namespace nix diff --git a/src/libutil/linux/include/nix/util/cgroup.hh b/src/libutil/linux/include/nix/util/cgroup.hh index eb49c3419..59de13d46 100644 --- a/src/libutil/linux/include/nix/util/cgroup.hh +++ b/src/libutil/linux/include/nix/util/cgroup.hh @@ -34,4 +34,4 @@ std::string getCurrentCgroup(); */ std::string getRootCgroup(); -} +} // namespace nix diff --git a/src/libutil/linux/include/nix/util/linux-namespaces.hh b/src/libutil/linux/include/nix/util/linux-namespaces.hh index 59db745d3..8f7ffa8df 100644 --- a/src/libutil/linux/include/nix/util/linux-namespaces.hh +++ b/src/libutil/linux/include/nix/util/linux-namespaces.hh @@ -32,4 +32,4 @@ bool userNamespacesSupported(); bool mountAndPidNamespacesSupported(); -} +} // namespace nix diff --git a/src/libutil/linux/linux-namespaces.cc b/src/libutil/linux/linux-namespaces.cc index 93f299076..b7787cb6f 100644 --- a/src/libutil/linux/linux-namespaces.cc +++ b/src/libutil/linux/linux-namespaces.cc @@ -16,36 +16,27 @@ namespace nix { bool userNamespacesSupported() { - static auto res = [&]() -> bool - { + static auto res = [&]() -> bool { if (!pathExists("/proc/self/ns/user")) { debug("'/proc/self/ns/user' does not exist; your kernel was likely built without CONFIG_USER_NS=y"); return false; } Path maxUserNamespaces = "/proc/sys/user/max_user_namespaces"; - if (!pathExists(maxUserNamespaces) || - trim(readFile(maxUserNamespaces)) == "0") - { + if (!pathExists(maxUserNamespaces) || trim(readFile(maxUserNamespaces)) == "0") { debug("user namespaces appear to be disabled; check '/proc/sys/user/max_user_namespaces'"); return false; } Path procSysKernelUnprivilegedUsernsClone = "/proc/sys/kernel/unprivileged_userns_clone"; if (pathExists(procSysKernelUnprivilegedUsernsClone) - && 
trim(readFile(procSysKernelUnprivilegedUsernsClone)) == "0") - { + && trim(readFile(procSysKernelUnprivilegedUsernsClone)) == "0") { debug("user namespaces appear to be disabled; check '/proc/sys/kernel/unprivileged_userns_clone'"); return false; } try { - Pid pid = startProcess([&]() - { - _exit(0); - }, { - .cloneFlags = CLONE_NEWUSER - }); + Pid pid = startProcess([&]() { _exit(0); }, {.cloneFlags = CLONE_NEWUSER}); auto r = pid.wait(); assert(!r); @@ -61,27 +52,25 @@ bool userNamespacesSupported() bool mountAndPidNamespacesSupported() { - static auto res = [&]() -> bool - { + static auto res = [&]() -> bool { try { - Pid pid = startProcess([&]() - { - /* Make sure we don't remount the parent's /proc. */ - if (mount(0, "/", 0, MS_PRIVATE | MS_REC, 0) == -1) - _exit(1); + Pid pid = startProcess( + [&]() { + /* Make sure we don't remount the parent's /proc. */ + if (mount(0, "/", 0, MS_PRIVATE | MS_REC, 0) == -1) + _exit(1); - /* Test whether we can remount /proc. The kernel disallows - this if /proc is not fully visible, i.e. if there are - filesystems mounted on top of files inside /proc. See - https://lore.kernel.org/lkml/87tvsrjai0.fsf@xmission.com/T/. */ - if (mount("none", "/proc", "proc", 0, 0) == -1) - _exit(2); + /* Test whether we can remount /proc. The kernel disallows + this if /proc is not fully visible, i.e. if there are + filesystems mounted on top of files inside /proc. See + https://lore.kernel.org/lkml/87tvsrjai0.fsf@xmission.com/T/. */ + if (mount("none", "/proc", "proc", 0, 0) == -1) + _exit(2); - _exit(0); - }, { - .cloneFlags = CLONE_NEWNS | CLONE_NEWPID | (userNamespacesSupported() ? CLONE_NEWUSER : 0) - }); + _exit(0); + }, + {.cloneFlags = CLONE_NEWNS | CLONE_NEWPID | (userNamespacesSupported() ? CLONE_NEWUSER : 0)}); if (pid.wait()) { debug("PID namespaces do not work on this system: cannot remount /proc"); @@ -98,7 +87,6 @@ bool mountAndPidNamespacesSupported() return res; } - ////////////////////////////////////////////////////////////////////// static AutoCloseFD fdSavedMountNamespace; @@ -144,4 +132,4 @@ void tryUnshareFilesystem() throw SysError("unsharing filesystem state"); } -} +} // namespace nix diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 4dadf1550..997110617 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -26,6 +26,7 @@ ActivityId getCurActivity() { return curActivity; } + void setCurActivity(const ActivityId activityId) { curActivity = activityId; @@ -48,7 +49,7 @@ void Logger::writeToStdout(std::string_view s) Logger::Suspension Logger::suspend() { pause(); - return Suspension { ._finalize = {[this](){this->resume();}} }; + return Suspension{._finalize = {[this]() { this->resume(); }}}; } std::optional Logger::suspendIf(bool cond) @@ -72,25 +73,42 @@ public: tty = isTTY(); } - bool isVerbose() override { + bool isVerbose() override + { return printBuildLogs; } void log(Verbosity lvl, std::string_view s) override { - if (lvl > verbosity) return; + if (lvl > verbosity) + return; std::string prefix; if (systemd) { char c; switch (lvl) { - case lvlError: c = '3'; break; - case lvlWarn: c = '4'; break; - case lvlNotice: case lvlInfo: c = '5'; break; - case lvlTalkative: case lvlChatty: c = '6'; break; - case lvlDebug: case lvlVomit: c = '7'; break; - default: c = '7'; break; // should not happen, and missing enum case is reported by -Werror=switch-enum + case lvlError: + c = '3'; + break; + case lvlWarn: + c = '4'; + break; + case lvlNotice: + case lvlInfo: + c = '5'; + break; + case lvlTalkative: + case 
lvlChatty: + c = '6'; + break; + case lvlDebug: + case lvlVomit: + c = '7'; + break; + default: + c = '7'; + break; // should not happen, and missing enum case is reported by -Werror=switch-enum } prefix = std::string("<") + c + ">"; } @@ -106,9 +124,13 @@ public: log(ei.level, toView(oss)); } - void startActivity(ActivityId act, Verbosity lvl, ActivityType type, - const std::string & s, const Fields & fields, ActivityId parent) - override + void startActivity( + ActivityId act, + Verbosity lvl, + ActivityType type, + const std::string & s, + const Fields & fields, + ActivityId parent) override { if (lvl <= verbosity && !s.empty()) log(lvl, s + "..."); @@ -119,8 +141,7 @@ public: if (type == resBuildLogLine && printBuildLogs) { auto lastLine = fields[0].s; printError(lastLine); - } - else if (type == resPostBuildLogLine && printBuildLogs) { + } else if (type == resPostBuildLogLine && printBuildLogs) { auto lastLine = fields[0].s; printError("post-build-hook: " + lastLine); } @@ -132,9 +153,7 @@ Verbosity verbosity = lvlInfo; void writeToStderr(std::string_view s) { try { - writeFull( - getStandardError(), - s, false); + writeFull(getStandardError(), s, false); } catch (SystemError & e) { /* Ignore failing writes to stderr. We need to ignore write errors to ensure that cleanup code that logs to stderr runs @@ -159,9 +178,15 @@ static uint64_t getPid() #endif } -Activity::Activity(Logger & logger, Verbosity lvl, ActivityType type, - const std::string & s, const Logger::Fields & fields, ActivityId parent) - : logger(logger), id(nextId++ + (((uint64_t) getPid()) << 32)) +Activity::Activity( + Logger & logger, + Verbosity lvl, + ActivityType type, + const std::string & s, + const Logger::Fields & fields, + ActivityId parent) + : logger(logger) + , id(nextId++ + (((uint64_t) getPid()) << 32)) { logger.startActivity(id, lvl, type, s, fields, parent); } @@ -181,22 +206,26 @@ void to_json(nlohmann::json & json, std::shared_ptr pos) } } -struct JSONLogger : Logger { +struct JSONLogger : Logger +{ Descriptor fd; bool includeNixPrefix; JSONLogger(Descriptor fd, bool includeNixPrefix) : fd(fd) , includeNixPrefix(includeNixPrefix) - { } + { + } - bool isVerbose() override { + bool isVerbose() override + { return true; } void addFields(nlohmann::json & json, const Fields & fields) { - if (fields.empty()) return; + if (fields.empty()) + return; auto & arr = json["fields"] = nlohmann::json::array(); for (auto & f : fields) if (f.type == Logger::Field::tInt) @@ -217,8 +246,7 @@ struct JSONLogger : Logger { void write(const nlohmann::json & json) { auto line = - (includeNixPrefix ? "@nix " : "") + - json.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace); + (includeNixPrefix ? "@nix " : "") + json.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace); /* Acquire a lock to prevent log messages from clobbering each other. 
*/ @@ -272,8 +300,13 @@ struct JSONLogger : Logger { write(json); } - void startActivity(ActivityId act, Verbosity lvl, ActivityType type, - const std::string & s, const Fields & fields, ActivityId parent) override + void startActivity( + ActivityId act, + Verbosity lvl, + ActivityType type, + const std::string & s, + const Fields & fields, + ActivityId parent) override { nlohmann::json json; json["action"] = "start"; @@ -312,19 +345,20 @@ std::unique_ptr makeJSONLogger(Descriptor fd, bool includeNixPrefix) std::unique_ptr makeJSONLogger(const std::filesystem::path & path, bool includeNixPrefix) { - struct JSONFileLogger : JSONLogger { + struct JSONFileLogger : JSONLogger + { AutoCloseFD fd; JSONFileLogger(AutoCloseFD && fd, bool includeNixPrefix) : JSONLogger(fd.get(), includeNixPrefix) , fd(std::move(fd)) - { } + { + } }; - AutoCloseFD fd = - std::filesystem::is_socket(path) - ? connect(path) - : toDescriptor(open(path.string().c_str(), O_CREAT | O_APPEND | O_WRONLY, 0644)); + AutoCloseFD fd = std::filesystem::is_socket(path) + ? connect(path) + : toDescriptor(open(path.string().c_str(), O_CREAT | O_APPEND | O_WRONLY, 0644)); if (!fd) throw SysError("opening log file %1%", path); @@ -346,7 +380,6 @@ void applyJSONLogger() } catch (...) { ignoreExceptionExceptInterrupt(); } - } } @@ -358,27 +391,30 @@ static Logger::Fields getFields(nlohmann::json & json) fields.emplace_back(Logger::Field(f.get())); else if (f.type() == nlohmann::json::value_t::string) fields.emplace_back(Logger::Field(f.get())); - else throw Error("unsupported JSON type %d", (int) f.type()); + else + throw Error("unsupported JSON type %d", (int) f.type()); } return fields; } std::optional parseJSONMessage(const std::string & msg, std::string_view source) { - if (!hasPrefix(msg, "@nix ")) return std::nullopt; + if (!hasPrefix(msg, "@nix ")) + return std::nullopt; try { return nlohmann::json::parse(std::string(msg, 5)); } catch (std::exception & e) { - printError("bad JSON log message from %s: %s", - Uncolored(source), - e.what()); + printError("bad JSON log message from %s: %s", Uncolored(source), e.what()); } return std::nullopt; } -bool handleJSONLogMessage(nlohmann::json & json, - const Activity & act, std::map & activities, - std::string_view source, bool trusted) +bool handleJSONLogMessage( + nlohmann::json & json, + const Activity & act, + std::map & activities, + std::string_view source, + bool trusted) { try { std::string action = json["action"]; @@ -386,10 +422,11 @@ bool handleJSONLogMessage(nlohmann::json & json, if (action == "start") { auto type = (ActivityType) json["type"]; if (trusted || type == actFileTransfer) - activities.emplace(std::piecewise_construct, + activities.emplace( + std::piecewise_construct, std::forward_as_tuple(json["id"]), - std::forward_as_tuple(*logger, (Verbosity) json["level"], type, - json["text"], getFields(json["fields"]), act.id)); + std::forward_as_tuple( + *logger, (Verbosity) json["level"], type, json["text"], getFields(json["fields"]), act.id)); } else if (action == "stop") @@ -412,21 +449,22 @@ bool handleJSONLogMessage(nlohmann::json & json, } return true; - } catch (const nlohmann::json::exception &e) { - warn( - "Unable to handle a JSON message from %s: %s", - Uncolored(source), - e.what() - ); + } catch (const nlohmann::json::exception & e) { + warn("Unable to handle a JSON message from %s: %s", Uncolored(source), e.what()); return false; } } -bool handleJSONLogMessage(const std::string & msg, - const Activity & act, std::map & activities, std::string_view source, bool 
trusted) +bool handleJSONLogMessage( + const std::string & msg, + const Activity & act, + std::map & activities, + std::string_view source, + bool trusted) { auto json = parseJSONMessage(msg, source); - if (!json) return false; + if (!json) + return false; return handleJSONLogMessage(*json, act, activities, source, trusted); } @@ -440,4 +478,4 @@ Activity::~Activity() } } -} +} // namespace nix diff --git a/src/libutil/memory-source-accessor.cc b/src/libutil/memory-source-accessor.cc index 5612c9454..363f52a54 100644 --- a/src/libutil/memory-source-accessor.cc +++ b/src/libutil/memory-source-accessor.cc @@ -2,15 +2,13 @@ namespace nix { -MemorySourceAccessor::File * -MemorySourceAccessor::open(const CanonPath & path, std::optional create) +MemorySourceAccessor::File * MemorySourceAccessor::open(const CanonPath & path, std::optional create) { File * cur = &root; bool newF = false; - for (std::string_view name : path) - { + for (std::string_view name : path) { auto * curDirP = std::get_if(&cur->raw); if (!curDirP) return nullptr; @@ -22,16 +20,19 @@ MemorySourceAccessor::open(const CanonPath & path, std::optional create) return nullptr; else { newF = true; - i = curDir.contents.insert(i, { - std::string { name }, - File::Directory {}, - }); + i = curDir.contents.insert( + i, + { + std::string{name}, + File::Directory{}, + }); } } cur = &i->second; } - if (newF && create) *cur = std::move(*create); + if (newF && create) + *cur = std::move(*create); return cur; } @@ -54,32 +55,33 @@ bool MemorySourceAccessor::pathExists(const CanonPath & path) MemorySourceAccessor::Stat MemorySourceAccessor::File::lstat() const { - return std::visit(overloaded { - [](const Regular & r) { - return Stat { - .type = tRegular, - .fileSize = r.contents.size(), - .isExecutable = r.executable, - }; + return std::visit( + overloaded{ + [](const Regular & r) { + return Stat{ + .type = tRegular, + .fileSize = r.contents.size(), + .isExecutable = r.executable, + }; + }, + [](const Directory &) { + return Stat{ + .type = tDirectory, + }; + }, + [](const Symlink &) { + return Stat{ + .type = tSymlink, + }; + }, }, - [](const Directory &) { - return Stat { - .type = tDirectory, - }; - }, - [](const Symlink &) { - return Stat { - .type = tSymlink, - }; - }, - }, this->raw); + this->raw); } -std::optional -MemorySourceAccessor::maybeLstat(const CanonPath & path) +std::optional MemorySourceAccessor::maybeLstat(const CanonPath & path) { const auto * f = open(path, std::nullopt); - return f ? std::optional { f->lstat() } : std::nullopt; + return f ? 
std::optional{f->lstat()} : std::nullopt; } MemorySourceAccessor::DirEntries MemorySourceAccessor::readDirectory(const CanonPath & path) @@ -110,7 +112,7 @@ std::string MemorySourceAccessor::readLink(const CanonPath & path) SourcePath MemorySourceAccessor::addFile(CanonPath path, std::string && contents) { - auto * f = open(path, File { File::Regular {} }); + auto * f = open(path, File{File::Regular{}}); if (!f) throw Error("file '%s' cannot be made because some parent file is not a directory", path); if (auto * r = std::get_if(&f->raw)) @@ -121,12 +123,11 @@ SourcePath MemorySourceAccessor::addFile(CanonPath path, std::string && contents return SourcePath{ref(shared_from_this()), path}; } - using File = MemorySourceAccessor::File; void MemorySink::createDirectory(const CanonPath & path) { - auto * f = dst.open(path, File { File::Directory { } }); + auto * f = dst.open(path, File{File::Directory{}}); if (!f) throw Error("file '%s' cannot be made because some parent file is not a directory", path); @@ -134,25 +135,27 @@ void MemorySink::createDirectory(const CanonPath & path) throw Error("file '%s' is not a directory", path); }; -struct CreateMemoryRegularFile : CreateRegularFileSink { +struct CreateMemoryRegularFile : CreateRegularFileSink +{ File::Regular & regularFile; CreateMemoryRegularFile(File::Regular & r) : regularFile(r) - { } + { + } - void operator () (std::string_view data) override; + void operator()(std::string_view data) override; void isExecutable() override; void preallocateContents(uint64_t size) override; }; void MemorySink::createRegularFile(const CanonPath & path, std::function func) { - auto * f = dst.open(path, File { File::Regular {} }); + auto * f = dst.open(path, File{File::Regular{}}); if (!f) throw Error("file '%s' cannot be made because some parent file is not a directory", path); if (auto * rp = std::get_if(&f->raw)) { - CreateMemoryRegularFile crf { *rp }; + CreateMemoryRegularFile crf{*rp}; func(crf); } else throw Error("file '%s' is not a regular file", path); @@ -168,14 +171,14 @@ void CreateMemoryRegularFile::preallocateContents(uint64_t len) regularFile.contents.reserve(len); } -void CreateMemoryRegularFile::operator () (std::string_view data) +void CreateMemoryRegularFile::operator()(std::string_view data) { regularFile.contents += data; } void MemorySink::createSymlink(const CanonPath & path, const std::string & target) { - auto * f = dst.open(path, File { File::Symlink { } }); + auto * f = dst.open(path, File{File::Symlink{}}); if (!f) throw Error("file '%s' cannot be made because some parent file is not a directory", path); if (auto * s = std::get_if(&f->raw)) @@ -194,4 +197,4 @@ ref makeEmptySourceAccessor() return empty; } -} +} // namespace nix diff --git a/src/libutil/mounted-source-accessor.cc b/src/libutil/mounted-source-accessor.cc index b7de2afbf..4c32147f9 100644 --- a/src/libutil/mounted-source-accessor.cc +++ b/src/libutil/mounted-source-accessor.cc @@ -76,4 +76,4 @@ ref makeMountedSourceAccessor(std::map(std::move(mounts)); } -} +} // namespace nix diff --git a/src/libutil/pos-table.cc b/src/libutil/pos-table.cc index e50b12873..e24aff4b1 100644 --- a/src/libutil/pos-table.cc +++ b/src/libutil/pos-table.cc @@ -48,4 +48,4 @@ Pos PosTable::operator[](PosIdx p) const return result; } -} +} // namespace nix diff --git a/src/libutil/position.cc b/src/libutil/position.cc index a1d9460ed..049c95474 100644 --- a/src/libutil/position.cc +++ b/src/libutil/position.cc @@ -31,29 +31,27 @@ std::optional Pos::getCodeLines() const return std::nullopt; } 
- std::optional Pos::getSource() const { - return std::visit(overloaded { - [](const std::monostate &) -> std::optional { - return std::nullopt; - }, - [](const Pos::Stdin & s) -> std::optional { - // Get rid of the null terminators added by the parser. - return std::string(s.source->c_str()); - }, - [](const Pos::String & s) -> std::optional { - // Get rid of the null terminators added by the parser. - return std::string(s.source->c_str()); - }, - [](const SourcePath & path) -> std::optional { - try { - return path.readFile(); - } catch (Error &) { - return std::nullopt; - } - } - }, origin); + return std::visit( + overloaded{ + [](const std::monostate &) -> std::optional { return std::nullopt; }, + [](const Pos::Stdin & s) -> std::optional { + // Get rid of the null terminators added by the parser. + return std::string(s.source->c_str()); + }, + [](const Pos::String & s) -> std::optional { + // Get rid of the null terminators added by the parser. + return std::string(s.source->c_str()); + }, + [](const SourcePath & path) -> std::optional { + try { + return path.readFile(); + } catch (Error &) { + return std::nullopt; + } + }}, + origin); } std::optional Pos::getSourcePath() const @@ -66,12 +64,13 @@ std::optional Pos::getSourcePath() const void Pos::print(std::ostream & out, bool showOrigin) const { if (showOrigin) { - std::visit(overloaded { - [&](const std::monostate &) { out << "«none»"; }, - [&](const Pos::Stdin &) { out << "«stdin»"; }, - [&](const Pos::String & s) { out << "«string»"; }, - [&](const SourcePath & path) { out << path; } - }, origin); + std::visit( + overloaded{ + [&](const std::monostate &) { out << "«none»"; }, + [&](const Pos::Stdin &) { out << "«stdin»"; }, + [&](const Pos::String & s) { out << "«string»"; }, + [&](const SourcePath & path) { out << path; }}, + origin); out << ":"; } out << line; @@ -107,7 +106,8 @@ void Pos::LinesIterator::bump(bool atFirst) input.remove_prefix(eol); } -std::optional Pos::getSnippetUpTo(const Pos & end) const { +std::optional Pos::getSnippetUpTo(const Pos & end) const +{ assert(this->origin == end.origin); if (end.line < this->line) @@ -152,5 +152,4 @@ std::optional Pos::getSnippetUpTo(const Pos & end) const { return std::nullopt; } - -} +} // namespace nix diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index 2ce7c88e4..73a08116d 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -15,43 +15,41 @@ PosixSourceAccessor::PosixSourceAccessor(std::filesystem::path && argRoot) } PosixSourceAccessor::PosixSourceAccessor() - : PosixSourceAccessor(std::filesystem::path {}) -{ } + : PosixSourceAccessor(std::filesystem::path{}) +{ +} SourcePath PosixSourceAccessor::createAtRoot(const std::filesystem::path & path) { std::filesystem::path path2 = absPath(path); return { make_ref(path2.root_path()), - CanonPath { path2.relative_path().string() }, + CanonPath{path2.relative_path().string()}, }; } std::filesystem::path PosixSourceAccessor::makeAbsPath(const CanonPath & path) { - return root.empty() - ? (std::filesystem::path { path.abs() }) - : path.isRoot() - ? /* Don't append a slash for the root of the accessor, since - it can be a non-directory (e.g. in the case of `fetchTree - { type = "file" }`). */ - root - : root / path.rel(); + return root.empty() ? (std::filesystem::path{path.abs()}) + : path.isRoot() ? /* Don't append a slash for the root of the accessor, since + it can be a non-directory (e.g. in the case of `fetchTree + { type = "file" }`). 
*/ + root + : root / path.rel(); } -void PosixSourceAccessor::readFile( - const CanonPath & path, - Sink & sink, - std::function sizeCallback) +void PosixSourceAccessor::readFile(const CanonPath & path, Sink & sink, std::function sizeCallback) { assertNoSymlinks(path); auto ap = makeAbsPath(path); - AutoCloseFD fd = toDescriptor(open(ap.string().c_str(), O_RDONLY - #ifndef _WIN32 - | O_NOFOLLOW | O_CLOEXEC - #endif + AutoCloseFD fd = toDescriptor(open( + ap.string().c_str(), + O_RDONLY +#ifndef _WIN32 + | O_NOFOLLOW | O_CLOEXEC +#endif )); if (!fd) throw SysError("opening file '%1%'", ap.string()); @@ -71,8 +69,7 @@ void PosixSourceAccessor::readFile( if (rd == -1) { if (errno != EINTR) throw SysError("reading from file '%s'", showPath(path)); - } - else if (rd == 0) + } else if (rd == 0) throw SysError("unexpected end-of-file reading '%s'", showPath(path)); else { assert(rd <= left); @@ -84,7 +81,8 @@ void PosixSourceAccessor::readFile( bool PosixSourceAccessor::pathExists(const CanonPath & path) { - if (auto parent = path.parent()) assertNoSymlinks(*parent); + if (auto parent = path.parent()) + assertNoSymlinks(*parent); return nix::pathExists(makeAbsPath(path).string()); } @@ -99,13 +97,15 @@ std::optional PosixSourceAccessor::cachedLstat(const CanonPath & pa { auto cache(_cache.readLock()); auto i = cache->find(absPath); - if (i != cache->end()) return i->second; + if (i != cache->end()) + return i->second; } auto st = nix::maybeLstat(absPath.c_str()); auto cache(_cache.lock()); - if (cache->size() >= 16384) cache->clear(); + if (cache->size() >= 16384) + cache->clear(); cache->emplace(absPath, st); return st; @@ -113,22 +113,25 @@ std::optional PosixSourceAccessor::cachedLstat(const CanonPath & pa std::optional PosixSourceAccessor::maybeLstat(const CanonPath & path) { - if (auto parent = path.parent()) assertNoSymlinks(*parent); + if (auto parent = path.parent()) + assertNoSymlinks(*parent); auto st = cachedLstat(path); - if (!st) return std::nullopt; + if (!st) + return std::nullopt; mtime = std::max(mtime, st->st_mtime); - return Stat { - .type = - S_ISREG(st->st_mode) ? tRegular : - S_ISDIR(st->st_mode) ? tDirectory : - S_ISLNK(st->st_mode) ? tSymlink : - S_ISCHR(st->st_mode) ? tChar : - S_ISBLK(st->st_mode) ? tBlock : + return Stat{ + .type = S_ISREG(st->st_mode) ? tRegular + : S_ISDIR(st->st_mode) ? tDirectory + : S_ISLNK(st->st_mode) ? tSymlink + : S_ISCHR(st->st_mode) ? tChar + : S_ISBLK(st->st_mode) ? tBlock + : #ifdef S_ISSOCK - S_ISSOCK(st->st_mode) ? tSocket : + S_ISSOCK(st->st_mode) ? tSocket + : #endif - S_ISFIFO(st->st_mode) ? tFifo : - tUnknown, + S_ISFIFO(st->st_mode) ? tFifo + : tUnknown, .fileSize = S_ISREG(st->st_mode) ? std::optional(st->st_size) : std::nullopt, .isExecutable = S_ISREG(st->st_mode) && st->st_mode & S_IXUSR, }; @@ -150,7 +153,8 @@ SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & * libstdc++ implementation [1] and the standard proposal * about the caching variations of directory_entry [2]. 
- * [1]: https://github.com/gcc-mirror/gcc/blob/8ea555b7b4725dbc5d9286f729166cd54ce5b615/libstdc%2B%2B-v3/include/bits/fs_dir.h#L341-L348 + * [1]: + https://github.com/gcc-mirror/gcc/blob/8ea555b7b4725dbc5d9286f729166cd54ce5b615/libstdc%2B%2B-v3/include/bits/fs_dir.h#L341-L348 * [2]: https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2016/p0317r1.html */ @@ -187,7 +191,8 @@ SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & std::string PosixSourceAccessor::readLink(const CanonPath & path) { - if (auto parent = path.parent()) assertNoSymlinks(*parent); + if (auto parent = path.parent()) + assertNoSymlinks(*parent); return nix::readLink(makeAbsPath(path).string()); } @@ -216,4 +221,4 @@ ref makeFSSourceAccessor(std::filesystem::path root) { return make_ref(std::move(root)); } -} +} // namespace nix diff --git a/src/libutil/references.cc b/src/libutil/references.cc index 66ad9d37c..cd8a46754 100644 --- a/src/libutil/references.cc +++ b/src/libutil/references.cc @@ -7,27 +7,22 @@ #include #include - namespace nix { - static size_t refLength = 32; /* characters */ - -static void search( - std::string_view s, - StringSet & hashes, - StringSet & seen) +static void search(std::string_view s, StringSet & hashes, StringSet & seen) { static std::once_flag initialised; static bool isBase32[256]; - std::call_once(initialised, [](){ - for (unsigned int i = 0; i < 256; ++i) isBase32[i] = false; + std::call_once(initialised, []() { + for (unsigned int i = 0; i < 256; ++i) + isBase32[i] = false; for (unsigned int i = 0; i < nix32Chars.size(); ++i) isBase32[(unsigned char) nix32Chars[i]] = true; }); - for (size_t i = 0; i + refLength <= s.size(); ) { + for (size_t i = 0; i + refLength <= s.size();) { int j; bool match = true; for (j = refLength - 1; j >= 0; --j) @@ -36,7 +31,8 @@ static void search( match = false; break; } - if (!match) continue; + if (!match) + continue; std::string ref(s.substr(i, refLength)); if (hashes.erase(ref)) { debug("found reference to '%1%' at offset '%2%'", ref, i); @@ -46,8 +42,7 @@ static void search( } } - -void RefScanSink::operator () (std::string_view data) +void RefScanSink::operator()(std::string_view data) { /* It's possible that a reference spans the previous and current fragment, so search in the concatenation of the tail of the @@ -65,14 +60,14 @@ void RefScanSink::operator () (std::string_view data) tail.append(data.data() + data.size() - tailLen, tailLen); } - RewritingSink::RewritingSink(const std::string & from, const std::string & to, Sink & nextSink) : RewritingSink({{from, to}}, nextSink) { } RewritingSink::RewritingSink(const StringMap & rewrites, Sink & nextSink) - : rewrites(rewrites), nextSink(nextSink) + : rewrites(rewrites) + , nextSink(nextSink) { std::string::size_type maxRewriteSize = 0; for (auto & [from, to] : rewrites) { @@ -82,29 +77,29 @@ RewritingSink::RewritingSink(const StringMap & rewrites, Sink & nextSink) this->maxRewriteSize = maxRewriteSize; } -void RewritingSink::operator () (std::string_view data) +void RewritingSink::operator()(std::string_view data) { std::string s(prev); s.append(data); s = rewriteStrings(s, rewrites); - prev = s.size() < maxRewriteSize - ? s - : maxRewriteSize == 0 - ? "" - : std::string(s, s.size() - maxRewriteSize + 1, maxRewriteSize - 1); + prev = s.size() < maxRewriteSize ? s + : maxRewriteSize == 0 ? 
"" + : std::string(s, s.size() - maxRewriteSize + 1, maxRewriteSize - 1); auto consumed = s.size() - prev.size(); pos += consumed; - if (consumed) nextSink(s.substr(0, consumed)); + if (consumed) + nextSink(s.substr(0, consumed)); } void RewritingSink::flush() { - if (prev.empty()) return; + if (prev.empty()) + return; pos += prev.size(); nextSink(prev); prev.clear(); @@ -116,7 +111,7 @@ HashModuloSink::HashModuloSink(HashAlgorithm ha, const std::string & modulus) { } -void HashModuloSink::operator () (std::string_view data) +void HashModuloSink::operator()(std::string_view data) { rewritingSink(data); } @@ -136,4 +131,4 @@ HashResult HashModuloSink::finish() return {h.first, rewritingSink.pos}; } -} +} // namespace nix diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index a74531582..15629935e 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -9,20 +9,19 @@ #include #ifdef _WIN32 -# include -# include -# include "nix/util/windows-error.hh" +# include +# include +# include "nix/util/windows-error.hh" #else -# include +# include #endif - namespace nix { - -void BufferedSink::operator () (std::string_view data) +void BufferedSink::operator()(std::string_view data) { - if (!buffer) buffer = decltype(buffer)(new char[bufSize]); + if (!buffer) + buffer = decltype(buffer)(new char[bufSize]); while (!data.empty()) { /* Optimisation: bypass the buffer if the data exceeds the @@ -36,27 +35,31 @@ void BufferedSink::operator () (std::string_view data) when it's full. */ size_t n = bufPos + data.size() > bufSize ? bufSize - bufPos : data.size(); memcpy(buffer.get() + bufPos, data.data(), n); - data.remove_prefix(n); bufPos += n; - if (bufPos == bufSize) flush(); + data.remove_prefix(n); + bufPos += n; + if (bufPos == bufSize) + flush(); } } - void BufferedSink::flush() { - if (bufPos == 0) return; + if (bufPos == 0) + return; size_t n = bufPos; bufPos = 0; // don't trigger the assert() in ~BufferedSink() writeUnbuffered({buffer.get(), n}); } - FdSink::~FdSink() { - try { flush(); } catch (...) { ignoreExceptionInDestructor(); } + try { + flush(); + } catch (...) { + ignoreExceptionInDestructor(); + } } - void FdSink::writeUnbuffered(std::string_view data) { written += data.size(); @@ -68,24 +71,23 @@ void FdSink::writeUnbuffered(std::string_view data) } } - bool FdSink::good() { return _good; } - -void Source::operator () (char * data, size_t len) +void Source::operator()(char * data, size_t len) { while (len) { size_t n = read(data, len); - data += n; len -= n; + data += n; + len -= n; } } -void Source::operator () (std::string_view data) +void Source::operator()(std::string_view data) { - (*this)((char *)data.data(), data.size()); + (*this)((char *) data.data(), data.size()); } void Source::drainInto(Sink & sink) @@ -102,7 +104,6 @@ void Source::drainInto(Sink & sink) } } - std::string Source::drain() { StringSink s; @@ -110,28 +111,28 @@ std::string Source::drain() return std::move(s.s); } - size_t BufferedSource::read(char * data, size_t len) { - if (!buffer) buffer = decltype(buffer)(new char[bufSize]); + if (!buffer) + buffer = decltype(buffer)(new char[bufSize]); - if (!bufPosIn) bufPosIn = readUnbuffered(buffer.get(), bufSize); + if (!bufPosIn) + bufPosIn = readUnbuffered(buffer.get(), bufSize); /* Copy out the data in the buffer. */ size_t n = len > bufPosIn - bufPosOut ? 
bufPosIn - bufPosOut : len; memcpy(data, buffer.get() + bufPosOut, n); bufPosOut += n; - if (bufPosIn == bufPosOut) bufPosIn = bufPosOut = 0; + if (bufPosIn == bufPosOut) + bufPosIn = bufPosOut = 0; return n; } - bool BufferedSource::hasData() { return bufPosOut < bufPosIn; } - size_t FdSource::readUnbuffered(char * data, size_t len) { #ifdef _WIN32 @@ -147,23 +148,28 @@ size_t FdSource::readUnbuffered(char * data, size_t len) checkInterrupt(); n = ::read(fd, data, len); } while (n == -1 && errno == EINTR); - if (n == -1) { _good = false; throw SysError("reading from file"); } - if (n == 0) { _good = false; throw EndOfFile(std::string(*endOfFileError)); } + if (n == -1) { + _good = false; + throw SysError("reading from file"); + } + if (n == 0) { + _good = false; + throw EndOfFile(std::string(*endOfFileError)); + } #endif read += n; return n; } - bool FdSource::good() { return _good; } - bool FdSource::hasData() { - if (BufferedSource::hasData()) return true; + if (BufferedSource::hasData()) + return true; while (true) { fd_set fds; @@ -177,23 +183,23 @@ bool FdSource::hasData() auto n = select(fd_ + 1, &fds, nullptr, nullptr, &timeout); if (n < 0) { - if (errno == EINTR) continue; + if (errno == EINTR) + continue; throw SysError("polling file descriptor"); } return FD_ISSET(fd, &fds); } } - size_t StringSource::read(char * data, size_t len) { - if (pos == s.size()) throw EndOfFile("end of string reached"); + if (pos == s.size()) + throw EndOfFile("end of string reached"); size_t n = s.copy(data, len, pos); pos += n; return n; } - std::unique_ptr sourceToSink(std::function fun) { struct SourceToSink : FinishSink @@ -203,15 +209,17 @@ std::unique_ptr sourceToSink(std::function fun) std::function fun; std::optional coro; - SourceToSink(std::function fun) : fun(fun) + SourceToSink(std::function fun) + : fun(fun) { } std::string_view cur; - void operator () (std::string_view in) override + void operator()(std::string_view in) override { - if (in.empty()) return; + if (in.empty()) + return; cur = in; if (!coro) { @@ -231,7 +239,9 @@ std::unique_ptr sourceToSink(std::function fun) }); } - if (!*coro) { unreachable(); } + if (!*coro) { + unreachable(); + } if (!cur.empty()) { (*coro)(false); @@ -248,10 +258,7 @@ std::unique_ptr sourceToSink(std::function fun) return std::make_unique(fun); } - -std::unique_ptr sinkToSource( - std::function fun, - std::function eof) +std::unique_ptr sinkToSource(std::function fun, std::function eof) { struct SinkToSource : Source { @@ -262,7 +269,8 @@ std::unique_ptr sinkToSource( std::optional coro; SinkToSource(std::function fun, std::function eof) - : fun(fun), eof(eof) + : fun(fun) + , eof(eof) { } @@ -305,7 +313,6 @@ std::unique_ptr sinkToSource( return std::make_unique(fun, eof); } - void writePadding(size_t len, Sink & sink) { if (len % 8) { @@ -315,7 +322,6 @@ void writePadding(size_t len, Sink & sink) } } - void writeString(std::string_view data, Sink & sink) { sink << data.size(); @@ -323,43 +329,38 @@ void writeString(std::string_view data, Sink & sink) writePadding(data.size(), sink); } - -Sink & operator << (Sink & sink, std::string_view s) +Sink & operator<<(Sink & sink, std::string_view s) { writeString(s, sink); return sink; } - -template void writeStrings(const T & ss, Sink & sink) +template +void writeStrings(const T & ss, Sink & sink) { sink << ss.size(); for (auto & i : ss) sink << i; } -Sink & operator << (Sink & sink, const Strings & s) +Sink & operator<<(Sink & sink, const Strings & s) { writeStrings(s, sink); return sink; } -Sink & operator 
<< (Sink & sink, const StringSet & s) +Sink & operator<<(Sink & sink, const StringSet & s) { writeStrings(s, sink); return sink; } -Sink & operator << (Sink & sink, const Error & ex) +Sink & operator<<(Sink & sink, const Error & ex) { auto & info = ex.info(); - sink - << "Error" - << info.level - << "Error" // removed - << info.msg.str() - << 0 // FIXME: info.errPos - << info.traces.size(); + sink << "Error" << info.level << "Error" // removed + << info.msg.str() << 0 // FIXME: info.errPos + << info.traces.size(); for (auto & trace : info.traces) { sink << 0; // FIXME: trace.pos sink << trace.hint.str(); @@ -367,7 +368,6 @@ Sink & operator << (Sink & sink, const Error & ex) return sink; } - void readPadding(size_t len, Source & source) { if (len % 8) { @@ -375,39 +375,40 @@ void readPadding(size_t len, Source & source) size_t n = 8 - (len % 8); source(zero, n); for (unsigned int i = 0; i < n; i++) - if (zero[i]) throw SerialisationError("non-zero padding"); + if (zero[i]) + throw SerialisationError("non-zero padding"); } } - size_t readString(char * buf, size_t max, Source & source) { auto len = readNum(source); - if (len > max) throw SerialisationError("string is too long"); + if (len > max) + throw SerialisationError("string is too long"); source(buf, len); readPadding(len, source); return len; } - std::string readString(Source & source, size_t max) { auto len = readNum(source); - if (len > max) throw SerialisationError("string is too long"); + if (len > max) + throw SerialisationError("string is too long"); std::string res(len, 0); source(res.data(), len); readPadding(len, source); return res; } -Source & operator >> (Source & in, std::string & s) +Source & operator>>(Source & in, std::string & s) { s = readString(in); return in; } - -template T readStrings(Source & source) +template +T readStrings(Source & source) { auto count = readNum(source); T ss; @@ -419,7 +420,6 @@ template T readStrings(Source & source) template Paths readStrings(Source & source); template PathSet readStrings(Source & source); - Error readError(Source & source) { auto type = readString(source); @@ -427,7 +427,7 @@ Error readError(Source & source) auto level = (Verbosity) readInt(source); [[maybe_unused]] auto name = readString(source); // removed auto msg = readString(source); - ErrorInfo info { + ErrorInfo info{ .level = level, .msg = HintFmt(msg), }; @@ -437,15 +437,12 @@ Error readError(Source & source) for (size_t i = 0; i < nrTraces; ++i) { havePos = readNum(source); assert(havePos == 0); - info.traces.push_back(Trace { - .hint = HintFmt(readString(source)) - }); + info.traces.push_back(Trace{.hint = HintFmt(readString(source))}); } return Error(std::move(info)); } - -void StringSink::operator () (std::string_view data) +void StringSink::operator()(std::string_view data) { s.append(data); } @@ -464,4 +461,4 @@ size_t ChainSource::read(char * data, size_t len) } } -} +} // namespace nix diff --git a/src/libutil/signature/local-keys.cc b/src/libutil/signature/local-keys.cc index 1f7f2c7de..374b5569d 100644 --- a/src/libutil/signature/local-keys.cc +++ b/src/libutil/signature/local-keys.cc @@ -51,8 +51,7 @@ std::string SecretKey::signDetached(std::string_view data) const { unsigned char sig[crypto_sign_BYTES]; unsigned long long sigLen; - crypto_sign_detached(sig, &sigLen, (unsigned char *) data.data(), data.size(), - (unsigned char *) key.data()); + crypto_sign_detached(sig, &sigLen, (unsigned char *) data.data(), data.size(), (unsigned char *) key.data()); return name + ":" + base64Encode(std::string((char 
*) sig, sigLen)); } @@ -84,7 +83,8 @@ bool PublicKey::verifyDetached(std::string_view data, std::string_view sig) cons { auto ss = BorrowedCryptoValue::parse(sig); - if (ss.name != std::string_view { name }) return false; + if (ss.name != std::string_view{name}) + return false; return verifyDetachedAnon(data, ss.payload); } @@ -100,9 +100,9 @@ bool PublicKey::verifyDetachedAnon(std::string_view data, std::string_view sig) if (sig2.size() != crypto_sign_BYTES) throw Error("signature is not valid"); - return crypto_sign_verify_detached((unsigned char *) sig2.data(), - (unsigned char *) data.data(), data.size(), - (unsigned char *) key.data()) == 0; + return crypto_sign_verify_detached( + (unsigned char *) sig2.data(), (unsigned char *) data.data(), data.size(), (unsigned char *) key.data()) + == 0; } bool verifyDetached(std::string_view data, std::string_view sig, const PublicKeys & publicKeys) @@ -110,9 +110,10 @@ bool verifyDetached(std::string_view data, std::string_view sig, const PublicKey auto ss = BorrowedCryptoValue::parse(sig); auto key = publicKeys.find(std::string(ss.name)); - if (key == publicKeys.end()) return false; + if (key == publicKeys.end()) + return false; return key->second.verifyDetachedAnon(data, ss.payload); } -} +} // namespace nix diff --git a/src/libutil/signature/signer.cc b/src/libutil/signature/signer.cc index 46445e9e9..9f6f663e9 100644 --- a/src/libutil/signature/signer.cc +++ b/src/libutil/signature/signer.cc @@ -8,7 +8,8 @@ namespace nix { LocalSigner::LocalSigner(SecretKey && privateKey) : privateKey(privateKey) , publicKey(privateKey.toPublicKey()) -{ } +{ +} std::string LocalSigner::signDetached(std::string_view s) const { @@ -20,4 +21,4 @@ const PublicKey & LocalSigner::getPublicKey() return publicKey; } -} +} // namespace nix diff --git a/src/libutil/source-accessor.cc b/src/libutil/source-accessor.cc index fc9752456..9a0625828 100644 --- a/src/libutil/source-accessor.cc +++ b/src/libutil/source-accessor.cc @@ -10,17 +10,26 @@ bool SourceAccessor::Stat::isNotNARSerialisable() return this->type != tRegular && this->type != tSymlink && this->type != tDirectory; } -std::string SourceAccessor::Stat::typeString() { +std::string SourceAccessor::Stat::typeString() +{ switch (this->type) { - case tRegular: return "regular"; - case tSymlink: return "symlink"; - case tDirectory: return "directory"; - case tChar: return "character device"; - case tBlock: return "block device"; - case tSocket: return "socket"; - case tFifo: return "fifo"; - case tUnknown: - default: return "unknown"; + case tRegular: + return "regular"; + case tSymlink: + return "symlink"; + case tDirectory: + return "directory"; + case tChar: + return "character device"; + case tBlock: + return "block device"; + case tSocket: + return "socket"; + case tFifo: + return "fifo"; + case tUnknown: + default: + return "unknown"; } return "unknown"; } @@ -40,28 +49,19 @@ std::string SourceAccessor::readFile(const CanonPath & path) { StringSink sink; std::optional size; - readFile(path, sink, [&](uint64_t _size) - { - size = _size; - }); + readFile(path, sink, [&](uint64_t _size) { size = _size; }); assert(size && *size == sink.s.size()); return std::move(sink.s); } -void SourceAccessor::readFile( - const CanonPath & path, - Sink & sink, - std::function sizeCallback) +void SourceAccessor::readFile(const CanonPath & path, Sink & sink, std::function sizeCallback) { auto s = readFile(path); sizeCallback(s.size()); sink(s); } -Hash SourceAccessor::hashPath( - const CanonPath & path, - PathFilter & filter, - 
HashAlgorithm ha) +Hash SourceAccessor::hashPath(const CanonPath & path, PathFilter & filter, HashAlgorithm ha) { HashSink sink(ha); dumpPath(path, sink, filter); @@ -87,9 +87,7 @@ std::string SourceAccessor::showPath(const CanonPath & path) return displayPrefix + path.abs() + displaySuffix; } -CanonPath SourceAccessor::resolveSymlinks( - const CanonPath & path, - SymlinkResolution mode) +CanonPath SourceAccessor::resolveSymlinks(const CanonPath & path, SymlinkResolution mode) { auto res = CanonPath::root; @@ -128,4 +126,4 @@ CanonPath SourceAccessor::resolveSymlinks( return res; } -} +} // namespace nix diff --git a/src/libutil/source-path.cc b/src/libutil/source-path.cc index 6d42fa95f..2f1f1096b 100644 --- a/src/libutil/source-path.cc +++ b/src/libutil/source-path.cc @@ -3,7 +3,9 @@ namespace nix { std::string_view SourcePath::baseName() const -{ return path.baseName().value_or("source"); } +{ + return path.baseName().value_or("source"); +} SourcePath SourcePath::parent() const { @@ -13,39 +15,59 @@ SourcePath SourcePath::parent() const } std::string SourcePath::readFile() const -{ return accessor->readFile(path); } +{ + return accessor->readFile(path); +} bool SourcePath::pathExists() const -{ return accessor->pathExists(path); } +{ + return accessor->pathExists(path); +} SourceAccessor::Stat SourcePath::lstat() const -{ return accessor->lstat(path); } +{ + return accessor->lstat(path); +} std::optional SourcePath::maybeLstat() const -{ return accessor->maybeLstat(path); } +{ + return accessor->maybeLstat(path); +} SourceAccessor::DirEntries SourcePath::readDirectory() const -{ return accessor->readDirectory(path); } +{ + return accessor->readDirectory(path); +} std::string SourcePath::readLink() const -{ return accessor->readLink(path); } +{ + return accessor->readLink(path); +} -void SourcePath::dumpPath( - Sink & sink, - PathFilter & filter) const -{ return accessor->dumpPath(path, sink, filter); } +void SourcePath::dumpPath(Sink & sink, PathFilter & filter) const +{ + return accessor->dumpPath(path, sink, filter); +} std::optional SourcePath::getPhysicalPath() const -{ return accessor->getPhysicalPath(path); } +{ + return accessor->getPhysicalPath(path); +} std::string SourcePath::to_string() const -{ return accessor->showPath(path); } +{ + return accessor->showPath(path); +} -SourcePath SourcePath::operator / (const CanonPath & x) const -{ return {accessor, path / x}; } +SourcePath SourcePath::operator/(const CanonPath & x) const +{ + return {accessor, path / x}; +} -SourcePath SourcePath::operator / (std::string_view c) const -{ return {accessor, path / c}; } +SourcePath SourcePath::operator/(std::string_view c) const +{ + return {accessor, path / c}; +} bool SourcePath::operator==(const SourcePath & x) const noexcept { @@ -63,4 +85,4 @@ std::ostream & operator<<(std::ostream & str, const SourcePath & path) return str; } -} +} // namespace nix diff --git a/src/libutil/subdir-source-accessor.cc b/src/libutil/subdir-source-accessor.cc index 265836118..d4f57e2f7 100644 --- a/src/libutil/subdir-source-accessor.cc +++ b/src/libutil/subdir-source-accessor.cc @@ -56,4 +56,4 @@ ref projectSubdirSourceAccessor(ref parent, Cano return make_ref(std::move(parent), std::move(subdirectory)); } -} +} // namespace nix diff --git a/src/libutil/suggestions.cc b/src/libutil/suggestions.cc index aee23d45e..2367a12bf 100644 --- a/src/libutil/suggestions.cc +++ b/src/libutil/suggestions.cc @@ -15,20 +15,20 @@ int levenshteinDistance(std::string_view first, std::string_view second) int m = 
first.size(); int n = second.size(); - auto v0 = std::vector(n+1); - auto v1 = std::vector(n+1); + auto v0 = std::vector(n + 1); + auto v1 = std::vector(n + 1); for (auto i = 0; i <= n; i++) v0[i] = i; for (auto i = 0; i < m; i++) { - v1[0] = i+1; + v1[0] = i + 1; for (auto j = 0; j < n; j++) { - auto deletionCost = v0[j+1] + 1; + auto deletionCost = v0[j + 1] + 1; auto insertionCost = v1[j] + 1; auto substitutionCost = first[i] == second[j] ? v0[j] : v0[j] + 1; - v1[j+1] = std::min({deletionCost, insertionCost, substitutionCost}); + v1[j + 1] = std::min({deletionCost, insertionCost, substitutionCost}); } std::swap(v0, v1); @@ -37,18 +37,17 @@ int levenshteinDistance(std::string_view first, std::string_view second) return v0[n]; } -Suggestions Suggestions::bestMatches ( - const StringSet & allMatches, - std::string_view query) +Suggestions Suggestions::bestMatches(const StringSet & allMatches, std::string_view query) { std::set res; for (const auto & possibleMatch : allMatches) { - res.insert(Suggestion { - .distance = levenshteinDistance(query, possibleMatch), - .suggestion = possibleMatch, - }); + res.insert( + Suggestion{ + .distance = levenshteinDistance(query, possibleMatch), + .suggestion = possibleMatch, + }); } - return Suggestions { res }; + return Suggestions{res}; } Suggestions Suggestions::trim(int limit, int maxDistance) const @@ -75,31 +74,29 @@ std::string Suggestion::to_string() const std::string Suggestions::to_string() const { switch (suggestions.size()) { - case 0: - return ""; - case 1: - return suggestions.begin()->to_string(); - default: { - std::string res = "one of "; - auto iter = suggestions.begin(); - res += iter->to_string(); // Iter can’t be end() because the container isn’t null - iter++; - auto last = suggestions.end(); last--; - for ( ; iter != suggestions.end() ; iter++) { - res += (iter == last) ? " or " : ", "; - res += iter->to_string(); - } - return res; + case 0: + return ""; + case 1: + return suggestions.begin()->to_string(); + default: { + std::string res = "one of "; + auto iter = suggestions.begin(); + res += iter->to_string(); // Iter can’t be end() because the container isn’t null + iter++; + auto last = suggestions.end(); + last--; + for (; iter != suggestions.end(); iter++) { + res += (iter == last) ? 
" or " : ", "; + res += iter->to_string(); } + return res; + } } } Suggestions & Suggestions::operator+=(const Suggestions & other) { - suggestions.insert( - other.suggestions.begin(), - other.suggestions.end() - ); + suggestions.insert(other.suggestions.begin(), other.suggestions.end()); return *this; } @@ -113,4 +110,4 @@ std::ostream & operator<<(std::ostream & str, const Suggestions & suggestions) return str << suggestions.to_string(); } -} +} // namespace nix diff --git a/src/libutil/tarfile.cc b/src/libutil/tarfile.cc index 299847850..0757b3a81 100644 --- a/src/libutil/tarfile.cc +++ b/src/libutil/tarfile.cc @@ -44,7 +44,7 @@ void checkLibArchive(archive * archive, int err, const std::string & reason) } constexpr auto defaultBufferSize = std::size_t{65536}; -} +} // namespace void TarArchive::check(int err, const std::string & reason) { @@ -247,4 +247,4 @@ time_t unpackTarfileToSink(TarArchive & archive, ExtendedFileSystemObjectSink & return lastModified; } -} +} // namespace nix diff --git a/src/libutil/tee-logger.cc b/src/libutil/tee-logger.cc index 55334a821..8433168a5 100644 --- a/src/libutil/tee-logger.cc +++ b/src/libutil/tee-logger.cc @@ -104,4 +104,4 @@ makeTeeLogger(std::unique_ptr mainLogger, std::vector(std::move(allLoggers)); } -} +} // namespace nix diff --git a/src/libutil/terminal.cc b/src/libutil/terminal.cc index 63473d1a9..b5765487c 100644 --- a/src/libutil/terminal.cc +++ b/src/libutil/terminal.cc @@ -3,12 +3,12 @@ #include "nix/util/sync.hh" #ifdef _WIN32 -# include -# define WIN32_LEAN_AND_MEAN -# include -# define isatty _isatty +# include +# define WIN32_LEAN_AND_MEAN +# include +# define isatty _isatty #else -# include +# include #endif #include #include @@ -57,16 +57,14 @@ inline std::pair charWidthUTF8Helper(std::string_view s) return {width, bytes}; } -} +} // namespace namespace nix { bool isTTY() { - static const bool tty = - isatty(STDERR_FILENO) - && getEnv("TERM").value_or("dumb") != "dumb" - && !(getEnv("NO_COLOR").has_value() || getEnv("NOCOLOR").has_value()); + static const bool tty = isatty(STDERR_FILENO) && getEnv("TERM").value_or("dumb") != "dumb" + && !(getEnv("NO_COLOR").has_value() || getEnv("NOCOLOR").has_value()); return tty; } @@ -87,11 +85,14 @@ std::string filterANSIEscapes(std::string_view s, bool filterAll, unsigned int w if (i != s.end() && *i == '[') { e += *i++; // eat parameter bytes - while (i != s.end() && *i >= 0x30 && *i <= 0x3f) e += *i++; + while (i != s.end() && *i >= 0x30 && *i <= 0x3f) + e += *i++; // eat intermediate bytes - while (i != s.end() && *i >= 0x20 && *i <= 0x2f) e += *i++; + while (i != s.end() && *i >= 0x20 && *i <= 0x2f) + e += *i++; // eat final byte - if (i != s.end() && *i >= 0x40 && *i <= 0x7e) e += last = *i++; + if (i != s.end() && *i >= 0x40 && *i <= 0x7e) + e += last = *i++; } else if (i != s.end() && *i == ']') { // OSC e += *i++; @@ -101,15 +102,18 @@ std::string filterANSIEscapes(std::string_view s, bool filterAll, unsigned int w // 2. 
BEL ('\a') (xterm-style, used by gcc) // eat ESC or BEL - while (i != s.end() && *i != '\e' && *i != '\a') e += *i++; + while (i != s.end() && *i != '\e' && *i != '\a') + e += *i++; if (i != s.end()) { - char v = *i; - e += *i++; - // eat backslash after ESC - if (i != s.end() && v == '\e' && *i == '\\') e += last = *i++; + char v = *i; + e += *i++; + // eat backslash after ESC + if (i != s.end() && v == '\e' && *i == '\\') + e += last = *i++; } } else { - if (i != s.end() && *i >= 0x40 && *i <= 0x5f) e += *i++; + if (i != s.end() && *i >= 0x40 && *i <= 0x5f) + e += *i++; } if (!filterAll && last == 'm') @@ -146,17 +150,16 @@ std::string filterANSIEscapes(std::string_view s, bool filterAll, unsigned int w static Sync> windowSize{{0, 0}}; - void updateWindowSize() { - #ifndef _WIN32 +#ifndef _WIN32 struct winsize ws; if (ioctl(2, TIOCGWINSZ, &ws) == 0) { auto windowSize_(windowSize.lock()); windowSize_->first = ws.ws_row; windowSize_->second = ws.ws_col; } - #else +#else CONSOLE_SCREEN_BUFFER_INFO info; // From https://stackoverflow.com/a/12642749 if (GetConsoleScreenBufferInfo(GetStdHandle(STD_OUTPUT_HANDLE), &info) != 0) { @@ -165,13 +168,12 @@ void updateWindowSize() windowSize_->first = info.srWindow.Bottom - info.srWindow.Top + 1; windowSize_->second = info.dwSize.X; } - #endif +#endif } - std::pair getWindowSize() { return *windowSize.lock(); } -} +} // namespace nix diff --git a/src/libutil/thread-pool.cc b/src/libutil/thread-pool.cc index 8958bc550..b7740bc3e 100644 --- a/src/libutil/thread-pool.cc +++ b/src/libutil/thread-pool.cc @@ -9,7 +9,8 @@ ThreadPool::ThreadPool(size_t _maxThreads) { if (!maxThreads) { maxThreads = std::thread::hardware_concurrency(); - if (!maxThreads) maxThreads = 1; + if (!maxThreads) + maxThreads = 1; } debug("starting pool of %d threads", maxThreads - 1); @@ -29,7 +30,8 @@ void ThreadPool::shutdown() std::swap(workers, state->workers); } - if (workers.empty()) return; + if (workers.empty()) + return; debug("reaping %d worker threads", workers.size()); @@ -127,9 +129,11 @@ void ThreadPool::doWork(bool mainThread) /* Wait until a work item is available or we're asked to quit. 
*/ while (true) { - if (quit) return; + if (quit) + return; - if (!state->pending.empty()) break; + if (!state->pending.empty()) + break; /* If there are no active or pending items, and the main thread is running process(), then no new items @@ -158,6 +162,4 @@ void ThreadPool::doWork(bool mainThread) } } -} - - +} // namespace nix diff --git a/src/libutil/union-source-accessor.cc b/src/libutil/union-source-accessor.cc index 9950f6049..96b6a643a 100644 --- a/src/libutil/union-source-accessor.cc +++ b/src/libutil/union-source-accessor.cc @@ -79,4 +79,4 @@ ref makeUnionSourceAccessor(std::vector> && return make_ref(std::move(accessors)); } -} +} // namespace nix diff --git a/src/libutil/unix-domain-socket.cc b/src/libutil/unix-domain-socket.cc index 2422caf14..50df7438b 100644 --- a/src/libutil/unix-domain-socket.cc +++ b/src/libutil/unix-domain-socket.cc @@ -3,12 +3,12 @@ #include "nix/util/util.hh" #ifdef _WIN32 -# include -# include +# include +# include #else -# include -# include -# include "nix/util/processes.hh" +# include +# include +# include "nix/util/processes.hh" #endif #include @@ -16,11 +16,14 @@ namespace nix { AutoCloseFD createUnixDomainSocket() { - AutoCloseFD fdSocket = toDescriptor(socket(PF_UNIX, SOCK_STREAM - #ifdef SOCK_CLOEXEC - | SOCK_CLOEXEC - #endif - , 0)); + AutoCloseFD fdSocket = toDescriptor(socket( + PF_UNIX, + SOCK_STREAM +#ifdef SOCK_CLOEXEC + | SOCK_CLOEXEC +#endif + , + 0)); if (!fdSocket) throw SysError("cannot create Unix domain socket"); #ifndef _WIN32 @@ -44,9 +47,8 @@ AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode) return fdSocket; } -static void bindConnectProcHelper( - std::string_view operationName, auto && operation, - Socket fd, const std::string & path) +static void +bindConnectProcHelper(std::string_view operationName, auto && operation, Socket fd, const std::string & path) { struct sockaddr_un addr; addr.sun_family = AF_UNIX; @@ -118,4 +120,4 @@ AutoCloseFD connect(const std::filesystem::path & path) return fd; } -} +} // namespace nix diff --git a/src/libutil/unix/environment-variables.cc b/src/libutil/unix/environment-variables.cc index 0e1ed2794..c68e3bcad 100644 --- a/src/libutil/unix/environment-variables.cc +++ b/src/libutil/unix/environment-variables.cc @@ -19,4 +19,4 @@ int setEnvOs(const OsString & name, const OsString & value) return setEnv(name.c_str(), value.c_str()); } -} +} // namespace nix diff --git a/src/libutil/unix/file-descriptor.cc b/src/libutil/unix/file-descriptor.cc index 0051e8aa4..2b612e854 100644 --- a/src/libutil/unix/file-descriptor.cc +++ b/src/libutil/unix/file-descriptor.cc @@ -27,7 +27,7 @@ void pollFD(int fd, int events) throw SysError("poll on file descriptor failed"); } } -} +} // namespace std::string readFile(int fd) { @@ -45,28 +45,31 @@ void readFull(int fd, char * buf, size_t count) ssize_t res = read(fd, buf, count); if (res == -1) { switch (errno) { - case EINTR: continue; + case EINTR: + continue; case EAGAIN: pollFD(fd, POLLIN); continue; } throw SysError("reading from file"); } - if (res == 0) throw EndOfFile("unexpected end-of-file"); + if (res == 0) + throw EndOfFile("unexpected end-of-file"); count -= res; buf += res; } } - void writeFull(int fd, std::string_view s, bool allowInterrupts) { while (!s.empty()) { - if (allowInterrupts) checkInterrupt(); + if (allowInterrupts) + checkInterrupt(); ssize_t res = write(fd, s.data(), s.size()); if (res == -1) { switch (errno) { - case EINTR: continue; + case EINTR: + continue; case EAGAIN: pollFD(fd, POLLOUT); continue; @@ -78,7 +81,6 
@@ void writeFull(int fd, std::string_view s, bool allowInterrupts) } } - std::string readLine(int fd, bool eofOk) { std::string s; @@ -89,7 +91,8 @@ std::string readLine(int fd, bool eofOk) ssize_t rd = read(fd, &ch, 1); if (rd == -1) { switch (errno) { - case EINTR: continue; + case EINTR: + continue; case EAGAIN: { pollFD(fd, POLLIN); continue; @@ -102,15 +105,14 @@ std::string readLine(int fd, bool eofOk) return s; else throw EndOfFile("unexpected EOF reading a line"); - } - else { - if (ch == '\n') return s; + } else { + if (ch == '\n') + return s; s += ch; } } } - void drainFD(int fd, Sink & sink, bool block) { // silence GCC maybe-uninitialized warning in finally @@ -138,9 +140,10 @@ void drainFD(int fd, Sink & sink, bool block) break; if (errno != EINTR) throw SysError("reading from file"); - } - else if (rd == 0) break; - else sink({reinterpret_cast(buf.data()), (size_t) rd}); + } else if (rd == 0) + break; + else + sink({reinterpret_cast(buf.data()), (size_t) rd}); } } @@ -150,9 +153,11 @@ void Pipe::create() { int fds[2]; #if HAVE_PIPE2 - if (pipe2(fds, O_CLOEXEC) != 0) throw SysError("creating pipe"); + if (pipe2(fds, O_CLOEXEC) != 0) + throw SysError("creating pipe"); #else - if (pipe(fds) != 0) throw SysError("creating pipe"); + if (pipe(fds) != 0) + throw SysError("creating pipe"); unix::closeOnExec(fds[0]); unix::closeOnExec(fds[1]); #endif @@ -160,17 +165,16 @@ void Pipe::create() writeSide = fds[1]; } - ////////////////////////////////////////////////////////////////////// #if defined(__linux__) || defined(__FreeBSD__) static int unix_close_range(unsigned int first, unsigned int last, int flags) { -#if !HAVE_CLOSE_RANGE - return syscall(SYS_close_range, first, last, (unsigned int)flags); -#else +# if !HAVE_CLOSE_RANGE + return syscall(SYS_close_range, first, last, (unsigned int) flags); +# else return close_range(first, last, flags); -#endif +# endif } #endif @@ -212,13 +216,11 @@ void unix::closeExtraFDs() close(fd); /* ignore result */ } - void unix::closeOnExec(int fd) { int prev; - if ((prev = fcntl(fd, F_GETFD, 0)) == -1 || - fcntl(fd, F_SETFD, prev | FD_CLOEXEC) == -1) + if ((prev = fcntl(fd, F_GETFD, 0)) == -1 || fcntl(fd, F_SETFD, prev | FD_CLOEXEC) == -1) throw SysError("setting close-on-exec flag"); } -} +} // namespace nix diff --git a/src/libutil/unix/file-path.cc b/src/libutil/unix/file-path.cc index 0fb1f468c..53b1fca36 100644 --- a/src/libutil/unix/file-path.cc +++ b/src/libutil/unix/file-path.cc @@ -10,7 +10,7 @@ namespace nix { std::optional maybePath(PathView path) { - return { path }; + return {path}; } std::filesystem::path pathNG(PathView path) @@ -18,4 +18,4 @@ std::filesystem::path pathNG(PathView path) return path; } -} +} // namespace nix diff --git a/src/libutil/unix/file-system.cc b/src/libutil/unix/file-system.cc index 7865de2e9..8ff66328b 100644 --- a/src/libutil/unix/file-system.cc +++ b/src/libutil/unix/file-system.cc @@ -66,4 +66,4 @@ void setWriteTime( #endif } -} +} // namespace nix diff --git a/src/libutil/unix/include/nix/util/monitor-fd.hh b/src/libutil/unix/include/nix/util/monitor-fd.hh index c10ad96bd..5c1e5f195 100644 --- a/src/libutil/unix/include/nix/util/monitor-fd.hh +++ b/src/libutil/unix/include/nix/util/monitor-fd.hh @@ -127,4 +127,4 @@ public: } }; -} +} // namespace nix diff --git a/src/libutil/unix/include/nix/util/signals-impl.hh b/src/libutil/unix/include/nix/util/signals-impl.hh index 7397744b2..1bcc90cdf 100644 --- a/src/libutil/unix/include/nix/util/signals-impl.hh +++ 
b/src/libutil/unix/include/nix/util/signals-impl.hh @@ -47,7 +47,7 @@ void _interrupted(); * necessarily match the current thread's mask. * See saveSignalMask() to set the saved mask to the current mask. */ -void setChildSignalMask(sigset_t *sigs); +void setChildSignalMask(sigset_t * sigs); /** * Start a thread that handles various signals. Also block those signals @@ -73,7 +73,7 @@ void restoreSignals(); void triggerInterrupt(); -} +} // namespace unix static inline void setInterrupted(bool isInterrupted) { @@ -116,8 +116,8 @@ struct ReceiveInterrupts ReceiveInterrupts() : target(pthread_self()) , callback(createInterruptCallback([&]() { pthread_kill(target, SIGUSR1); })) - { } + { + } }; - -} +} // namespace nix diff --git a/src/libutil/unix/muxable-pipe.cc b/src/libutil/unix/muxable-pipe.cc index 57bcdb0ad..1b8b09adc 100644 --- a/src/libutil/unix/muxable-pipe.cc +++ b/src/libutil/unix/muxable-pipe.cc @@ -44,4 +44,4 @@ void MuxablePipePollState::iterate( } } -} +} // namespace nix diff --git a/src/libutil/unix/os-string.cc b/src/libutil/unix/os-string.cc index 1a2be1554..08d275bc6 100644 --- a/src/libutil/unix/os-string.cc +++ b/src/libutil/unix/os-string.cc @@ -18,4 +18,4 @@ std::filesystem::path::string_type string_to_os_string(std::string_view s) return std::string{s}; } -} +} // namespace nix diff --git a/src/libutil/unix/processes.cc b/src/libutil/unix/processes.cc index 0d50fc303..9582ff840 100644 --- a/src/libutil/unix/processes.cc +++ b/src/libutil/unix/processes.cc @@ -20,51 +20,45 @@ #include #ifdef __APPLE__ -# include +# include #endif #ifdef __linux__ -# include -# include +# include +# include #endif #include "util-config-private.hh" #include "util-unix-config-private.hh" - namespace nix { -Pid::Pid() -{ -} - +Pid::Pid() {} Pid::Pid(pid_t pid) : pid(pid) { } - Pid::~Pid() { - if (pid != -1) kill(); + if (pid != -1) + kill(); } - -void Pid::operator =(pid_t pid) +void Pid::operator=(pid_t pid) { - if (this->pid != -1 && this->pid != pid) kill(); + if (this->pid != -1 && this->pid != pid) + kill(); this->pid = pid; killSignal = SIGKILL; // reset signal to default } - Pid::operator pid_t() { return pid; } - int Pid::kill() { assert(pid != -1); @@ -87,7 +81,6 @@ int Pid::kill() return wait(); } - int Pid::wait() { assert(pid != -1); @@ -104,19 +97,16 @@ int Pid::wait() } } - void Pid::setSeparatePG(bool separatePG) { this->separatePG = separatePG; } - void Pid::setKillSignal(int signal) { this->killSignal = signal; } - pid_t Pid::release() { pid_t p = pid; @@ -124,7 +114,6 @@ pid_t Pid::release() return p; } - void killUser(uid_t uid) { debug("killing all processes running under uid '%1%'", uid); @@ -136,7 +125,6 @@ void killUser(uid_t uid) fork a process, switch to uid, and send a mass kill. */ Pid pid = startProcess([&] { - if (setuid(uid) == -1) throw SysError("setting uid"); @@ -147,11 +135,14 @@ void killUser(uid_t uid) calling process. In the OSX libc, it's set to true, which means "follow POSIX", which we don't want here */ - if (syscall(SYS_kill, -1, SIGKILL, false) == 0) break; + if (syscall(SYS_kill, -1, SIGKILL, false) == 0) + break; #else - if (kill(-1, SIGKILL) == 0) break; + if (kill(-1, SIGKILL) == 0) + break; #endif - if (errno == ESRCH || errno == EPERM) break; /* no more processes */ + if (errno == ESRCH || errno == EPERM) + break; /* no more processes */ if (errno != EINTR) throw SysError("cannot kill processes for uid '%1%'", uid); } @@ -169,7 +160,6 @@ void killUser(uid_t uid) uid | grep -q $uid'. 
*/ } - ////////////////////////////////////////////////////////////////////// using ChildWrapperFunction = std::function; @@ -177,6 +167,7 @@ using ChildWrapperFunction = std::function; /* Wrapper around vfork to prevent the child process from clobbering the caller's stack frame in the parent. */ static pid_t doFork(bool allowVfork, ChildWrapperFunction & fun) __attribute__((noinline)); + static pid_t doFork(bool allowVfork, ChildWrapperFunction & fun) { #ifdef __linux__ @@ -184,22 +175,21 @@ static pid_t doFork(bool allowVfork, ChildWrapperFunction & fun) #else pid_t pid = fork(); #endif - if (pid != 0) return pid; + if (pid != 0) + return pid; fun(); unreachable(); } - #ifdef __linux__ static int childEntry(void * arg) { - auto & fun = *reinterpret_cast(arg); + auto & fun = *reinterpret_cast(arg); fun(); return 1; } #endif - pid_t startProcess(std::function fun, const ProcessOptions & options) { auto newLogger = makeSimpleLogger(); @@ -222,8 +212,10 @@ pid_t startProcess(std::function fun, const ProcessOptions & options) } catch (std::exception & e) { try { std::cerr << options.errorPrefix << e.what() << "\n"; - } catch (...) { } - } catch (...) { } + } catch (...) { + } + } catch (...) { + } if (options.runExitHandlers) exit(1); else @@ -233,34 +225,41 @@ pid_t startProcess(std::function fun, const ProcessOptions & options) pid_t pid = -1; if (options.cloneFlags) { - #ifdef __linux__ +#ifdef __linux__ // Not supported, since then we don't know when to free the stack. assert(!(options.cloneFlags & CLONE_VM)); size_t stackSize = 1 * 1024 * 1024; - auto stack = static_cast(mmap(0, stackSize, - PROT_WRITE | PROT_READ, MAP_PRIVATE | MAP_ANONYMOUS | MAP_STACK, -1, 0)); - if (stack == MAP_FAILED) throw SysError("allocating stack"); + auto stack = static_cast( + mmap(0, stackSize, PROT_WRITE | PROT_READ, MAP_PRIVATE | MAP_ANONYMOUS | MAP_STACK, -1, 0)); + if (stack == MAP_FAILED) + throw SysError("allocating stack"); Finally freeStack([&] { munmap(stack, stackSize); }); pid = clone(childEntry, stack + stackSize, options.cloneFlags | SIGCHLD, &wrapper); - #else +#else throw Error("clone flags are only supported on Linux"); - #endif +#endif } else pid = doFork(options.allowVfork, wrapper); - if (pid == -1) throw SysError("unable to fork"); + if (pid == -1) + throw SysError("unable to fork"); return pid; } - -std::string runProgram(Path program, bool lookupPath, const Strings & args, - const std::optional & input, bool isInteractive) +std::string runProgram( + Path program, bool lookupPath, const Strings & args, const std::optional & input, bool isInteractive) { - auto res = runProgram(RunOptions {.program = program, .lookupPath = lookupPath, .args = args, .input = input, .isInteractive = isInteractive}); + auto res = runProgram( + RunOptions{ + .program = program, + .lookupPath = lookupPath, + .args = args, + .input = input, + .isInteractive = isInteractive}); if (!statusOk(res.first)) throw ExecError(res.first, "program '%1%' %2%", program, statusToString(res.first)); @@ -301,8 +300,10 @@ void runProgram2(const RunOptions & options) /* Create a pipe. */ Pipe out, in; - if (options.standardOut) out.create(); - if (source) in.create(); + if (options.standardOut) + out.create(); + if (source) + in.create(); ProcessOptions processOptions; // vfork implies that the environment of the main process and the fork will @@ -313,41 +314,43 @@ void runProgram2(const RunOptions & options) auto suspension = logger->suspendIf(options.isInteractive); /* Fork. 
*/ - Pid pid = startProcess([&] { - if (options.environment) - replaceEnv(*options.environment); - if (options.standardOut && dup2(out.writeSide.get(), STDOUT_FILENO) == -1) - throw SysError("dupping stdout"); - if (options.mergeStderrToStdout) - if (dup2(STDOUT_FILENO, STDERR_FILENO) == -1) - throw SysError("cannot dup stdout into stderr"); - if (source && dup2(in.readSide.get(), STDIN_FILENO) == -1) - throw SysError("dupping stdin"); + Pid pid = startProcess( + [&] { + if (options.environment) + replaceEnv(*options.environment); + if (options.standardOut && dup2(out.writeSide.get(), STDOUT_FILENO) == -1) + throw SysError("dupping stdout"); + if (options.mergeStderrToStdout) + if (dup2(STDOUT_FILENO, STDERR_FILENO) == -1) + throw SysError("cannot dup stdout into stderr"); + if (source && dup2(in.readSide.get(), STDIN_FILENO) == -1) + throw SysError("dupping stdin"); - if (options.chdir && chdir((*options.chdir).c_str()) == -1) - throw SysError("chdir failed"); - if (options.gid && setgid(*options.gid) == -1) - throw SysError("setgid failed"); - /* Drop all other groups if we're setgid. */ - if (options.gid && setgroups(0, 0) == -1) - throw SysError("setgroups failed"); - if (options.uid && setuid(*options.uid) == -1) - throw SysError("setuid failed"); + if (options.chdir && chdir((*options.chdir).c_str()) == -1) + throw SysError("chdir failed"); + if (options.gid && setgid(*options.gid) == -1) + throw SysError("setgid failed"); + /* Drop all other groups if we're setgid. */ + if (options.gid && setgroups(0, 0) == -1) + throw SysError("setgroups failed"); + if (options.uid && setuid(*options.uid) == -1) + throw SysError("setuid failed"); - Strings args_(options.args); - args_.push_front(options.program); + Strings args_(options.args); + args_.push_front(options.program); - restoreProcessContext(); + restoreProcessContext(); - if (options.lookupPath) - execvp(options.program.c_str(), stringsToCharPtrs(args_).data()); + if (options.lookupPath) + execvp(options.program.c_str(), stringsToCharPtrs(args_).data()); // This allows you to refer to a program with a pathname relative // to the PATH variable. - else - execv(options.program.c_str(), stringsToCharPtrs(args_).data()); + else + execv(options.program.c_str(), stringsToCharPtrs(args_).data()); - throw SysError("executing '%1%'", options.program); - }, processOptions); + throw SysError("executing '%1%'", options.program); + }, + processOptions); out.writeSide.close(); @@ -360,7 +363,6 @@ void runProgram2(const RunOptions & options) writerThread.join(); }); - if (source) { in.readSide.close(); writerThread = std::thread([&] { @@ -390,7 +392,8 @@ void runProgram2(const RunOptions & options) int status = pid.wait(); /* Wait for the writer thread to finish. */ - if (source) promise.get_future().get(); + if (source) + promise.get_future().get(); if (status) throw ExecError(status, "program '%1%' %2%", options.program, statusToString(status)); @@ -411,13 +414,12 @@ std::string statusToString(int status) #else return fmt("failed due to signal %1%", sig); #endif - } - else + } else return "died abnormally"; - } else return "succeeded"; + } else + return "succeeded"; } - bool statusOk(int status) { return WIFEXITED(status) && WEXITSTATUS(status) == 0; @@ -428,7 +430,7 @@ int execvpe(const char * file0, const char * const argv[], const char * const en auto file = ExecutablePath::load().findPath(file0); // `const_cast` is safe. 
See the note in // https://pubs.opengroup.org/onlinepubs/9799919799/functions/exec.html - return execve(file.c_str(), const_cast(argv), const_cast(envp)); + return execve(file.c_str(), const_cast(argv), const_cast(envp)); } -} +} // namespace nix diff --git a/src/libutil/unix/signals.cc b/src/libutil/unix/signals.cc index 665b9b096..8a94cc2b1 100644 --- a/src/libutil/unix/signals.cc +++ b/src/libutil/unix/signals.cc @@ -34,15 +34,14 @@ void unix::_interrupted() } } - ////////////////////////////////////////////////////////////////////// - /* We keep track of interrupt callbacks using integer tokens, so we can iterate safely without having to lock the data structure while executing arbitrary functions. */ -struct InterruptCallbacks { +struct InterruptCallbacks +{ typedef int64_t Token; /* We use unique tokens so that we can't accidentally delete the wrong @@ -97,7 +96,6 @@ void unix::triggerInterrupt() } } - static sigset_t savedSignalMask; static bool savedSignalMaskIsSet = false; @@ -105,7 +103,8 @@ void unix::setChildSignalMask(sigset_t * sigs) { assert(sigs); // C style function, but think of sigs as a reference -#if (defined(_POSIX_C_SOURCE) && _POSIX_C_SOURCE >= 1) || (defined(_XOPEN_SOURCE) && _XOPEN_SOURCE) || (defined(_POSIX_SOURCE) && _POSIX_SOURCE) +#if (defined(_POSIX_C_SOURCE) && _POSIX_C_SOURCE >= 1) || (defined(_XOPEN_SOURCE) && _XOPEN_SOURCE) \ + || (defined(_POSIX_SOURCE) && _POSIX_SOURCE) sigemptyset(&savedSignalMask); // There's no "assign" or "copy" function, so we rely on (math) idempotence // of the or operator: a or a = a. @@ -120,7 +119,8 @@ void unix::setChildSignalMask(sigset_t * sigs) savedSignalMaskIsSet = true; } -void unix::saveSignalMask() { +void unix::saveSignalMask() +{ if (sigprocmask(SIG_BLOCK, nullptr, &savedSignalMask)) throw SysError("querying signal mask"); @@ -166,11 +166,11 @@ void unix::restoreSignals() throw SysError("restoring signals"); } - /* RAII helper to automatically deregister a callback. 
*/ struct InterruptCallbackImpl : InterruptCallback { InterruptCallbacks::Token token; + ~InterruptCallbackImpl() override { auto interruptCallbacks(_interruptCallbacks.lock()); @@ -184,10 +184,10 @@ std::unique_ptr createInterruptCallback(std::function auto token = interruptCallbacks->nextToken++; interruptCallbacks->callbacks.emplace(token, callback); - std::unique_ptr res {new InterruptCallbackImpl{}}; + std::unique_ptr res{new InterruptCallbackImpl{}}; res->token = token; return std::unique_ptr(res.release()); } -} +} // namespace nix diff --git a/src/libutil/unix/users.cc b/src/libutil/unix/users.cc index 5ac851e95..09b38be5e 100644 --- a/src/libutil/unix/users.cc +++ b/src/libutil/unix/users.cc @@ -23,16 +23,14 @@ Path getHomeOf(uid_t userId) std::vector buf(16384); struct passwd pwbuf; struct passwd * pw; - if (getpwuid_r(userId, &pwbuf, buf.data(), buf.size(), &pw) != 0 - || !pw || !pw->pw_dir || !pw->pw_dir[0]) + if (getpwuid_r(userId, &pwbuf, buf.data(), buf.size(), &pw) != 0 || !pw || !pw->pw_dir || !pw->pw_dir[0]) throw Error("cannot determine user's home directory"); return pw->pw_dir; } Path getHome() { - static Path homeDir = []() - { + static Path homeDir = []() { std::optional unownedUserHomeDir = {}; auto homeDir = getEnv("HOME"); if (homeDir) { @@ -41,7 +39,10 @@ Path getHome() int result = stat(homeDir->c_str(), &st); if (result != 0) { if (errno != ENOENT) { - warn("couldn't stat $HOME ('%s') for reason other than not existing ('%d'), falling back to the one defined in the 'passwd' file", *homeDir, errno); + warn( + "couldn't stat $HOME ('%s') for reason other than not existing ('%d'), falling back to the one defined in the 'passwd' file", + *homeDir, + errno); homeDir.reset(); } } else if (st.st_uid != geteuid()) { @@ -51,7 +52,10 @@ Path getHome() if (!homeDir) { homeDir = getHomeOf(geteuid()); if (unownedUserHomeDir.has_value() && unownedUserHomeDir != homeDir) { - warn("$HOME ('%s') is not owned by you, falling back to the one defined in the 'passwd' file ('%s')", *unownedUserHomeDir, *homeDir); + warn( + "$HOME ('%s') is not owned by you, falling back to the one defined in the 'passwd' file ('%s')", + *unownedUserHomeDir, + *homeDir); } } return *homeDir; @@ -59,8 +63,9 @@ Path getHome() return homeDir; } -bool isRootUser() { +bool isRootUser() +{ return getuid() == 0; } -} +} // namespace nix diff --git a/src/libutil/url.cc b/src/libutil/url.cc index b7286072d..eac0b188e 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -13,18 +13,15 @@ std::regex revRegex(revRegexS, std::regex::ECMAScript); ParsedURL parseURL(const std::string & url) { static std::regex uriRegex( - "((" + schemeNameRegex + "):" - + "(?:(?://(" + authorityRegex + ")(" + absPathRegex + "))|(/?" + pathRegex + ")))" - + "(?:\\?(" + queryRegex + "))?" - + "(?:#(" + fragmentRegex + "))?", + "((" + schemeNameRegex + "):" + "(?:(?://(" + authorityRegex + ")(" + absPathRegex + "))|(/?" + pathRegex + + ")))" + "(?:\\?(" + queryRegex + "))?" + "(?:#(" + fragmentRegex + "))?", std::regex::ECMAScript); std::smatch match; if (std::regex_match(url, match, uriRegex)) { std::string scheme = match[2]; - auto authority = match[3].matched - ? std::optional(match[3]) : std::nullopt; + auto authority = match[3].matched ? std::optional(match[3]) : std::nullopt; std::string path = match[4].matched ? 
match[4] : match[5]; auto & query = match[6]; auto & fragment = match[7]; @@ -32,8 +29,7 @@ ParsedURL parseURL(const std::string & url) auto transportIsFile = parseUrlScheme(scheme).transport == "file"; if (authority && *authority != "" && transportIsFile) - throw BadURL("file:// URL '%s' has unexpected authority '%s'", - url, *authority); + throw BadURL("file:// URL '%s' has unexpected authority '%s'", url, *authority); if (transportIsFile && path.empty()) path = "/"; @@ -43,8 +39,7 @@ ParsedURL parseURL(const std::string & url) .authority = authority, .path = percentDecode(path), .query = decodeQuery(query), - .fragment = percentDecode(std::string(fragment)) - }; + .fragment = percentDecode(std::string(fragment))}; } else @@ -54,7 +49,7 @@ ParsedURL parseURL(const std::string & url) std::string percentDecode(std::string_view in) { std::string decoded; - for (size_t i = 0; i < in.size(); ) { + for (size_t i = 0; i < in.size();) { if (in[i] == '%') { if (i + 2 >= in.size()) throw BadURL("invalid URI parameter '%s'", in); @@ -81,9 +76,7 @@ StringMap decodeQuery(const std::string & query) continue; } - result.emplace( - s.substr(0, e), - percentDecode(std::string_view(s).substr(e + 1))); + result.emplace(s.substr(0, e), percentDecode(std::string_view(s).substr(e + 1))); } return result; @@ -97,10 +90,7 @@ std::string percentEncode(std::string_view s, std::string_view keep) std::string res; for (auto & c : s) // unreserved + keep - if ((c >= 'a' && c <= 'z') - || (c >= 'A' && c <= 'Z') - || (c >= '0' && c <= '9') - || strchr("-._~", c) + if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || strchr("-._~", c) || keep.find(c) != std::string::npos) res += c; else @@ -113,7 +103,8 @@ std::string encodeQuery(const StringMap & ss) std::string res; bool first = true; for (auto & [name, value] : ss) { - if (!first) res += '&'; + if (!first) + res += '&'; first = false; res += percentEncode(name, allowedInQuery); res += '='; @@ -124,29 +115,20 @@ std::string encodeQuery(const StringMap & ss) std::string ParsedURL::to_string() const { - return - scheme - + ":" - + (authority ? "//" + *authority : "") - + percentEncode(path, allowedInPath) - + (query.empty() ? "" : "?" + encodeQuery(query)) - + (fragment.empty() ? "" : "#" + percentEncode(fragment)); + return scheme + ":" + (authority ? "//" + *authority : "") + percentEncode(path, allowedInPath) + + (query.empty() ? "" : "?" + encodeQuery(query)) + (fragment.empty() ? 
"" : "#" + percentEncode(fragment)); } -std::ostream & operator << (std::ostream & os, const ParsedURL & url) +std::ostream & operator<<(std::ostream & os, const ParsedURL & url) { os << url.to_string(); return os; } -bool ParsedURL::operator ==(const ParsedURL & other) const noexcept +bool ParsedURL::operator==(const ParsedURL & other) const noexcept { - return - scheme == other.scheme - && authority == other.authority - && path == other.path - && query == other.query - && fragment == other.fragment; + return scheme == other.scheme && authority == other.authority && path == other.path && query == other.query + && fragment == other.fragment; } ParsedURL ParsedURL::canonicalise() @@ -167,7 +149,7 @@ ParsedUrlScheme parseUrlScheme(std::string_view scheme) { auto application = splitPrefixTo(scheme, '+'); auto transport = scheme; - return ParsedUrlScheme { + return ParsedUrlScheme{ .application = application, .transport = transport, }; @@ -181,11 +163,7 @@ std::string fixGitURL(const std::string & url) if (hasPrefix(url, "file:")) return url; if (url.find("://") == std::string::npos) { - return (ParsedURL { - .scheme = "file", - .authority = "", - .path = url - }).to_string(); + return (ParsedURL{.scheme = "file", .authority = "", .path = url}).to_string(); } return url; } @@ -198,4 +176,4 @@ bool isValidSchemeName(std::string_view s) return std::regex_match(s.begin(), s.end(), regex, std::regex_constants::match_default); } -} +} // namespace nix diff --git a/src/libutil/users.cc b/src/libutil/users.cc index 5a5d740c6..f19a5d39c 100644 --- a/src/libutil/users.cc +++ b/src/libutil/users.cc @@ -20,7 +20,6 @@ Path getCacheDir() } } - Path getConfigDir() { auto dir = getEnv("NIX_CONFIG_HOME"); @@ -41,14 +40,13 @@ std::vector getConfigDirs() Path configHome = getConfigDir(); auto configDirs = getEnv("XDG_CONFIG_DIRS").value_or("/etc/xdg"); std::vector result = tokenizeString>(configDirs, ":"); - for (auto& p : result) { + for (auto & p : result) { p += "/nix"; } result.insert(result.begin(), configHome); return result; } - Path getDataDir() { auto dir = getEnv("NIX_DATA_HOME"); @@ -86,7 +84,6 @@ Path createNixStateDir() return dir; } - std::string expandTilde(std::string_view path) { // TODO: expand ~user ? @@ -97,4 +94,4 @@ std::string expandTilde(std::string_view path) return std::string(path); } -} +} // namespace nix diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 23dafe8c9..5cbbb80ee 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -13,12 +13,13 @@ #include #ifdef NDEBUG -#error "Nix may not be built with assertions disabled (i.e. with -DNDEBUG)." +# error "Nix may not be built with assertions disabled (i.e. with -DNDEBUG)." #endif namespace nix { -void initLibUtil() { +void initLibUtil() +{ // Check that exception handling works. Exception handling has been observed // not to work on darwin when the linker flags aren't quite right. // In this case we don't want to expose the user to some unrelated uncaught @@ -27,7 +28,8 @@ void initLibUtil() { // When exception handling fails, the message tends to be printed by the // C++ runtime, followed by an abort. // For example on macOS we might see an error such as - // libc++abi: terminating with uncaught exception of type nix::SystemError: error: C++ exception handling is broken. This would appear to be a problem with the way Nix was compiled and/or linked and/or loaded. + // libc++abi: terminating with uncaught exception of type nix::SystemError: error: C++ exception handling is broken. 
+ // This would appear to be a problem with the way Nix was compiled and/or linked and/or loaded. bool caught = false; try { throwExceptionSelfCheck(); @@ -46,37 +48,33 @@ void initLibUtil() { std::vector stringsToCharPtrs(const Strings & ss) { std::vector res; - for (auto & s : ss) res.push_back((char *) s.c_str()); + for (auto & s : ss) + res.push_back((char *) s.c_str()); res.push_back(0); return res; } - ////////////////////////////////////////////////////////////////////// - std::string chomp(std::string_view s) { size_t i = s.find_last_not_of(" \n\r\t"); return i == s.npos ? "" : std::string(s, 0, i + 1); } - std::string trim(std::string_view s, std::string_view whitespace) { auto i = s.find_first_not_of(whitespace); - if (i == s.npos) return ""; + if (i == s.npos) + return ""; auto j = s.find_last_not_of(whitespace); return std::string(s, i, j == s.npos ? j : j - i + 1); } - -std::string replaceStrings( - std::string res, - std::string_view from, - std::string_view to) +std::string replaceStrings(std::string res, std::string_view from, std::string_view to) { - if (from.empty()) return res; + if (from.empty()) + return res; size_t pos = 0; while ((pos = res.find(from, pos)) != res.npos) { res.replace(pos, from.size(), to); @@ -85,11 +83,11 @@ std::string replaceStrings( return res; } - std::string rewriteStrings(std::string s, const StringMap & rewrites) { for (auto & i : rewrites) { - if (i.first == i.second) continue; + if (i.first == i.second) + continue; size_t j = 0; while ((j = s.find(i.first, j)) != s.npos) s.replace(j, i.first.size(), i.second); @@ -110,7 +108,7 @@ std::optional string2Int(const std::string_view s) } // Explicitly instantiated in one place for faster compilation -template std::optional string2Int(const std::string_view s); +template std::optional string2Int(const std::string_view s); template std::optional string2Int(const std::string_view s); template std::optional string2Int(const std::string_view s); template std::optional string2Int(const std::string_view s); @@ -134,12 +132,9 @@ std::optional string2Float(const std::string_view s) template std::optional string2Float(const std::string_view s); template std::optional string2Float(const std::string_view s); - std::string renderSize(uint64_t value, bool align) { - static const std::array prefixes{{ - 'K', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y' - }}; + static const std::array prefixes{{'K', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'}}; size_t power = 0; double res = value; while (res > 1024 && power < prefixes.size()) { @@ -149,20 +144,16 @@ std::string renderSize(uint64_t value, bool align) return fmt(align ? "%6.1f %ciB" : "%.1f %ciB", power == 0 ? res / 1024 : res, prefixes.at(power)); } - bool hasPrefix(std::string_view s, std::string_view prefix) { return s.compare(0, prefix.size(), prefix) == 0; } - bool hasSuffix(std::string_view s, std::string_view suffix) { - return s.size() >= suffix.size() - && s.substr(s.size() - suffix.size()) == suffix; + return s.size() >= suffix.size() && s.substr(s.size() - suffix.size()) == suffix; } - std::string toLower(std::string s) { for (auto & c : s) @@ -170,19 +161,20 @@ std::string toLower(std::string s) return s; } - std::string escapeShellArgAlways(const std::string_view s) { std::string r; r.reserve(s.size() + 2); r += '\''; for (auto & i : s) - if (i == '\'') r += "'\\''"; else r += i; + if (i == '\'') + r += "'\\''"; + else + r += i; r += '\''; return r; } - void ignoreExceptionInDestructor(Verbosity lvl) { /* Make sure no exceptions leave this function. 
@@ -195,7 +187,8 @@ void ignoreExceptionInDestructor(Verbosity lvl) } catch (std::exception & e) { printMsg(lvl, ANSI_RED "error (ignored):" ANSI_NORMAL " %s", e.what()); } - } catch (...) { } + } catch (...) { + } } void ignoreExceptionExceptInterrupt(Verbosity lvl) @@ -211,7 +204,6 @@ void ignoreExceptionExceptInterrupt(Verbosity lvl) } } - constexpr char base64Chars[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; std::string base64Encode(std::string_view s) @@ -229,19 +221,20 @@ std::string base64Encode(std::string_view s) } } - if (nbits) res.push_back(base64Chars[data << (6 - nbits) & 0x3f]); - while (res.size() % 4) res.push_back('='); + if (nbits) + res.push_back(base64Chars[data << (6 - nbits) & 0x3f]); + while (res.size() % 4) + res.push_back('='); return res; } - std::string base64Decode(std::string_view s) { constexpr char npos = -1; constexpr std::array base64DecodeChars = [&] { - std::array result{}; - for (auto& c : result) + std::array result{}; + for (auto & c : result) c = npos; for (int i = 0; i < 64; i++) result[base64Chars[i]] = i; @@ -255,8 +248,10 @@ std::string base64Decode(std::string_view s) unsigned int d = 0, bits = 0; for (char c : s) { - if (c == '=') break; - if (c == '\n') continue; + if (c == '=') + break; + if (c == '\n') + continue; char digit = base64DecodeChars[(unsigned char) c]; if (digit == npos) @@ -273,7 +268,6 @@ std::string base64Decode(std::string_view s) return res; } - std::string stripIndentation(std::string_view s) { size_t minIndent = 10000; @@ -301,7 +295,8 @@ std::string stripIndentation(std::string_view s) size_t pos = 0; while (pos < s.size()) { auto eol = s.find('\n', pos); - if (eol == s.npos) eol = s.size(); + if (eol == s.npos) + eol = s.size(); if (eol - pos > minIndent) res.append(s.substr(pos + minIndent, eol - pos - minIndent)); res.push_back('\n'); @@ -311,7 +306,6 @@ std::string stripIndentation(std::string_view s) return res; } - std::pair getLine(std::string_view s) { auto newline = s.find('\n'); @@ -326,10 +320,9 @@ std::pair getLine(std::string_view s) } } - std::string showBytes(uint64_t bytes) { return fmt("%.2f MiB", bytes / (1024.0 * 1024.0)); } -} +} // namespace nix diff --git a/src/libutil/widecharwidth/widechar_width.h b/src/libutil/widecharwidth/widechar_width.h index 92e63e913..d2416c04e 100644 --- a/src/libutil/widecharwidth/widechar_width.h +++ b/src/libutil/widecharwidth/widechar_width.h @@ -30,1406 +30,318 @@ namespace { /* Special width values */ enum { - widechar_nonprint = -1, // The character is not printable. - widechar_combining = -2, // The character is a zero-width combiner. - widechar_ambiguous = -3, // The character is East-Asian ambiguous width. - widechar_private_use = -4, // The character is for private use. - widechar_unassigned = -5, // The character is unassigned. - widechar_widened_in_9 = -6, // Width is 1 in Unicode 8, 2 in Unicode 9+. - widechar_non_character = -7 // The character is a noncharacter. + widechar_nonprint = -1, // The character is not printable. + widechar_combining = -2, // The character is a zero-width combiner. + widechar_ambiguous = -3, // The character is East-Asian ambiguous width. + widechar_private_use = -4, // The character is for private use. + widechar_unassigned = -5, // The character is unassigned. + widechar_widened_in_9 = -6, // Width is 1 in Unicode 8, 2 in Unicode 9+. + widechar_non_character = -7 // The character is a noncharacter. }; /* An inclusive range of characters. 
*/ -struct widechar_range { - uint32_t lo; - uint32_t hi; +struct widechar_range +{ + uint32_t lo; + uint32_t hi; }; /* Simple ASCII characters - used a lot, so we check them first. */ -static const struct widechar_range widechar_ascii_table[] = { - {0x00020, 0x0007E} -}; +static const struct widechar_range widechar_ascii_table[] = {{0x00020, 0x0007E}}; /* Private usage range. */ static const struct widechar_range widechar_private_table[] = { - {0x0E000, 0x0F8FF}, - {0xF0000, 0xFFFFD}, - {0x100000, 0x10FFFD} -}; + {0x0E000, 0x0F8FF}, {0xF0000, 0xFFFFD}, {0x100000, 0x10FFFD}}; /* Nonprinting characters. */ static const struct widechar_range widechar_nonprint_table[] = { - {0x00000, 0x0001F}, - {0x0007F, 0x0009F}, - {0x000AD, 0x000AD}, - {0x00600, 0x00605}, - {0x0061C, 0x0061C}, - {0x006DD, 0x006DD}, - {0x0070F, 0x0070F}, - {0x00890, 0x00891}, - {0x008E2, 0x008E2}, - {0x0180E, 0x0180E}, - {0x0200B, 0x0200F}, - {0x02028, 0x0202E}, - {0x02060, 0x02064}, - {0x02066, 0x0206F}, - {0x0D800, 0x0DFFF}, - {0x0FEFF, 0x0FEFF}, - {0x0FFF9, 0x0FFFB}, - {0x110BD, 0x110BD}, - {0x110CD, 0x110CD}, - {0x13430, 0x1343F}, - {0x1BCA0, 0x1BCA3}, - {0x1D173, 0x1D17A}, - {0xE0001, 0xE0001}, - {0xE0020, 0xE007F} -}; + {0x00000, 0x0001F}, {0x0007F, 0x0009F}, {0x000AD, 0x000AD}, {0x00600, 0x00605}, {0x0061C, 0x0061C}, + {0x006DD, 0x006DD}, {0x0070F, 0x0070F}, {0x00890, 0x00891}, {0x008E2, 0x008E2}, {0x0180E, 0x0180E}, + {0x0200B, 0x0200F}, {0x02028, 0x0202E}, {0x02060, 0x02064}, {0x02066, 0x0206F}, {0x0D800, 0x0DFFF}, + {0x0FEFF, 0x0FEFF}, {0x0FFF9, 0x0FFFB}, {0x110BD, 0x110BD}, {0x110CD, 0x110CD}, {0x13430, 0x1343F}, + {0x1BCA0, 0x1BCA3}, {0x1D173, 0x1D17A}, {0xE0001, 0xE0001}, {0xE0020, 0xE007F}}; /* Width 0 combining marks. */ static const struct widechar_range widechar_combining_table[] = { - {0x00300, 0x0036F}, - {0x00483, 0x00489}, - {0x00591, 0x005BD}, - {0x005BF, 0x005BF}, - {0x005C1, 0x005C2}, - {0x005C4, 0x005C5}, - {0x005C7, 0x005C7}, - {0x00610, 0x0061A}, - {0x0064B, 0x0065F}, - {0x00670, 0x00670}, - {0x006D6, 0x006DC}, - {0x006DF, 0x006E4}, - {0x006E7, 0x006E8}, - {0x006EA, 0x006ED}, - {0x00711, 0x00711}, - {0x00730, 0x0074A}, - {0x007A6, 0x007B0}, - {0x007EB, 0x007F3}, - {0x007FD, 0x007FD}, - {0x00816, 0x00819}, - {0x0081B, 0x00823}, - {0x00825, 0x00827}, - {0x00829, 0x0082D}, - {0x00859, 0x0085B}, - {0x00897, 0x0089F}, - {0x008CA, 0x008E1}, - {0x008E3, 0x00903}, - {0x0093A, 0x0093C}, - {0x0093E, 0x0094F}, - {0x00951, 0x00957}, - {0x00962, 0x00963}, - {0x00981, 0x00983}, - {0x009BC, 0x009BC}, - {0x009BE, 0x009C4}, - {0x009C7, 0x009C8}, - {0x009CB, 0x009CD}, - {0x009D7, 0x009D7}, - {0x009E2, 0x009E3}, - {0x009FE, 0x009FE}, - {0x00A01, 0x00A03}, - {0x00A3C, 0x00A3C}, - {0x00A3E, 0x00A42}, - {0x00A47, 0x00A48}, - {0x00A4B, 0x00A4D}, - {0x00A51, 0x00A51}, - {0x00A70, 0x00A71}, - {0x00A75, 0x00A75}, - {0x00A81, 0x00A83}, - {0x00ABC, 0x00ABC}, - {0x00ABE, 0x00AC5}, - {0x00AC7, 0x00AC9}, - {0x00ACB, 0x00ACD}, - {0x00AE2, 0x00AE3}, - {0x00AFA, 0x00AFF}, - {0x00B01, 0x00B03}, - {0x00B3C, 0x00B3C}, - {0x00B3E, 0x00B44}, - {0x00B47, 0x00B48}, - {0x00B4B, 0x00B4D}, - {0x00B55, 0x00B57}, - {0x00B62, 0x00B63}, - {0x00B82, 0x00B82}, - {0x00BBE, 0x00BC2}, - {0x00BC6, 0x00BC8}, - {0x00BCA, 0x00BCD}, - {0x00BD7, 0x00BD7}, - {0x00C00, 0x00C04}, - {0x00C3C, 0x00C3C}, - {0x00C3E, 0x00C44}, - {0x00C46, 0x00C48}, - {0x00C4A, 0x00C4D}, - {0x00C55, 0x00C56}, - {0x00C62, 0x00C63}, - {0x00C81, 0x00C83}, - {0x00CBC, 0x00CBC}, - {0x00CBE, 0x00CC4}, - {0x00CC6, 0x00CC8}, - {0x00CCA, 0x00CCD}, - {0x00CD5, 0x00CD6}, - {0x00CE2, 
0x00CE3}, - {0x00CF3, 0x00CF3}, - {0x00D00, 0x00D03}, - {0x00D3B, 0x00D3C}, - {0x00D3E, 0x00D44}, - {0x00D46, 0x00D48}, - {0x00D4A, 0x00D4D}, - {0x00D57, 0x00D57}, - {0x00D62, 0x00D63}, - {0x00D81, 0x00D83}, - {0x00DCA, 0x00DCA}, - {0x00DCF, 0x00DD4}, - {0x00DD6, 0x00DD6}, - {0x00DD8, 0x00DDF}, - {0x00DF2, 0x00DF3}, - {0x00E31, 0x00E31}, - {0x00E34, 0x00E3A}, - {0x00E47, 0x00E4E}, - {0x00EB1, 0x00EB1}, - {0x00EB4, 0x00EBC}, - {0x00EC8, 0x00ECE}, - {0x00F18, 0x00F19}, - {0x00F35, 0x00F35}, - {0x00F37, 0x00F37}, - {0x00F39, 0x00F39}, - {0x00F3E, 0x00F3F}, - {0x00F71, 0x00F84}, - {0x00F86, 0x00F87}, - {0x00F8D, 0x00F97}, - {0x00F99, 0x00FBC}, - {0x00FC6, 0x00FC6}, - {0x0102B, 0x0103E}, - {0x01056, 0x01059}, - {0x0105E, 0x01060}, - {0x01062, 0x01064}, - {0x01067, 0x0106D}, - {0x01071, 0x01074}, - {0x01082, 0x0108D}, - {0x0108F, 0x0108F}, - {0x0109A, 0x0109D}, - {0x0135D, 0x0135F}, - {0x01712, 0x01715}, - {0x01732, 0x01734}, - {0x01752, 0x01753}, - {0x01772, 0x01773}, - {0x017B4, 0x017D3}, - {0x017DD, 0x017DD}, - {0x0180B, 0x0180D}, - {0x0180F, 0x0180F}, - {0x01885, 0x01886}, - {0x018A9, 0x018A9}, - {0x01920, 0x0192B}, - {0x01930, 0x0193B}, - {0x01A17, 0x01A1B}, - {0x01A55, 0x01A5E}, - {0x01A60, 0x01A7C}, - {0x01A7F, 0x01A7F}, - {0x01AB0, 0x01ACE}, - {0x01B00, 0x01B04}, - {0x01B34, 0x01B44}, - {0x01B6B, 0x01B73}, - {0x01B80, 0x01B82}, - {0x01BA1, 0x01BAD}, - {0x01BE6, 0x01BF3}, - {0x01C24, 0x01C37}, - {0x01CD0, 0x01CD2}, - {0x01CD4, 0x01CE8}, - {0x01CED, 0x01CED}, - {0x01CF4, 0x01CF4}, - {0x01CF7, 0x01CF9}, - {0x01DC0, 0x01DFF}, - {0x020D0, 0x020F0}, - {0x02CEF, 0x02CF1}, - {0x02D7F, 0x02D7F}, - {0x02DE0, 0x02DFF}, - {0x0302A, 0x0302F}, - {0x03099, 0x0309A}, - {0x0A66F, 0x0A672}, - {0x0A674, 0x0A67D}, - {0x0A69E, 0x0A69F}, - {0x0A6F0, 0x0A6F1}, - {0x0A802, 0x0A802}, - {0x0A806, 0x0A806}, - {0x0A80B, 0x0A80B}, - {0x0A823, 0x0A827}, - {0x0A82C, 0x0A82C}, - {0x0A880, 0x0A881}, - {0x0A8B4, 0x0A8C5}, - {0x0A8E0, 0x0A8F1}, - {0x0A8FF, 0x0A8FF}, - {0x0A926, 0x0A92D}, - {0x0A947, 0x0A953}, - {0x0A980, 0x0A983}, - {0x0A9B3, 0x0A9C0}, - {0x0A9E5, 0x0A9E5}, - {0x0AA29, 0x0AA36}, - {0x0AA43, 0x0AA43}, - {0x0AA4C, 0x0AA4D}, - {0x0AA7B, 0x0AA7D}, - {0x0AAB0, 0x0AAB0}, - {0x0AAB2, 0x0AAB4}, - {0x0AAB7, 0x0AAB8}, - {0x0AABE, 0x0AABF}, - {0x0AAC1, 0x0AAC1}, - {0x0AAEB, 0x0AAEF}, - {0x0AAF5, 0x0AAF6}, - {0x0ABE3, 0x0ABEA}, - {0x0ABEC, 0x0ABED}, - {0x0FB1E, 0x0FB1E}, - {0x0FE00, 0x0FE0F}, - {0x0FE20, 0x0FE2F}, - {0x101FD, 0x101FD}, - {0x102E0, 0x102E0}, - {0x10376, 0x1037A}, - {0x10A01, 0x10A03}, - {0x10A05, 0x10A06}, - {0x10A0C, 0x10A0F}, - {0x10A38, 0x10A3A}, - {0x10A3F, 0x10A3F}, - {0x10AE5, 0x10AE6}, - {0x10D24, 0x10D27}, - {0x10D69, 0x10D6D}, - {0x10EAB, 0x10EAC}, - {0x10EFC, 0x10EFF}, - {0x10F46, 0x10F50}, - {0x10F82, 0x10F85}, - {0x11000, 0x11002}, - {0x11038, 0x11046}, - {0x11070, 0x11070}, - {0x11073, 0x11074}, - {0x1107F, 0x11082}, - {0x110B0, 0x110BA}, - {0x110C2, 0x110C2}, - {0x11100, 0x11102}, - {0x11127, 0x11134}, - {0x11145, 0x11146}, - {0x11173, 0x11173}, - {0x11180, 0x11182}, - {0x111B3, 0x111C0}, - {0x111C9, 0x111CC}, - {0x111CE, 0x111CF}, - {0x1122C, 0x11237}, - {0x1123E, 0x1123E}, - {0x11241, 0x11241}, - {0x112DF, 0x112EA}, - {0x11300, 0x11303}, - {0x1133B, 0x1133C}, - {0x1133E, 0x11344}, - {0x11347, 0x11348}, - {0x1134B, 0x1134D}, - {0x11357, 0x11357}, - {0x11362, 0x11363}, - {0x11366, 0x1136C}, - {0x11370, 0x11374}, - {0x113B8, 0x113C0}, - {0x113C2, 0x113C2}, - {0x113C5, 0x113C5}, - {0x113C7, 0x113CA}, - {0x113CC, 0x113D0}, - {0x113D2, 0x113D2}, - {0x113E1, 0x113E2}, - {0x11435, 0x11446}, - 
{0x1145E, 0x1145E}, - {0x114B0, 0x114C3}, - {0x115AF, 0x115B5}, - {0x115B8, 0x115C0}, - {0x115DC, 0x115DD}, - {0x11630, 0x11640}, - {0x116AB, 0x116B7}, - {0x1171D, 0x1172B}, - {0x1182C, 0x1183A}, - {0x11930, 0x11935}, - {0x11937, 0x11938}, - {0x1193B, 0x1193E}, - {0x11940, 0x11940}, - {0x11942, 0x11943}, - {0x119D1, 0x119D7}, - {0x119DA, 0x119E0}, - {0x119E4, 0x119E4}, - {0x11A01, 0x11A0A}, - {0x11A33, 0x11A39}, - {0x11A3B, 0x11A3E}, - {0x11A47, 0x11A47}, - {0x11A51, 0x11A5B}, - {0x11A8A, 0x11A99}, - {0x11C2F, 0x11C36}, - {0x11C38, 0x11C3F}, - {0x11C92, 0x11CA7}, - {0x11CA9, 0x11CB6}, - {0x11D31, 0x11D36}, - {0x11D3A, 0x11D3A}, - {0x11D3C, 0x11D3D}, - {0x11D3F, 0x11D45}, - {0x11D47, 0x11D47}, - {0x11D8A, 0x11D8E}, - {0x11D90, 0x11D91}, - {0x11D93, 0x11D97}, - {0x11EF3, 0x11EF6}, - {0x11F00, 0x11F01}, - {0x11F03, 0x11F03}, - {0x11F34, 0x11F3A}, - {0x11F3E, 0x11F42}, - {0x11F5A, 0x11F5A}, - {0x13440, 0x13440}, - {0x13447, 0x13455}, - {0x1611E, 0x1612F}, - {0x16AF0, 0x16AF4}, - {0x16B30, 0x16B36}, - {0x16F4F, 0x16F4F}, - {0x16F51, 0x16F87}, - {0x16F8F, 0x16F92}, - {0x16FE4, 0x16FE4}, - {0x16FF0, 0x16FF1}, - {0x1BC9D, 0x1BC9E}, - {0x1CF00, 0x1CF2D}, - {0x1CF30, 0x1CF46}, - {0x1D165, 0x1D169}, - {0x1D16D, 0x1D172}, - {0x1D17B, 0x1D182}, - {0x1D185, 0x1D18B}, - {0x1D1AA, 0x1D1AD}, - {0x1D242, 0x1D244}, - {0x1DA00, 0x1DA36}, - {0x1DA3B, 0x1DA6C}, - {0x1DA75, 0x1DA75}, - {0x1DA84, 0x1DA84}, - {0x1DA9B, 0x1DA9F}, - {0x1DAA1, 0x1DAAF}, - {0x1E000, 0x1E006}, - {0x1E008, 0x1E018}, - {0x1E01B, 0x1E021}, - {0x1E023, 0x1E024}, - {0x1E026, 0x1E02A}, - {0x1E08F, 0x1E08F}, - {0x1E130, 0x1E136}, - {0x1E2AE, 0x1E2AE}, - {0x1E2EC, 0x1E2EF}, - {0x1E4EC, 0x1E4EF}, - {0x1E5EE, 0x1E5EF}, - {0x1E8D0, 0x1E8D6}, - {0x1E944, 0x1E94A}, - {0xE0100, 0xE01EF} -}; + {0x00300, 0x0036F}, {0x00483, 0x00489}, {0x00591, 0x005BD}, {0x005BF, 0x005BF}, {0x005C1, 0x005C2}, + {0x005C4, 0x005C5}, {0x005C7, 0x005C7}, {0x00610, 0x0061A}, {0x0064B, 0x0065F}, {0x00670, 0x00670}, + {0x006D6, 0x006DC}, {0x006DF, 0x006E4}, {0x006E7, 0x006E8}, {0x006EA, 0x006ED}, {0x00711, 0x00711}, + {0x00730, 0x0074A}, {0x007A6, 0x007B0}, {0x007EB, 0x007F3}, {0x007FD, 0x007FD}, {0x00816, 0x00819}, + {0x0081B, 0x00823}, {0x00825, 0x00827}, {0x00829, 0x0082D}, {0x00859, 0x0085B}, {0x00897, 0x0089F}, + {0x008CA, 0x008E1}, {0x008E3, 0x00903}, {0x0093A, 0x0093C}, {0x0093E, 0x0094F}, {0x00951, 0x00957}, + {0x00962, 0x00963}, {0x00981, 0x00983}, {0x009BC, 0x009BC}, {0x009BE, 0x009C4}, {0x009C7, 0x009C8}, + {0x009CB, 0x009CD}, {0x009D7, 0x009D7}, {0x009E2, 0x009E3}, {0x009FE, 0x009FE}, {0x00A01, 0x00A03}, + {0x00A3C, 0x00A3C}, {0x00A3E, 0x00A42}, {0x00A47, 0x00A48}, {0x00A4B, 0x00A4D}, {0x00A51, 0x00A51}, + {0x00A70, 0x00A71}, {0x00A75, 0x00A75}, {0x00A81, 0x00A83}, {0x00ABC, 0x00ABC}, {0x00ABE, 0x00AC5}, + {0x00AC7, 0x00AC9}, {0x00ACB, 0x00ACD}, {0x00AE2, 0x00AE3}, {0x00AFA, 0x00AFF}, {0x00B01, 0x00B03}, + {0x00B3C, 0x00B3C}, {0x00B3E, 0x00B44}, {0x00B47, 0x00B48}, {0x00B4B, 0x00B4D}, {0x00B55, 0x00B57}, + {0x00B62, 0x00B63}, {0x00B82, 0x00B82}, {0x00BBE, 0x00BC2}, {0x00BC6, 0x00BC8}, {0x00BCA, 0x00BCD}, + {0x00BD7, 0x00BD7}, {0x00C00, 0x00C04}, {0x00C3C, 0x00C3C}, {0x00C3E, 0x00C44}, {0x00C46, 0x00C48}, + {0x00C4A, 0x00C4D}, {0x00C55, 0x00C56}, {0x00C62, 0x00C63}, {0x00C81, 0x00C83}, {0x00CBC, 0x00CBC}, + {0x00CBE, 0x00CC4}, {0x00CC6, 0x00CC8}, {0x00CCA, 0x00CCD}, {0x00CD5, 0x00CD6}, {0x00CE2, 0x00CE3}, + {0x00CF3, 0x00CF3}, {0x00D00, 0x00D03}, {0x00D3B, 0x00D3C}, {0x00D3E, 0x00D44}, {0x00D46, 0x00D48}, + {0x00D4A, 0x00D4D}, {0x00D57, 0x00D57}, {0x00D62, 
0x00D63}, {0x00D81, 0x00D83}, {0x00DCA, 0x00DCA}, + {0x00DCF, 0x00DD4}, {0x00DD6, 0x00DD6}, {0x00DD8, 0x00DDF}, {0x00DF2, 0x00DF3}, {0x00E31, 0x00E31}, + {0x00E34, 0x00E3A}, {0x00E47, 0x00E4E}, {0x00EB1, 0x00EB1}, {0x00EB4, 0x00EBC}, {0x00EC8, 0x00ECE}, + {0x00F18, 0x00F19}, {0x00F35, 0x00F35}, {0x00F37, 0x00F37}, {0x00F39, 0x00F39}, {0x00F3E, 0x00F3F}, + {0x00F71, 0x00F84}, {0x00F86, 0x00F87}, {0x00F8D, 0x00F97}, {0x00F99, 0x00FBC}, {0x00FC6, 0x00FC6}, + {0x0102B, 0x0103E}, {0x01056, 0x01059}, {0x0105E, 0x01060}, {0x01062, 0x01064}, {0x01067, 0x0106D}, + {0x01071, 0x01074}, {0x01082, 0x0108D}, {0x0108F, 0x0108F}, {0x0109A, 0x0109D}, {0x0135D, 0x0135F}, + {0x01712, 0x01715}, {0x01732, 0x01734}, {0x01752, 0x01753}, {0x01772, 0x01773}, {0x017B4, 0x017D3}, + {0x017DD, 0x017DD}, {0x0180B, 0x0180D}, {0x0180F, 0x0180F}, {0x01885, 0x01886}, {0x018A9, 0x018A9}, + {0x01920, 0x0192B}, {0x01930, 0x0193B}, {0x01A17, 0x01A1B}, {0x01A55, 0x01A5E}, {0x01A60, 0x01A7C}, + {0x01A7F, 0x01A7F}, {0x01AB0, 0x01ACE}, {0x01B00, 0x01B04}, {0x01B34, 0x01B44}, {0x01B6B, 0x01B73}, + {0x01B80, 0x01B82}, {0x01BA1, 0x01BAD}, {0x01BE6, 0x01BF3}, {0x01C24, 0x01C37}, {0x01CD0, 0x01CD2}, + {0x01CD4, 0x01CE8}, {0x01CED, 0x01CED}, {0x01CF4, 0x01CF4}, {0x01CF7, 0x01CF9}, {0x01DC0, 0x01DFF}, + {0x020D0, 0x020F0}, {0x02CEF, 0x02CF1}, {0x02D7F, 0x02D7F}, {0x02DE0, 0x02DFF}, {0x0302A, 0x0302F}, + {0x03099, 0x0309A}, {0x0A66F, 0x0A672}, {0x0A674, 0x0A67D}, {0x0A69E, 0x0A69F}, {0x0A6F0, 0x0A6F1}, + {0x0A802, 0x0A802}, {0x0A806, 0x0A806}, {0x0A80B, 0x0A80B}, {0x0A823, 0x0A827}, {0x0A82C, 0x0A82C}, + {0x0A880, 0x0A881}, {0x0A8B4, 0x0A8C5}, {0x0A8E0, 0x0A8F1}, {0x0A8FF, 0x0A8FF}, {0x0A926, 0x0A92D}, + {0x0A947, 0x0A953}, {0x0A980, 0x0A983}, {0x0A9B3, 0x0A9C0}, {0x0A9E5, 0x0A9E5}, {0x0AA29, 0x0AA36}, + {0x0AA43, 0x0AA43}, {0x0AA4C, 0x0AA4D}, {0x0AA7B, 0x0AA7D}, {0x0AAB0, 0x0AAB0}, {0x0AAB2, 0x0AAB4}, + {0x0AAB7, 0x0AAB8}, {0x0AABE, 0x0AABF}, {0x0AAC1, 0x0AAC1}, {0x0AAEB, 0x0AAEF}, {0x0AAF5, 0x0AAF6}, + {0x0ABE3, 0x0ABEA}, {0x0ABEC, 0x0ABED}, {0x0FB1E, 0x0FB1E}, {0x0FE00, 0x0FE0F}, {0x0FE20, 0x0FE2F}, + {0x101FD, 0x101FD}, {0x102E0, 0x102E0}, {0x10376, 0x1037A}, {0x10A01, 0x10A03}, {0x10A05, 0x10A06}, + {0x10A0C, 0x10A0F}, {0x10A38, 0x10A3A}, {0x10A3F, 0x10A3F}, {0x10AE5, 0x10AE6}, {0x10D24, 0x10D27}, + {0x10D69, 0x10D6D}, {0x10EAB, 0x10EAC}, {0x10EFC, 0x10EFF}, {0x10F46, 0x10F50}, {0x10F82, 0x10F85}, + {0x11000, 0x11002}, {0x11038, 0x11046}, {0x11070, 0x11070}, {0x11073, 0x11074}, {0x1107F, 0x11082}, + {0x110B0, 0x110BA}, {0x110C2, 0x110C2}, {0x11100, 0x11102}, {0x11127, 0x11134}, {0x11145, 0x11146}, + {0x11173, 0x11173}, {0x11180, 0x11182}, {0x111B3, 0x111C0}, {0x111C9, 0x111CC}, {0x111CE, 0x111CF}, + {0x1122C, 0x11237}, {0x1123E, 0x1123E}, {0x11241, 0x11241}, {0x112DF, 0x112EA}, {0x11300, 0x11303}, + {0x1133B, 0x1133C}, {0x1133E, 0x11344}, {0x11347, 0x11348}, {0x1134B, 0x1134D}, {0x11357, 0x11357}, + {0x11362, 0x11363}, {0x11366, 0x1136C}, {0x11370, 0x11374}, {0x113B8, 0x113C0}, {0x113C2, 0x113C2}, + {0x113C5, 0x113C5}, {0x113C7, 0x113CA}, {0x113CC, 0x113D0}, {0x113D2, 0x113D2}, {0x113E1, 0x113E2}, + {0x11435, 0x11446}, {0x1145E, 0x1145E}, {0x114B0, 0x114C3}, {0x115AF, 0x115B5}, {0x115B8, 0x115C0}, + {0x115DC, 0x115DD}, {0x11630, 0x11640}, {0x116AB, 0x116B7}, {0x1171D, 0x1172B}, {0x1182C, 0x1183A}, + {0x11930, 0x11935}, {0x11937, 0x11938}, {0x1193B, 0x1193E}, {0x11940, 0x11940}, {0x11942, 0x11943}, + {0x119D1, 0x119D7}, {0x119DA, 0x119E0}, {0x119E4, 0x119E4}, {0x11A01, 0x11A0A}, {0x11A33, 0x11A39}, + {0x11A3B, 0x11A3E}, {0x11A47, 
0x11A47}, {0x11A51, 0x11A5B}, {0x11A8A, 0x11A99}, {0x11C2F, 0x11C36}, + {0x11C38, 0x11C3F}, {0x11C92, 0x11CA7}, {0x11CA9, 0x11CB6}, {0x11D31, 0x11D36}, {0x11D3A, 0x11D3A}, + {0x11D3C, 0x11D3D}, {0x11D3F, 0x11D45}, {0x11D47, 0x11D47}, {0x11D8A, 0x11D8E}, {0x11D90, 0x11D91}, + {0x11D93, 0x11D97}, {0x11EF3, 0x11EF6}, {0x11F00, 0x11F01}, {0x11F03, 0x11F03}, {0x11F34, 0x11F3A}, + {0x11F3E, 0x11F42}, {0x11F5A, 0x11F5A}, {0x13440, 0x13440}, {0x13447, 0x13455}, {0x1611E, 0x1612F}, + {0x16AF0, 0x16AF4}, {0x16B30, 0x16B36}, {0x16F4F, 0x16F4F}, {0x16F51, 0x16F87}, {0x16F8F, 0x16F92}, + {0x16FE4, 0x16FE4}, {0x16FF0, 0x16FF1}, {0x1BC9D, 0x1BC9E}, {0x1CF00, 0x1CF2D}, {0x1CF30, 0x1CF46}, + {0x1D165, 0x1D169}, {0x1D16D, 0x1D172}, {0x1D17B, 0x1D182}, {0x1D185, 0x1D18B}, {0x1D1AA, 0x1D1AD}, + {0x1D242, 0x1D244}, {0x1DA00, 0x1DA36}, {0x1DA3B, 0x1DA6C}, {0x1DA75, 0x1DA75}, {0x1DA84, 0x1DA84}, + {0x1DA9B, 0x1DA9F}, {0x1DAA1, 0x1DAAF}, {0x1E000, 0x1E006}, {0x1E008, 0x1E018}, {0x1E01B, 0x1E021}, + {0x1E023, 0x1E024}, {0x1E026, 0x1E02A}, {0x1E08F, 0x1E08F}, {0x1E130, 0x1E136}, {0x1E2AE, 0x1E2AE}, + {0x1E2EC, 0x1E2EF}, {0x1E4EC, 0x1E4EF}, {0x1E5EE, 0x1E5EF}, {0x1E8D0, 0x1E8D6}, {0x1E944, 0x1E94A}, + {0xE0100, 0xE01EF}}; /* Width 0 combining letters. */ -static const struct widechar_range widechar_combiningletters_table[] = { - {0x01160, 0x011FF}, - {0x0D7B0, 0x0D7FF} -}; +static const struct widechar_range widechar_combiningletters_table[] = {{0x01160, 0x011FF}, {0x0D7B0, 0x0D7FF}}; /* Width 2 characters. */ static const struct widechar_range widechar_doublewide_table[] = { - {0x01100, 0x0115F}, - {0x02329, 0x0232A}, - {0x02630, 0x02637}, - {0x0268A, 0x0268F}, - {0x02E80, 0x02E99}, - {0x02E9B, 0x02EF3}, - {0x02F00, 0x02FD5}, - {0x02FF0, 0x0303E}, - {0x03041, 0x03096}, - {0x03099, 0x030FF}, - {0x03105, 0x0312F}, - {0x03131, 0x0318E}, - {0x03190, 0x031E5}, - {0x031EF, 0x0321E}, - {0x03220, 0x03247}, - {0x03250, 0x0A48C}, - {0x0A490, 0x0A4C6}, - {0x0A960, 0x0A97C}, - {0x0AC00, 0x0D7A3}, - {0x0F900, 0x0FAFF}, - {0x0FE10, 0x0FE19}, - {0x0FE30, 0x0FE52}, - {0x0FE54, 0x0FE66}, - {0x0FE68, 0x0FE6B}, - {0x0FF01, 0x0FF60}, - {0x0FFE0, 0x0FFE6}, - {0x16FE0, 0x16FE4}, - {0x16FF0, 0x16FF1}, - {0x17000, 0x187F7}, - {0x18800, 0x18CD5}, - {0x18CFF, 0x18D08}, - {0x1AFF0, 0x1AFF3}, - {0x1AFF5, 0x1AFFB}, - {0x1AFFD, 0x1AFFE}, - {0x1B000, 0x1B122}, - {0x1B132, 0x1B132}, - {0x1B150, 0x1B152}, - {0x1B155, 0x1B155}, - {0x1B164, 0x1B167}, - {0x1B170, 0x1B2FB}, - {0x1D300, 0x1D356}, - {0x1D360, 0x1D376}, - {0x1F200, 0x1F200}, - {0x1F202, 0x1F202}, - {0x1F210, 0x1F219}, - {0x1F21B, 0x1F22E}, - {0x1F230, 0x1F231}, - {0x1F237, 0x1F237}, - {0x1F23B, 0x1F23B}, - {0x1F240, 0x1F248}, - {0x1F260, 0x1F265}, - {0x1F57A, 0x1F57A}, - {0x1F5A4, 0x1F5A4}, - {0x1F6D1, 0x1F6D2}, - {0x1F6D5, 0x1F6D7}, - {0x1F6DC, 0x1F6DF}, - {0x1F6F4, 0x1F6FC}, - {0x1F7E0, 0x1F7EB}, - {0x1F7F0, 0x1F7F0}, - {0x1F90C, 0x1F90F}, - {0x1F919, 0x1F93A}, - {0x1F93C, 0x1F945}, - {0x1F947, 0x1F97F}, - {0x1F985, 0x1F9BF}, - {0x1F9C1, 0x1F9FF}, - {0x1FA70, 0x1FA7C}, - {0x1FA80, 0x1FA89}, - {0x1FA8F, 0x1FAC6}, - {0x1FACE, 0x1FADC}, - {0x1FADF, 0x1FAE9}, - {0x1FAF0, 0x1FAF8}, - {0x20000, 0x2FFFD}, - {0x30000, 0x3FFFD} -}; + {0x01100, 0x0115F}, {0x02329, 0x0232A}, {0x02630, 0x02637}, {0x0268A, 0x0268F}, {0x02E80, 0x02E99}, + {0x02E9B, 0x02EF3}, {0x02F00, 0x02FD5}, {0x02FF0, 0x0303E}, {0x03041, 0x03096}, {0x03099, 0x030FF}, + {0x03105, 0x0312F}, {0x03131, 0x0318E}, {0x03190, 0x031E5}, {0x031EF, 0x0321E}, {0x03220, 0x03247}, + {0x03250, 0x0A48C}, {0x0A490, 0x0A4C6}, {0x0A960, 0x0A97C}, 
{0x0AC00, 0x0D7A3}, {0x0F900, 0x0FAFF}, + {0x0FE10, 0x0FE19}, {0x0FE30, 0x0FE52}, {0x0FE54, 0x0FE66}, {0x0FE68, 0x0FE6B}, {0x0FF01, 0x0FF60}, + {0x0FFE0, 0x0FFE6}, {0x16FE0, 0x16FE4}, {0x16FF0, 0x16FF1}, {0x17000, 0x187F7}, {0x18800, 0x18CD5}, + {0x18CFF, 0x18D08}, {0x1AFF0, 0x1AFF3}, {0x1AFF5, 0x1AFFB}, {0x1AFFD, 0x1AFFE}, {0x1B000, 0x1B122}, + {0x1B132, 0x1B132}, {0x1B150, 0x1B152}, {0x1B155, 0x1B155}, {0x1B164, 0x1B167}, {0x1B170, 0x1B2FB}, + {0x1D300, 0x1D356}, {0x1D360, 0x1D376}, {0x1F200, 0x1F200}, {0x1F202, 0x1F202}, {0x1F210, 0x1F219}, + {0x1F21B, 0x1F22E}, {0x1F230, 0x1F231}, {0x1F237, 0x1F237}, {0x1F23B, 0x1F23B}, {0x1F240, 0x1F248}, + {0x1F260, 0x1F265}, {0x1F57A, 0x1F57A}, {0x1F5A4, 0x1F5A4}, {0x1F6D1, 0x1F6D2}, {0x1F6D5, 0x1F6D7}, + {0x1F6DC, 0x1F6DF}, {0x1F6F4, 0x1F6FC}, {0x1F7E0, 0x1F7EB}, {0x1F7F0, 0x1F7F0}, {0x1F90C, 0x1F90F}, + {0x1F919, 0x1F93A}, {0x1F93C, 0x1F945}, {0x1F947, 0x1F97F}, {0x1F985, 0x1F9BF}, {0x1F9C1, 0x1F9FF}, + {0x1FA70, 0x1FA7C}, {0x1FA80, 0x1FA89}, {0x1FA8F, 0x1FAC6}, {0x1FACE, 0x1FADC}, {0x1FADF, 0x1FAE9}, + {0x1FAF0, 0x1FAF8}, {0x20000, 0x2FFFD}, {0x30000, 0x3FFFD}}; /* Ambiguous-width characters. */ static const struct widechar_range widechar_ambiguous_table[] = { - {0x000A1, 0x000A1}, - {0x000A4, 0x000A4}, - {0x000A7, 0x000A8}, - {0x000AA, 0x000AA}, - {0x000AD, 0x000AE}, - {0x000B0, 0x000B4}, - {0x000B6, 0x000BA}, - {0x000BC, 0x000BF}, - {0x000C6, 0x000C6}, - {0x000D0, 0x000D0}, - {0x000D7, 0x000D8}, - {0x000DE, 0x000E1}, - {0x000E6, 0x000E6}, - {0x000E8, 0x000EA}, - {0x000EC, 0x000ED}, - {0x000F0, 0x000F0}, - {0x000F2, 0x000F3}, - {0x000F7, 0x000FA}, - {0x000FC, 0x000FC}, - {0x000FE, 0x000FE}, - {0x00101, 0x00101}, - {0x00111, 0x00111}, - {0x00113, 0x00113}, - {0x0011B, 0x0011B}, - {0x00126, 0x00127}, - {0x0012B, 0x0012B}, - {0x00131, 0x00133}, - {0x00138, 0x00138}, - {0x0013F, 0x00142}, - {0x00144, 0x00144}, - {0x00148, 0x0014B}, - {0x0014D, 0x0014D}, - {0x00152, 0x00153}, - {0x00166, 0x00167}, - {0x0016B, 0x0016B}, - {0x001CE, 0x001CE}, - {0x001D0, 0x001D0}, - {0x001D2, 0x001D2}, - {0x001D4, 0x001D4}, - {0x001D6, 0x001D6}, - {0x001D8, 0x001D8}, - {0x001DA, 0x001DA}, - {0x001DC, 0x001DC}, - {0x00251, 0x00251}, - {0x00261, 0x00261}, - {0x002C4, 0x002C4}, - {0x002C7, 0x002C7}, - {0x002C9, 0x002CB}, - {0x002CD, 0x002CD}, - {0x002D0, 0x002D0}, - {0x002D8, 0x002DB}, - {0x002DD, 0x002DD}, - {0x002DF, 0x002DF}, - {0x00300, 0x0036F}, - {0x00391, 0x003A1}, - {0x003A3, 0x003A9}, - {0x003B1, 0x003C1}, - {0x003C3, 0x003C9}, - {0x00401, 0x00401}, - {0x00410, 0x0044F}, - {0x00451, 0x00451}, - {0x02010, 0x02010}, - {0x02013, 0x02016}, - {0x02018, 0x02019}, - {0x0201C, 0x0201D}, - {0x02020, 0x02022}, - {0x02024, 0x02027}, - {0x02030, 0x02030}, - {0x02032, 0x02033}, - {0x02035, 0x02035}, - {0x0203B, 0x0203B}, - {0x0203E, 0x0203E}, - {0x02074, 0x02074}, - {0x0207F, 0x0207F}, - {0x02081, 0x02084}, - {0x020AC, 0x020AC}, - {0x02103, 0x02103}, - {0x02105, 0x02105}, - {0x02109, 0x02109}, - {0x02113, 0x02113}, - {0x02116, 0x02116}, - {0x02121, 0x02122}, - {0x02126, 0x02126}, - {0x0212B, 0x0212B}, - {0x02153, 0x02154}, - {0x0215B, 0x0215E}, - {0x02160, 0x0216B}, - {0x02170, 0x02179}, - {0x02189, 0x02189}, - {0x02190, 0x02199}, - {0x021B8, 0x021B9}, - {0x021D2, 0x021D2}, - {0x021D4, 0x021D4}, - {0x021E7, 0x021E7}, - {0x02200, 0x02200}, - {0x02202, 0x02203}, - {0x02207, 0x02208}, - {0x0220B, 0x0220B}, - {0x0220F, 0x0220F}, - {0x02211, 0x02211}, - {0x02215, 0x02215}, - {0x0221A, 0x0221A}, - {0x0221D, 0x02220}, - {0x02223, 0x02223}, - {0x02225, 0x02225}, - {0x02227, 0x0222C}, 
- {0x0222E, 0x0222E}, - {0x02234, 0x02237}, - {0x0223C, 0x0223D}, - {0x02248, 0x02248}, - {0x0224C, 0x0224C}, - {0x02252, 0x02252}, - {0x02260, 0x02261}, - {0x02264, 0x02267}, - {0x0226A, 0x0226B}, - {0x0226E, 0x0226F}, - {0x02282, 0x02283}, - {0x02286, 0x02287}, - {0x02295, 0x02295}, - {0x02299, 0x02299}, - {0x022A5, 0x022A5}, - {0x022BF, 0x022BF}, - {0x02312, 0x02312}, - {0x02460, 0x024E9}, - {0x024EB, 0x0254B}, - {0x02550, 0x02573}, - {0x02580, 0x0258F}, - {0x02592, 0x02595}, - {0x025A0, 0x025A1}, - {0x025A3, 0x025A9}, - {0x025B2, 0x025B3}, - {0x025B6, 0x025B7}, - {0x025BC, 0x025BD}, - {0x025C0, 0x025C1}, - {0x025C6, 0x025C8}, - {0x025CB, 0x025CB}, - {0x025CE, 0x025D1}, - {0x025E2, 0x025E5}, - {0x025EF, 0x025EF}, - {0x02605, 0x02606}, - {0x02609, 0x02609}, - {0x0260E, 0x0260F}, - {0x0261C, 0x0261C}, - {0x0261E, 0x0261E}, - {0x02640, 0x02640}, - {0x02642, 0x02642}, - {0x02660, 0x02661}, - {0x02663, 0x02665}, - {0x02667, 0x0266A}, - {0x0266C, 0x0266D}, - {0x0266F, 0x0266F}, - {0x0269E, 0x0269F}, - {0x026BF, 0x026BF}, - {0x026C6, 0x026CD}, - {0x026CF, 0x026D3}, - {0x026D5, 0x026E1}, - {0x026E3, 0x026E3}, - {0x026E8, 0x026E9}, - {0x026EB, 0x026F1}, - {0x026F4, 0x026F4}, - {0x026F6, 0x026F9}, - {0x026FB, 0x026FC}, - {0x026FE, 0x026FF}, - {0x0273D, 0x0273D}, - {0x02776, 0x0277F}, - {0x02B56, 0x02B59}, - {0x03248, 0x0324F}, - {0x0E000, 0x0F8FF}, - {0x0FE00, 0x0FE0F}, - {0x0FFFD, 0x0FFFD}, - {0x1F100, 0x1F10A}, - {0x1F110, 0x1F12D}, - {0x1F130, 0x1F169}, - {0x1F170, 0x1F18D}, - {0x1F18F, 0x1F190}, - {0x1F19B, 0x1F1AC}, - {0xE0100, 0xE01EF}, - {0xF0000, 0xFFFFD}, - {0x100000, 0x10FFFD} -}; + {0x000A1, 0x000A1}, {0x000A4, 0x000A4}, {0x000A7, 0x000A8}, {0x000AA, 0x000AA}, {0x000AD, 0x000AE}, + {0x000B0, 0x000B4}, {0x000B6, 0x000BA}, {0x000BC, 0x000BF}, {0x000C6, 0x000C6}, {0x000D0, 0x000D0}, + {0x000D7, 0x000D8}, {0x000DE, 0x000E1}, {0x000E6, 0x000E6}, {0x000E8, 0x000EA}, {0x000EC, 0x000ED}, + {0x000F0, 0x000F0}, {0x000F2, 0x000F3}, {0x000F7, 0x000FA}, {0x000FC, 0x000FC}, {0x000FE, 0x000FE}, + {0x00101, 0x00101}, {0x00111, 0x00111}, {0x00113, 0x00113}, {0x0011B, 0x0011B}, {0x00126, 0x00127}, + {0x0012B, 0x0012B}, {0x00131, 0x00133}, {0x00138, 0x00138}, {0x0013F, 0x00142}, {0x00144, 0x00144}, + {0x00148, 0x0014B}, {0x0014D, 0x0014D}, {0x00152, 0x00153}, {0x00166, 0x00167}, {0x0016B, 0x0016B}, + {0x001CE, 0x001CE}, {0x001D0, 0x001D0}, {0x001D2, 0x001D2}, {0x001D4, 0x001D4}, {0x001D6, 0x001D6}, + {0x001D8, 0x001D8}, {0x001DA, 0x001DA}, {0x001DC, 0x001DC}, {0x00251, 0x00251}, {0x00261, 0x00261}, + {0x002C4, 0x002C4}, {0x002C7, 0x002C7}, {0x002C9, 0x002CB}, {0x002CD, 0x002CD}, {0x002D0, 0x002D0}, + {0x002D8, 0x002DB}, {0x002DD, 0x002DD}, {0x002DF, 0x002DF}, {0x00300, 0x0036F}, {0x00391, 0x003A1}, + {0x003A3, 0x003A9}, {0x003B1, 0x003C1}, {0x003C3, 0x003C9}, {0x00401, 0x00401}, {0x00410, 0x0044F}, + {0x00451, 0x00451}, {0x02010, 0x02010}, {0x02013, 0x02016}, {0x02018, 0x02019}, {0x0201C, 0x0201D}, + {0x02020, 0x02022}, {0x02024, 0x02027}, {0x02030, 0x02030}, {0x02032, 0x02033}, {0x02035, 0x02035}, + {0x0203B, 0x0203B}, {0x0203E, 0x0203E}, {0x02074, 0x02074}, {0x0207F, 0x0207F}, {0x02081, 0x02084}, + {0x020AC, 0x020AC}, {0x02103, 0x02103}, {0x02105, 0x02105}, {0x02109, 0x02109}, {0x02113, 0x02113}, + {0x02116, 0x02116}, {0x02121, 0x02122}, {0x02126, 0x02126}, {0x0212B, 0x0212B}, {0x02153, 0x02154}, + {0x0215B, 0x0215E}, {0x02160, 0x0216B}, {0x02170, 0x02179}, {0x02189, 0x02189}, {0x02190, 0x02199}, + {0x021B8, 0x021B9}, {0x021D2, 0x021D2}, {0x021D4, 0x021D4}, {0x021E7, 0x021E7}, {0x02200, 0x02200}, + 
{0x02202, 0x02203}, {0x02207, 0x02208}, {0x0220B, 0x0220B}, {0x0220F, 0x0220F}, {0x02211, 0x02211}, + {0x02215, 0x02215}, {0x0221A, 0x0221A}, {0x0221D, 0x02220}, {0x02223, 0x02223}, {0x02225, 0x02225}, + {0x02227, 0x0222C}, {0x0222E, 0x0222E}, {0x02234, 0x02237}, {0x0223C, 0x0223D}, {0x02248, 0x02248}, + {0x0224C, 0x0224C}, {0x02252, 0x02252}, {0x02260, 0x02261}, {0x02264, 0x02267}, {0x0226A, 0x0226B}, + {0x0226E, 0x0226F}, {0x02282, 0x02283}, {0x02286, 0x02287}, {0x02295, 0x02295}, {0x02299, 0x02299}, + {0x022A5, 0x022A5}, {0x022BF, 0x022BF}, {0x02312, 0x02312}, {0x02460, 0x024E9}, {0x024EB, 0x0254B}, + {0x02550, 0x02573}, {0x02580, 0x0258F}, {0x02592, 0x02595}, {0x025A0, 0x025A1}, {0x025A3, 0x025A9}, + {0x025B2, 0x025B3}, {0x025B6, 0x025B7}, {0x025BC, 0x025BD}, {0x025C0, 0x025C1}, {0x025C6, 0x025C8}, + {0x025CB, 0x025CB}, {0x025CE, 0x025D1}, {0x025E2, 0x025E5}, {0x025EF, 0x025EF}, {0x02605, 0x02606}, + {0x02609, 0x02609}, {0x0260E, 0x0260F}, {0x0261C, 0x0261C}, {0x0261E, 0x0261E}, {0x02640, 0x02640}, + {0x02642, 0x02642}, {0x02660, 0x02661}, {0x02663, 0x02665}, {0x02667, 0x0266A}, {0x0266C, 0x0266D}, + {0x0266F, 0x0266F}, {0x0269E, 0x0269F}, {0x026BF, 0x026BF}, {0x026C6, 0x026CD}, {0x026CF, 0x026D3}, + {0x026D5, 0x026E1}, {0x026E3, 0x026E3}, {0x026E8, 0x026E9}, {0x026EB, 0x026F1}, {0x026F4, 0x026F4}, + {0x026F6, 0x026F9}, {0x026FB, 0x026FC}, {0x026FE, 0x026FF}, {0x0273D, 0x0273D}, {0x02776, 0x0277F}, + {0x02B56, 0x02B59}, {0x03248, 0x0324F}, {0x0E000, 0x0F8FF}, {0x0FE00, 0x0FE0F}, {0x0FFFD, 0x0FFFD}, + {0x1F100, 0x1F10A}, {0x1F110, 0x1F12D}, {0x1F130, 0x1F169}, {0x1F170, 0x1F18D}, {0x1F18F, 0x1F190}, + {0x1F19B, 0x1F1AC}, {0xE0100, 0xE01EF}, {0xF0000, 0xFFFFD}, {0x100000, 0x10FFFD}}; /* Unassigned characters. */ static const struct widechar_range widechar_unassigned_table[] = { - {0x00378, 0x00379}, - {0x00380, 0x00383}, - {0x0038B, 0x0038B}, - {0x0038D, 0x0038D}, - {0x003A2, 0x003A2}, - {0x00530, 0x00530}, - {0x00557, 0x00558}, - {0x0058B, 0x0058C}, - {0x00590, 0x00590}, - {0x005C8, 0x005CF}, - {0x005EB, 0x005EE}, - {0x005F5, 0x005FF}, - {0x0070E, 0x0070E}, - {0x0074B, 0x0074C}, - {0x007B2, 0x007BF}, - {0x007FB, 0x007FC}, - {0x0082E, 0x0082F}, - {0x0083F, 0x0083F}, - {0x0085C, 0x0085D}, - {0x0085F, 0x0085F}, - {0x0086B, 0x0086F}, - {0x0088F, 0x0088F}, - {0x00892, 0x00896}, - {0x00984, 0x00984}, - {0x0098D, 0x0098E}, - {0x00991, 0x00992}, - {0x009A9, 0x009A9}, - {0x009B1, 0x009B1}, - {0x009B3, 0x009B5}, - {0x009BA, 0x009BB}, - {0x009C5, 0x009C6}, - {0x009C9, 0x009CA}, - {0x009CF, 0x009D6}, - {0x009D8, 0x009DB}, - {0x009DE, 0x009DE}, - {0x009E4, 0x009E5}, - {0x009FF, 0x00A00}, - {0x00A04, 0x00A04}, - {0x00A0B, 0x00A0E}, - {0x00A11, 0x00A12}, - {0x00A29, 0x00A29}, - {0x00A31, 0x00A31}, - {0x00A34, 0x00A34}, - {0x00A37, 0x00A37}, - {0x00A3A, 0x00A3B}, - {0x00A3D, 0x00A3D}, - {0x00A43, 0x00A46}, - {0x00A49, 0x00A4A}, - {0x00A4E, 0x00A50}, - {0x00A52, 0x00A58}, - {0x00A5D, 0x00A5D}, - {0x00A5F, 0x00A65}, - {0x00A77, 0x00A80}, - {0x00A84, 0x00A84}, - {0x00A8E, 0x00A8E}, - {0x00A92, 0x00A92}, - {0x00AA9, 0x00AA9}, - {0x00AB1, 0x00AB1}, - {0x00AB4, 0x00AB4}, - {0x00ABA, 0x00ABB}, - {0x00AC6, 0x00AC6}, - {0x00ACA, 0x00ACA}, - {0x00ACE, 0x00ACF}, - {0x00AD1, 0x00ADF}, - {0x00AE4, 0x00AE5}, - {0x00AF2, 0x00AF8}, - {0x00B00, 0x00B00}, - {0x00B04, 0x00B04}, - {0x00B0D, 0x00B0E}, - {0x00B11, 0x00B12}, - {0x00B29, 0x00B29}, - {0x00B31, 0x00B31}, - {0x00B34, 0x00B34}, - {0x00B3A, 0x00B3B}, - {0x00B45, 0x00B46}, - {0x00B49, 0x00B4A}, - {0x00B4E, 0x00B54}, - {0x00B58, 0x00B5B}, - {0x00B5E, 0x00B5E}, - 
{0x00B64, 0x00B65}, - {0x00B78, 0x00B81}, - {0x00B84, 0x00B84}, - {0x00B8B, 0x00B8D}, - {0x00B91, 0x00B91}, - {0x00B96, 0x00B98}, - {0x00B9B, 0x00B9B}, - {0x00B9D, 0x00B9D}, - {0x00BA0, 0x00BA2}, - {0x00BA5, 0x00BA7}, - {0x00BAB, 0x00BAD}, - {0x00BBA, 0x00BBD}, - {0x00BC3, 0x00BC5}, - {0x00BC9, 0x00BC9}, - {0x00BCE, 0x00BCF}, - {0x00BD1, 0x00BD6}, - {0x00BD8, 0x00BE5}, - {0x00BFB, 0x00BFF}, - {0x00C0D, 0x00C0D}, - {0x00C11, 0x00C11}, - {0x00C29, 0x00C29}, - {0x00C3A, 0x00C3B}, - {0x00C45, 0x00C45}, - {0x00C49, 0x00C49}, - {0x00C4E, 0x00C54}, - {0x00C57, 0x00C57}, - {0x00C5B, 0x00C5C}, - {0x00C5E, 0x00C5F}, - {0x00C64, 0x00C65}, - {0x00C70, 0x00C76}, - {0x00C8D, 0x00C8D}, - {0x00C91, 0x00C91}, - {0x00CA9, 0x00CA9}, - {0x00CB4, 0x00CB4}, - {0x00CBA, 0x00CBB}, - {0x00CC5, 0x00CC5}, - {0x00CC9, 0x00CC9}, - {0x00CCE, 0x00CD4}, - {0x00CD7, 0x00CDC}, - {0x00CDF, 0x00CDF}, - {0x00CE4, 0x00CE5}, - {0x00CF0, 0x00CF0}, - {0x00CF4, 0x00CFF}, - {0x00D0D, 0x00D0D}, - {0x00D11, 0x00D11}, - {0x00D45, 0x00D45}, - {0x00D49, 0x00D49}, - {0x00D50, 0x00D53}, - {0x00D64, 0x00D65}, - {0x00D80, 0x00D80}, - {0x00D84, 0x00D84}, - {0x00D97, 0x00D99}, - {0x00DB2, 0x00DB2}, - {0x00DBC, 0x00DBC}, - {0x00DBE, 0x00DBF}, - {0x00DC7, 0x00DC9}, - {0x00DCB, 0x00DCE}, - {0x00DD5, 0x00DD5}, - {0x00DD7, 0x00DD7}, - {0x00DE0, 0x00DE5}, - {0x00DF0, 0x00DF1}, - {0x00DF5, 0x00E00}, - {0x00E3B, 0x00E3E}, - {0x00E5C, 0x00E80}, - {0x00E83, 0x00E83}, - {0x00E85, 0x00E85}, - {0x00E8B, 0x00E8B}, - {0x00EA4, 0x00EA4}, - {0x00EA6, 0x00EA6}, - {0x00EBE, 0x00EBF}, - {0x00EC5, 0x00EC5}, - {0x00EC7, 0x00EC7}, - {0x00ECF, 0x00ECF}, - {0x00EDA, 0x00EDB}, - {0x00EE0, 0x00EFF}, - {0x00F48, 0x00F48}, - {0x00F6D, 0x00F70}, - {0x00F98, 0x00F98}, - {0x00FBD, 0x00FBD}, - {0x00FCD, 0x00FCD}, - {0x00FDB, 0x00FFF}, - {0x010C6, 0x010C6}, - {0x010C8, 0x010CC}, - {0x010CE, 0x010CF}, - {0x01249, 0x01249}, - {0x0124E, 0x0124F}, - {0x01257, 0x01257}, - {0x01259, 0x01259}, - {0x0125E, 0x0125F}, - {0x01289, 0x01289}, - {0x0128E, 0x0128F}, - {0x012B1, 0x012B1}, - {0x012B6, 0x012B7}, - {0x012BF, 0x012BF}, - {0x012C1, 0x012C1}, - {0x012C6, 0x012C7}, - {0x012D7, 0x012D7}, - {0x01311, 0x01311}, - {0x01316, 0x01317}, - {0x0135B, 0x0135C}, - {0x0137D, 0x0137F}, - {0x0139A, 0x0139F}, - {0x013F6, 0x013F7}, - {0x013FE, 0x013FF}, - {0x0169D, 0x0169F}, - {0x016F9, 0x016FF}, - {0x01716, 0x0171E}, - {0x01737, 0x0173F}, - {0x01754, 0x0175F}, - {0x0176D, 0x0176D}, - {0x01771, 0x01771}, - {0x01774, 0x0177F}, - {0x017DE, 0x017DF}, - {0x017EA, 0x017EF}, - {0x017FA, 0x017FF}, - {0x0181A, 0x0181F}, - {0x01879, 0x0187F}, - {0x018AB, 0x018AF}, - {0x018F6, 0x018FF}, - {0x0191F, 0x0191F}, - {0x0192C, 0x0192F}, - {0x0193C, 0x0193F}, - {0x01941, 0x01943}, - {0x0196E, 0x0196F}, - {0x01975, 0x0197F}, - {0x019AC, 0x019AF}, - {0x019CA, 0x019CF}, - {0x019DB, 0x019DD}, - {0x01A1C, 0x01A1D}, - {0x01A5F, 0x01A5F}, - {0x01A7D, 0x01A7E}, - {0x01A8A, 0x01A8F}, - {0x01A9A, 0x01A9F}, - {0x01AAE, 0x01AAF}, - {0x01ACF, 0x01AFF}, - {0x01B4D, 0x01B4D}, - {0x01BF4, 0x01BFB}, - {0x01C38, 0x01C3A}, - {0x01C4A, 0x01C4C}, - {0x01C8B, 0x01C8F}, - {0x01CBB, 0x01CBC}, - {0x01CC8, 0x01CCF}, - {0x01CFB, 0x01CFF}, - {0x01F16, 0x01F17}, - {0x01F1E, 0x01F1F}, - {0x01F46, 0x01F47}, - {0x01F4E, 0x01F4F}, - {0x01F58, 0x01F58}, - {0x01F5A, 0x01F5A}, - {0x01F5C, 0x01F5C}, - {0x01F5E, 0x01F5E}, - {0x01F7E, 0x01F7F}, - {0x01FB5, 0x01FB5}, - {0x01FC5, 0x01FC5}, - {0x01FD4, 0x01FD5}, - {0x01FDC, 0x01FDC}, - {0x01FF0, 0x01FF1}, - {0x01FF5, 0x01FF5}, - {0x01FFF, 0x01FFF}, - {0x02065, 0x02065}, - {0x02072, 0x02073}, - {0x0208F, 
0x0208F}, - {0x0209D, 0x0209F}, - {0x020C1, 0x020CF}, - {0x020F1, 0x020FF}, - {0x0218C, 0x0218F}, - {0x0242A, 0x0243F}, - {0x0244B, 0x0245F}, - {0x02B74, 0x02B75}, - {0x02B96, 0x02B96}, - {0x02CF4, 0x02CF8}, - {0x02D26, 0x02D26}, - {0x02D28, 0x02D2C}, - {0x02D2E, 0x02D2F}, - {0x02D68, 0x02D6E}, - {0x02D71, 0x02D7E}, - {0x02D97, 0x02D9F}, - {0x02DA7, 0x02DA7}, - {0x02DAF, 0x02DAF}, - {0x02DB7, 0x02DB7}, - {0x02DBF, 0x02DBF}, - {0x02DC7, 0x02DC7}, - {0x02DCF, 0x02DCF}, - {0x02DD7, 0x02DD7}, - {0x02DDF, 0x02DDF}, - {0x02E5E, 0x02E7F}, - {0x02E9A, 0x02E9A}, - {0x02EF4, 0x02EFF}, - {0x02FD6, 0x02FEF}, - {0x03040, 0x03040}, - {0x03097, 0x03098}, - {0x03100, 0x03104}, - {0x03130, 0x03130}, - {0x0318F, 0x0318F}, - {0x031E6, 0x031EE}, - {0x0321F, 0x0321F}, - {0x03401, 0x04DBE}, - {0x04E01, 0x09FFE}, - {0x0A48D, 0x0A48F}, - {0x0A4C7, 0x0A4CF}, - {0x0A62C, 0x0A63F}, - {0x0A6F8, 0x0A6FF}, - {0x0A7CE, 0x0A7CF}, - {0x0A7D2, 0x0A7D2}, - {0x0A7D4, 0x0A7D4}, - {0x0A7DD, 0x0A7F1}, - {0x0A82D, 0x0A82F}, - {0x0A83A, 0x0A83F}, - {0x0A878, 0x0A87F}, - {0x0A8C6, 0x0A8CD}, - {0x0A8DA, 0x0A8DF}, - {0x0A954, 0x0A95E}, - {0x0A97D, 0x0A97F}, - {0x0A9CE, 0x0A9CE}, - {0x0A9DA, 0x0A9DD}, - {0x0A9FF, 0x0A9FF}, - {0x0AA37, 0x0AA3F}, - {0x0AA4E, 0x0AA4F}, - {0x0AA5A, 0x0AA5B}, - {0x0AAC3, 0x0AADA}, - {0x0AAF7, 0x0AB00}, - {0x0AB07, 0x0AB08}, - {0x0AB0F, 0x0AB10}, - {0x0AB17, 0x0AB1F}, - {0x0AB27, 0x0AB27}, - {0x0AB2F, 0x0AB2F}, - {0x0AB6C, 0x0AB6F}, - {0x0ABEE, 0x0ABEF}, - {0x0ABFA, 0x0ABFF}, - {0x0AC01, 0x0D7A2}, - {0x0D7A4, 0x0D7AF}, - {0x0D7C7, 0x0D7CA}, - {0x0D7FC, 0x0D7FF}, - {0x0FA6E, 0x0FA6F}, - {0x0FADA, 0x0FAFF}, - {0x0FB07, 0x0FB12}, - {0x0FB18, 0x0FB1C}, - {0x0FB37, 0x0FB37}, - {0x0FB3D, 0x0FB3D}, - {0x0FB3F, 0x0FB3F}, - {0x0FB42, 0x0FB42}, - {0x0FB45, 0x0FB45}, - {0x0FBC3, 0x0FBD2}, - {0x0FD90, 0x0FD91}, - {0x0FDC8, 0x0FDCE}, - {0x0FE1A, 0x0FE1F}, - {0x0FE53, 0x0FE53}, - {0x0FE67, 0x0FE67}, - {0x0FE6C, 0x0FE6F}, - {0x0FE75, 0x0FE75}, - {0x0FEFD, 0x0FEFE}, - {0x0FF00, 0x0FF00}, - {0x0FFBF, 0x0FFC1}, - {0x0FFC8, 0x0FFC9}, - {0x0FFD0, 0x0FFD1}, - {0x0FFD8, 0x0FFD9}, - {0x0FFDD, 0x0FFDF}, - {0x0FFE7, 0x0FFE7}, - {0x0FFEF, 0x0FFF8}, - {0x1000C, 0x1000C}, - {0x10027, 0x10027}, - {0x1003B, 0x1003B}, - {0x1003E, 0x1003E}, - {0x1004E, 0x1004F}, - {0x1005E, 0x1007F}, - {0x100FB, 0x100FF}, - {0x10103, 0x10106}, - {0x10134, 0x10136}, - {0x1018F, 0x1018F}, - {0x1019D, 0x1019F}, - {0x101A1, 0x101CF}, - {0x101FE, 0x1027F}, - {0x1029D, 0x1029F}, - {0x102D1, 0x102DF}, - {0x102FC, 0x102FF}, - {0x10324, 0x1032C}, - {0x1034B, 0x1034F}, - {0x1037B, 0x1037F}, - {0x1039E, 0x1039E}, - {0x103C4, 0x103C7}, - {0x103D6, 0x103FF}, - {0x1049E, 0x1049F}, - {0x104AA, 0x104AF}, - {0x104D4, 0x104D7}, - {0x104FC, 0x104FF}, - {0x10528, 0x1052F}, - {0x10564, 0x1056E}, - {0x1057B, 0x1057B}, - {0x1058B, 0x1058B}, - {0x10593, 0x10593}, - {0x10596, 0x10596}, - {0x105A2, 0x105A2}, - {0x105B2, 0x105B2}, - {0x105BA, 0x105BA}, - {0x105BD, 0x105BF}, - {0x105F4, 0x105FF}, - {0x10737, 0x1073F}, - {0x10756, 0x1075F}, - {0x10768, 0x1077F}, - {0x10786, 0x10786}, - {0x107B1, 0x107B1}, - {0x107BB, 0x107FF}, - {0x10806, 0x10807}, - {0x10809, 0x10809}, - {0x10836, 0x10836}, - {0x10839, 0x1083B}, - {0x1083D, 0x1083E}, - {0x10856, 0x10856}, - {0x1089F, 0x108A6}, - {0x108B0, 0x108DF}, - {0x108F3, 0x108F3}, - {0x108F6, 0x108FA}, - {0x1091C, 0x1091E}, - {0x1093A, 0x1093E}, - {0x10940, 0x1097F}, - {0x109B8, 0x109BB}, - {0x109D0, 0x109D1}, - {0x10A04, 0x10A04}, - {0x10A07, 0x10A0B}, - {0x10A14, 0x10A14}, - {0x10A18, 0x10A18}, - {0x10A36, 0x10A37}, - {0x10A3B, 0x10A3E}, - 
{0x10A49, 0x10A4F}, - {0x10A59, 0x10A5F}, - {0x10AA0, 0x10ABF}, - {0x10AE7, 0x10AEA}, - {0x10AF7, 0x10AFF}, - {0x10B36, 0x10B38}, - {0x10B56, 0x10B57}, - {0x10B73, 0x10B77}, - {0x10B92, 0x10B98}, - {0x10B9D, 0x10BA8}, - {0x10BB0, 0x10BFF}, - {0x10C49, 0x10C7F}, - {0x10CB3, 0x10CBF}, - {0x10CF3, 0x10CF9}, - {0x10D28, 0x10D2F}, - {0x10D3A, 0x10D3F}, - {0x10D66, 0x10D68}, - {0x10D86, 0x10D8D}, - {0x10D90, 0x10E5F}, - {0x10E7F, 0x10E7F}, - {0x10EAA, 0x10EAA}, - {0x10EAE, 0x10EAF}, - {0x10EB2, 0x10EC1}, - {0x10EC5, 0x10EFB}, - {0x10F28, 0x10F2F}, - {0x10F5A, 0x10F6F}, - {0x10F8A, 0x10FAF}, - {0x10FCC, 0x10FDF}, - {0x10FF7, 0x10FFF}, - {0x1104E, 0x11051}, - {0x11076, 0x1107E}, - {0x110C3, 0x110CC}, - {0x110CE, 0x110CF}, - {0x110E9, 0x110EF}, - {0x110FA, 0x110FF}, - {0x11135, 0x11135}, - {0x11148, 0x1114F}, - {0x11177, 0x1117F}, - {0x111E0, 0x111E0}, - {0x111F5, 0x111FF}, - {0x11212, 0x11212}, - {0x11242, 0x1127F}, - {0x11287, 0x11287}, - {0x11289, 0x11289}, - {0x1128E, 0x1128E}, - {0x1129E, 0x1129E}, - {0x112AA, 0x112AF}, - {0x112EB, 0x112EF}, - {0x112FA, 0x112FF}, - {0x11304, 0x11304}, - {0x1130D, 0x1130E}, - {0x11311, 0x11312}, - {0x11329, 0x11329}, - {0x11331, 0x11331}, - {0x11334, 0x11334}, - {0x1133A, 0x1133A}, - {0x11345, 0x11346}, - {0x11349, 0x1134A}, - {0x1134E, 0x1134F}, - {0x11351, 0x11356}, - {0x11358, 0x1135C}, - {0x11364, 0x11365}, - {0x1136D, 0x1136F}, - {0x11375, 0x1137F}, - {0x1138A, 0x1138A}, - {0x1138C, 0x1138D}, - {0x1138F, 0x1138F}, - {0x113B6, 0x113B6}, - {0x113C1, 0x113C1}, - {0x113C3, 0x113C4}, - {0x113C6, 0x113C6}, - {0x113CB, 0x113CB}, - {0x113D6, 0x113D6}, - {0x113D9, 0x113E0}, - {0x113E3, 0x113FF}, - {0x1145C, 0x1145C}, - {0x11462, 0x1147F}, - {0x114C8, 0x114CF}, - {0x114DA, 0x1157F}, - {0x115B6, 0x115B7}, - {0x115DE, 0x115FF}, - {0x11645, 0x1164F}, - {0x1165A, 0x1165F}, - {0x1166D, 0x1167F}, - {0x116BA, 0x116BF}, - {0x116CA, 0x116CF}, - {0x116E4, 0x116FF}, - {0x1171B, 0x1171C}, - {0x1172C, 0x1172F}, - {0x11747, 0x117FF}, - {0x1183C, 0x1189F}, - {0x118F3, 0x118FE}, - {0x11907, 0x11908}, - {0x1190A, 0x1190B}, - {0x11914, 0x11914}, - {0x11917, 0x11917}, - {0x11936, 0x11936}, - {0x11939, 0x1193A}, - {0x11947, 0x1194F}, - {0x1195A, 0x1199F}, - {0x119A8, 0x119A9}, - {0x119D8, 0x119D9}, - {0x119E5, 0x119FF}, - {0x11A48, 0x11A4F}, - {0x11AA3, 0x11AAF}, - {0x11AF9, 0x11AFF}, - {0x11B0A, 0x11BBF}, - {0x11BE2, 0x11BEF}, - {0x11BFA, 0x11BFF}, - {0x11C09, 0x11C09}, - {0x11C37, 0x11C37}, - {0x11C46, 0x11C4F}, - {0x11C6D, 0x11C6F}, - {0x11C90, 0x11C91}, - {0x11CA8, 0x11CA8}, - {0x11CB7, 0x11CFF}, - {0x11D07, 0x11D07}, - {0x11D0A, 0x11D0A}, - {0x11D37, 0x11D39}, - {0x11D3B, 0x11D3B}, - {0x11D3E, 0x11D3E}, - {0x11D48, 0x11D4F}, - {0x11D5A, 0x11D5F}, - {0x11D66, 0x11D66}, - {0x11D69, 0x11D69}, - {0x11D8F, 0x11D8F}, - {0x11D92, 0x11D92}, - {0x11D99, 0x11D9F}, - {0x11DAA, 0x11EDF}, - {0x11EF9, 0x11EFF}, - {0x11F11, 0x11F11}, - {0x11F3B, 0x11F3D}, - {0x11F5B, 0x11FAF}, - {0x11FB1, 0x11FBF}, - {0x11FF2, 0x11FFE}, - {0x1239A, 0x123FF}, - {0x1246F, 0x1246F}, - {0x12475, 0x1247F}, - {0x12544, 0x12F8F}, - {0x12FF3, 0x12FFF}, - {0x13456, 0x1345F}, - {0x143FB, 0x143FF}, - {0x14647, 0x160FF}, - {0x1613A, 0x167FF}, - {0x16A39, 0x16A3F}, - {0x16A5F, 0x16A5F}, - {0x16A6A, 0x16A6D}, - {0x16ABF, 0x16ABF}, - {0x16ACA, 0x16ACF}, - {0x16AEE, 0x16AEF}, - {0x16AF6, 0x16AFF}, - {0x16B46, 0x16B4F}, - {0x16B5A, 0x16B5A}, - {0x16B62, 0x16B62}, - {0x16B78, 0x16B7C}, - {0x16B90, 0x16D3F}, - {0x16D7A, 0x16E3F}, - {0x16E9B, 0x16EFF}, - {0x16F4B, 0x16F4E}, - {0x16F88, 0x16F8E}, - {0x16FA0, 0x16FDF}, - {0x16FE5, 
0x16FEF}, - {0x16FF2, 0x16FFF}, - {0x17001, 0x187F6}, - {0x187F8, 0x187FF}, - {0x18CD6, 0x18CFE}, - {0x18D01, 0x18D07}, - {0x18D09, 0x1AFEF}, - {0x1AFF4, 0x1AFF4}, - {0x1AFFC, 0x1AFFC}, - {0x1AFFF, 0x1AFFF}, - {0x1B123, 0x1B131}, - {0x1B133, 0x1B14F}, - {0x1B153, 0x1B154}, - {0x1B156, 0x1B163}, - {0x1B168, 0x1B16F}, - {0x1B2FC, 0x1BBFF}, - {0x1BC6B, 0x1BC6F}, - {0x1BC7D, 0x1BC7F}, - {0x1BC89, 0x1BC8F}, - {0x1BC9A, 0x1BC9B}, - {0x1BCA4, 0x1CBFF}, - {0x1CCFA, 0x1CCFF}, - {0x1CEB4, 0x1CEFF}, - {0x1CF2E, 0x1CF2F}, - {0x1CF47, 0x1CF4F}, - {0x1CFC4, 0x1CFFF}, - {0x1D0F6, 0x1D0FF}, - {0x1D127, 0x1D128}, - {0x1D1EB, 0x1D1FF}, - {0x1D246, 0x1D2BF}, - {0x1D2D4, 0x1D2DF}, - {0x1D2F4, 0x1D2FF}, - {0x1D357, 0x1D35F}, - {0x1D379, 0x1D3FF}, - {0x1D455, 0x1D455}, - {0x1D49D, 0x1D49D}, - {0x1D4A0, 0x1D4A1}, - {0x1D4A3, 0x1D4A4}, - {0x1D4A7, 0x1D4A8}, - {0x1D4AD, 0x1D4AD}, - {0x1D4BA, 0x1D4BA}, - {0x1D4BC, 0x1D4BC}, - {0x1D4C4, 0x1D4C4}, - {0x1D506, 0x1D506}, - {0x1D50B, 0x1D50C}, - {0x1D515, 0x1D515}, - {0x1D51D, 0x1D51D}, - {0x1D53A, 0x1D53A}, - {0x1D53F, 0x1D53F}, - {0x1D545, 0x1D545}, - {0x1D547, 0x1D549}, - {0x1D551, 0x1D551}, - {0x1D6A6, 0x1D6A7}, - {0x1D7CC, 0x1D7CD}, - {0x1DA8C, 0x1DA9A}, - {0x1DAA0, 0x1DAA0}, - {0x1DAB0, 0x1DEFF}, - {0x1DF1F, 0x1DF24}, - {0x1DF2B, 0x1DFFF}, - {0x1E007, 0x1E007}, - {0x1E019, 0x1E01A}, - {0x1E022, 0x1E022}, - {0x1E025, 0x1E025}, - {0x1E02B, 0x1E02F}, - {0x1E06E, 0x1E08E}, - {0x1E090, 0x1E0FF}, - {0x1E12D, 0x1E12F}, - {0x1E13E, 0x1E13F}, - {0x1E14A, 0x1E14D}, - {0x1E150, 0x1E28F}, - {0x1E2AF, 0x1E2BF}, - {0x1E2FA, 0x1E2FE}, - {0x1E300, 0x1E4CF}, - {0x1E4FA, 0x1E5CF}, - {0x1E5FB, 0x1E5FE}, - {0x1E600, 0x1E7DF}, - {0x1E7E7, 0x1E7E7}, - {0x1E7EC, 0x1E7EC}, - {0x1E7EF, 0x1E7EF}, - {0x1E7FF, 0x1E7FF}, - {0x1E8C5, 0x1E8C6}, - {0x1E8D7, 0x1E8FF}, - {0x1E94C, 0x1E94F}, - {0x1E95A, 0x1E95D}, - {0x1E960, 0x1EC70}, - {0x1ECB5, 0x1ED00}, - {0x1ED3E, 0x1EDFF}, - {0x1EE04, 0x1EE04}, - {0x1EE20, 0x1EE20}, - {0x1EE23, 0x1EE23}, - {0x1EE25, 0x1EE26}, - {0x1EE28, 0x1EE28}, - {0x1EE33, 0x1EE33}, - {0x1EE38, 0x1EE38}, - {0x1EE3A, 0x1EE3A}, - {0x1EE3C, 0x1EE41}, - {0x1EE43, 0x1EE46}, - {0x1EE48, 0x1EE48}, - {0x1EE4A, 0x1EE4A}, - {0x1EE4C, 0x1EE4C}, - {0x1EE50, 0x1EE50}, - {0x1EE53, 0x1EE53}, - {0x1EE55, 0x1EE56}, - {0x1EE58, 0x1EE58}, - {0x1EE5A, 0x1EE5A}, - {0x1EE5C, 0x1EE5C}, - {0x1EE5E, 0x1EE5E}, - {0x1EE60, 0x1EE60}, - {0x1EE63, 0x1EE63}, - {0x1EE65, 0x1EE66}, - {0x1EE6B, 0x1EE6B}, - {0x1EE73, 0x1EE73}, - {0x1EE78, 0x1EE78}, - {0x1EE7D, 0x1EE7D}, - {0x1EE7F, 0x1EE7F}, - {0x1EE8A, 0x1EE8A}, - {0x1EE9C, 0x1EEA0}, - {0x1EEA4, 0x1EEA4}, - {0x1EEAA, 0x1EEAA}, - {0x1EEBC, 0x1EEEF}, - {0x1EEF2, 0x1EFFF}, - {0x1F02C, 0x1F02F}, - {0x1F094, 0x1F09F}, - {0x1F0AF, 0x1F0B0}, - {0x1F0C0, 0x1F0C0}, - {0x1F0D0, 0x1F0D0}, - {0x1F0F6, 0x1F0FF}, - {0x1F1AE, 0x1F1E5}, - {0x1F203, 0x1F20F}, - {0x1F23C, 0x1F23F}, - {0x1F249, 0x1F24F}, - {0x1F252, 0x1F25F}, - {0x1F266, 0x1F2FF}, - {0x1F6D8, 0x1F6DB}, - {0x1F6ED, 0x1F6EF}, - {0x1F6FD, 0x1F6FF}, - {0x1F777, 0x1F77A}, - {0x1F7DA, 0x1F7DF}, - {0x1F7EC, 0x1F7EF}, - {0x1F7F1, 0x1F7FF}, - {0x1F80C, 0x1F80F}, - {0x1F848, 0x1F84F}, - {0x1F85A, 0x1F85F}, - {0x1F888, 0x1F88F}, - {0x1F8AE, 0x1F8AF}, - {0x1F8BC, 0x1F8BF}, - {0x1F8C2, 0x1F8FF}, - {0x1FA54, 0x1FA5F}, - {0x1FA6E, 0x1FA6F}, - {0x1FA7D, 0x1FA7F}, - {0x1FA8A, 0x1FA8E}, - {0x1FAC7, 0x1FACD}, - {0x1FADD, 0x1FADE}, - {0x1FAEA, 0x1FAEF}, - {0x1FAF9, 0x1FAFF}, - {0x1FB93, 0x1FB93}, - {0x1FBFA, 0x1FFFD}, - {0x20001, 0x2A6DE}, - {0x2A6E0, 0x2A6FF}, - {0x2A701, 0x2B738}, - {0x2B73A, 0x2B73F}, - {0x2B741, 0x2B81C}, - 
{0x2B81E, 0x2B81F}, - {0x2B821, 0x2CEA0}, - {0x2CEA2, 0x2CEAF}, - {0x2CEB1, 0x2EBDF}, - {0x2EBE1, 0x2EBEF}, - {0x2EBF1, 0x2EE5C}, - {0x2EE5E, 0x2F7FF}, - {0x2FA1E, 0x2FFFD}, - {0x30001, 0x31349}, - {0x3134B, 0x3134F}, - {0x31351, 0x323AE}, - {0x323B0, 0x3FFFD}, - {0x40000, 0x4FFFD}, - {0x50000, 0x5FFFD}, - {0x60000, 0x6FFFD}, - {0x70000, 0x7FFFD}, - {0x80000, 0x8FFFD}, - {0x90000, 0x9FFFD}, - {0xA0000, 0xAFFFD}, - {0xB0000, 0xBFFFD}, - {0xC0000, 0xCFFFD}, - {0xD0000, 0xDFFFD}, - {0xE0000, 0xE0000}, - {0xE0002, 0xE001F}, - {0xE0080, 0xE00FF}, - {0xE01F0, 0xEFFFD} -}; + {0x00378, 0x00379}, {0x00380, 0x00383}, {0x0038B, 0x0038B}, {0x0038D, 0x0038D}, {0x003A2, 0x003A2}, + {0x00530, 0x00530}, {0x00557, 0x00558}, {0x0058B, 0x0058C}, {0x00590, 0x00590}, {0x005C8, 0x005CF}, + {0x005EB, 0x005EE}, {0x005F5, 0x005FF}, {0x0070E, 0x0070E}, {0x0074B, 0x0074C}, {0x007B2, 0x007BF}, + {0x007FB, 0x007FC}, {0x0082E, 0x0082F}, {0x0083F, 0x0083F}, {0x0085C, 0x0085D}, {0x0085F, 0x0085F}, + {0x0086B, 0x0086F}, {0x0088F, 0x0088F}, {0x00892, 0x00896}, {0x00984, 0x00984}, {0x0098D, 0x0098E}, + {0x00991, 0x00992}, {0x009A9, 0x009A9}, {0x009B1, 0x009B1}, {0x009B3, 0x009B5}, {0x009BA, 0x009BB}, + {0x009C5, 0x009C6}, {0x009C9, 0x009CA}, {0x009CF, 0x009D6}, {0x009D8, 0x009DB}, {0x009DE, 0x009DE}, + {0x009E4, 0x009E5}, {0x009FF, 0x00A00}, {0x00A04, 0x00A04}, {0x00A0B, 0x00A0E}, {0x00A11, 0x00A12}, + {0x00A29, 0x00A29}, {0x00A31, 0x00A31}, {0x00A34, 0x00A34}, {0x00A37, 0x00A37}, {0x00A3A, 0x00A3B}, + {0x00A3D, 0x00A3D}, {0x00A43, 0x00A46}, {0x00A49, 0x00A4A}, {0x00A4E, 0x00A50}, {0x00A52, 0x00A58}, + {0x00A5D, 0x00A5D}, {0x00A5F, 0x00A65}, {0x00A77, 0x00A80}, {0x00A84, 0x00A84}, {0x00A8E, 0x00A8E}, + {0x00A92, 0x00A92}, {0x00AA9, 0x00AA9}, {0x00AB1, 0x00AB1}, {0x00AB4, 0x00AB4}, {0x00ABA, 0x00ABB}, + {0x00AC6, 0x00AC6}, {0x00ACA, 0x00ACA}, {0x00ACE, 0x00ACF}, {0x00AD1, 0x00ADF}, {0x00AE4, 0x00AE5}, + {0x00AF2, 0x00AF8}, {0x00B00, 0x00B00}, {0x00B04, 0x00B04}, {0x00B0D, 0x00B0E}, {0x00B11, 0x00B12}, + {0x00B29, 0x00B29}, {0x00B31, 0x00B31}, {0x00B34, 0x00B34}, {0x00B3A, 0x00B3B}, {0x00B45, 0x00B46}, + {0x00B49, 0x00B4A}, {0x00B4E, 0x00B54}, {0x00B58, 0x00B5B}, {0x00B5E, 0x00B5E}, {0x00B64, 0x00B65}, + {0x00B78, 0x00B81}, {0x00B84, 0x00B84}, {0x00B8B, 0x00B8D}, {0x00B91, 0x00B91}, {0x00B96, 0x00B98}, + {0x00B9B, 0x00B9B}, {0x00B9D, 0x00B9D}, {0x00BA0, 0x00BA2}, {0x00BA5, 0x00BA7}, {0x00BAB, 0x00BAD}, + {0x00BBA, 0x00BBD}, {0x00BC3, 0x00BC5}, {0x00BC9, 0x00BC9}, {0x00BCE, 0x00BCF}, {0x00BD1, 0x00BD6}, + {0x00BD8, 0x00BE5}, {0x00BFB, 0x00BFF}, {0x00C0D, 0x00C0D}, {0x00C11, 0x00C11}, {0x00C29, 0x00C29}, + {0x00C3A, 0x00C3B}, {0x00C45, 0x00C45}, {0x00C49, 0x00C49}, {0x00C4E, 0x00C54}, {0x00C57, 0x00C57}, + {0x00C5B, 0x00C5C}, {0x00C5E, 0x00C5F}, {0x00C64, 0x00C65}, {0x00C70, 0x00C76}, {0x00C8D, 0x00C8D}, + {0x00C91, 0x00C91}, {0x00CA9, 0x00CA9}, {0x00CB4, 0x00CB4}, {0x00CBA, 0x00CBB}, {0x00CC5, 0x00CC5}, + {0x00CC9, 0x00CC9}, {0x00CCE, 0x00CD4}, {0x00CD7, 0x00CDC}, {0x00CDF, 0x00CDF}, {0x00CE4, 0x00CE5}, + {0x00CF0, 0x00CF0}, {0x00CF4, 0x00CFF}, {0x00D0D, 0x00D0D}, {0x00D11, 0x00D11}, {0x00D45, 0x00D45}, + {0x00D49, 0x00D49}, {0x00D50, 0x00D53}, {0x00D64, 0x00D65}, {0x00D80, 0x00D80}, {0x00D84, 0x00D84}, + {0x00D97, 0x00D99}, {0x00DB2, 0x00DB2}, {0x00DBC, 0x00DBC}, {0x00DBE, 0x00DBF}, {0x00DC7, 0x00DC9}, + {0x00DCB, 0x00DCE}, {0x00DD5, 0x00DD5}, {0x00DD7, 0x00DD7}, {0x00DE0, 0x00DE5}, {0x00DF0, 0x00DF1}, + {0x00DF5, 0x00E00}, {0x00E3B, 0x00E3E}, {0x00E5C, 0x00E80}, {0x00E83, 0x00E83}, {0x00E85, 0x00E85}, + {0x00E8B, 0x00E8B}, 
{0x00EA4, 0x00EA4}, {0x00EA6, 0x00EA6}, {0x00EBE, 0x00EBF}, {0x00EC5, 0x00EC5}, + {0x00EC7, 0x00EC7}, {0x00ECF, 0x00ECF}, {0x00EDA, 0x00EDB}, {0x00EE0, 0x00EFF}, {0x00F48, 0x00F48}, + {0x00F6D, 0x00F70}, {0x00F98, 0x00F98}, {0x00FBD, 0x00FBD}, {0x00FCD, 0x00FCD}, {0x00FDB, 0x00FFF}, + {0x010C6, 0x010C6}, {0x010C8, 0x010CC}, {0x010CE, 0x010CF}, {0x01249, 0x01249}, {0x0124E, 0x0124F}, + {0x01257, 0x01257}, {0x01259, 0x01259}, {0x0125E, 0x0125F}, {0x01289, 0x01289}, {0x0128E, 0x0128F}, + {0x012B1, 0x012B1}, {0x012B6, 0x012B7}, {0x012BF, 0x012BF}, {0x012C1, 0x012C1}, {0x012C6, 0x012C7}, + {0x012D7, 0x012D7}, {0x01311, 0x01311}, {0x01316, 0x01317}, {0x0135B, 0x0135C}, {0x0137D, 0x0137F}, + {0x0139A, 0x0139F}, {0x013F6, 0x013F7}, {0x013FE, 0x013FF}, {0x0169D, 0x0169F}, {0x016F9, 0x016FF}, + {0x01716, 0x0171E}, {0x01737, 0x0173F}, {0x01754, 0x0175F}, {0x0176D, 0x0176D}, {0x01771, 0x01771}, + {0x01774, 0x0177F}, {0x017DE, 0x017DF}, {0x017EA, 0x017EF}, {0x017FA, 0x017FF}, {0x0181A, 0x0181F}, + {0x01879, 0x0187F}, {0x018AB, 0x018AF}, {0x018F6, 0x018FF}, {0x0191F, 0x0191F}, {0x0192C, 0x0192F}, + {0x0193C, 0x0193F}, {0x01941, 0x01943}, {0x0196E, 0x0196F}, {0x01975, 0x0197F}, {0x019AC, 0x019AF}, + {0x019CA, 0x019CF}, {0x019DB, 0x019DD}, {0x01A1C, 0x01A1D}, {0x01A5F, 0x01A5F}, {0x01A7D, 0x01A7E}, + {0x01A8A, 0x01A8F}, {0x01A9A, 0x01A9F}, {0x01AAE, 0x01AAF}, {0x01ACF, 0x01AFF}, {0x01B4D, 0x01B4D}, + {0x01BF4, 0x01BFB}, {0x01C38, 0x01C3A}, {0x01C4A, 0x01C4C}, {0x01C8B, 0x01C8F}, {0x01CBB, 0x01CBC}, + {0x01CC8, 0x01CCF}, {0x01CFB, 0x01CFF}, {0x01F16, 0x01F17}, {0x01F1E, 0x01F1F}, {0x01F46, 0x01F47}, + {0x01F4E, 0x01F4F}, {0x01F58, 0x01F58}, {0x01F5A, 0x01F5A}, {0x01F5C, 0x01F5C}, {0x01F5E, 0x01F5E}, + {0x01F7E, 0x01F7F}, {0x01FB5, 0x01FB5}, {0x01FC5, 0x01FC5}, {0x01FD4, 0x01FD5}, {0x01FDC, 0x01FDC}, + {0x01FF0, 0x01FF1}, {0x01FF5, 0x01FF5}, {0x01FFF, 0x01FFF}, {0x02065, 0x02065}, {0x02072, 0x02073}, + {0x0208F, 0x0208F}, {0x0209D, 0x0209F}, {0x020C1, 0x020CF}, {0x020F1, 0x020FF}, {0x0218C, 0x0218F}, + {0x0242A, 0x0243F}, {0x0244B, 0x0245F}, {0x02B74, 0x02B75}, {0x02B96, 0x02B96}, {0x02CF4, 0x02CF8}, + {0x02D26, 0x02D26}, {0x02D28, 0x02D2C}, {0x02D2E, 0x02D2F}, {0x02D68, 0x02D6E}, {0x02D71, 0x02D7E}, + {0x02D97, 0x02D9F}, {0x02DA7, 0x02DA7}, {0x02DAF, 0x02DAF}, {0x02DB7, 0x02DB7}, {0x02DBF, 0x02DBF}, + {0x02DC7, 0x02DC7}, {0x02DCF, 0x02DCF}, {0x02DD7, 0x02DD7}, {0x02DDF, 0x02DDF}, {0x02E5E, 0x02E7F}, + {0x02E9A, 0x02E9A}, {0x02EF4, 0x02EFF}, {0x02FD6, 0x02FEF}, {0x03040, 0x03040}, {0x03097, 0x03098}, + {0x03100, 0x03104}, {0x03130, 0x03130}, {0x0318F, 0x0318F}, {0x031E6, 0x031EE}, {0x0321F, 0x0321F}, + {0x03401, 0x04DBE}, {0x04E01, 0x09FFE}, {0x0A48D, 0x0A48F}, {0x0A4C7, 0x0A4CF}, {0x0A62C, 0x0A63F}, + {0x0A6F8, 0x0A6FF}, {0x0A7CE, 0x0A7CF}, {0x0A7D2, 0x0A7D2}, {0x0A7D4, 0x0A7D4}, {0x0A7DD, 0x0A7F1}, + {0x0A82D, 0x0A82F}, {0x0A83A, 0x0A83F}, {0x0A878, 0x0A87F}, {0x0A8C6, 0x0A8CD}, {0x0A8DA, 0x0A8DF}, + {0x0A954, 0x0A95E}, {0x0A97D, 0x0A97F}, {0x0A9CE, 0x0A9CE}, {0x0A9DA, 0x0A9DD}, {0x0A9FF, 0x0A9FF}, + {0x0AA37, 0x0AA3F}, {0x0AA4E, 0x0AA4F}, {0x0AA5A, 0x0AA5B}, {0x0AAC3, 0x0AADA}, {0x0AAF7, 0x0AB00}, + {0x0AB07, 0x0AB08}, {0x0AB0F, 0x0AB10}, {0x0AB17, 0x0AB1F}, {0x0AB27, 0x0AB27}, {0x0AB2F, 0x0AB2F}, + {0x0AB6C, 0x0AB6F}, {0x0ABEE, 0x0ABEF}, {0x0ABFA, 0x0ABFF}, {0x0AC01, 0x0D7A2}, {0x0D7A4, 0x0D7AF}, + {0x0D7C7, 0x0D7CA}, {0x0D7FC, 0x0D7FF}, {0x0FA6E, 0x0FA6F}, {0x0FADA, 0x0FAFF}, {0x0FB07, 0x0FB12}, + {0x0FB18, 0x0FB1C}, {0x0FB37, 0x0FB37}, {0x0FB3D, 0x0FB3D}, {0x0FB3F, 0x0FB3F}, {0x0FB42, 0x0FB42}, + 
{0x0FB45, 0x0FB45}, {0x0FBC3, 0x0FBD2}, {0x0FD90, 0x0FD91}, {0x0FDC8, 0x0FDCE}, {0x0FE1A, 0x0FE1F}, + {0x0FE53, 0x0FE53}, {0x0FE67, 0x0FE67}, {0x0FE6C, 0x0FE6F}, {0x0FE75, 0x0FE75}, {0x0FEFD, 0x0FEFE}, + {0x0FF00, 0x0FF00}, {0x0FFBF, 0x0FFC1}, {0x0FFC8, 0x0FFC9}, {0x0FFD0, 0x0FFD1}, {0x0FFD8, 0x0FFD9}, + {0x0FFDD, 0x0FFDF}, {0x0FFE7, 0x0FFE7}, {0x0FFEF, 0x0FFF8}, {0x1000C, 0x1000C}, {0x10027, 0x10027}, + {0x1003B, 0x1003B}, {0x1003E, 0x1003E}, {0x1004E, 0x1004F}, {0x1005E, 0x1007F}, {0x100FB, 0x100FF}, + {0x10103, 0x10106}, {0x10134, 0x10136}, {0x1018F, 0x1018F}, {0x1019D, 0x1019F}, {0x101A1, 0x101CF}, + {0x101FE, 0x1027F}, {0x1029D, 0x1029F}, {0x102D1, 0x102DF}, {0x102FC, 0x102FF}, {0x10324, 0x1032C}, + {0x1034B, 0x1034F}, {0x1037B, 0x1037F}, {0x1039E, 0x1039E}, {0x103C4, 0x103C7}, {0x103D6, 0x103FF}, + {0x1049E, 0x1049F}, {0x104AA, 0x104AF}, {0x104D4, 0x104D7}, {0x104FC, 0x104FF}, {0x10528, 0x1052F}, + {0x10564, 0x1056E}, {0x1057B, 0x1057B}, {0x1058B, 0x1058B}, {0x10593, 0x10593}, {0x10596, 0x10596}, + {0x105A2, 0x105A2}, {0x105B2, 0x105B2}, {0x105BA, 0x105BA}, {0x105BD, 0x105BF}, {0x105F4, 0x105FF}, + {0x10737, 0x1073F}, {0x10756, 0x1075F}, {0x10768, 0x1077F}, {0x10786, 0x10786}, {0x107B1, 0x107B1}, + {0x107BB, 0x107FF}, {0x10806, 0x10807}, {0x10809, 0x10809}, {0x10836, 0x10836}, {0x10839, 0x1083B}, + {0x1083D, 0x1083E}, {0x10856, 0x10856}, {0x1089F, 0x108A6}, {0x108B0, 0x108DF}, {0x108F3, 0x108F3}, + {0x108F6, 0x108FA}, {0x1091C, 0x1091E}, {0x1093A, 0x1093E}, {0x10940, 0x1097F}, {0x109B8, 0x109BB}, + {0x109D0, 0x109D1}, {0x10A04, 0x10A04}, {0x10A07, 0x10A0B}, {0x10A14, 0x10A14}, {0x10A18, 0x10A18}, + {0x10A36, 0x10A37}, {0x10A3B, 0x10A3E}, {0x10A49, 0x10A4F}, {0x10A59, 0x10A5F}, {0x10AA0, 0x10ABF}, + {0x10AE7, 0x10AEA}, {0x10AF7, 0x10AFF}, {0x10B36, 0x10B38}, {0x10B56, 0x10B57}, {0x10B73, 0x10B77}, + {0x10B92, 0x10B98}, {0x10B9D, 0x10BA8}, {0x10BB0, 0x10BFF}, {0x10C49, 0x10C7F}, {0x10CB3, 0x10CBF}, + {0x10CF3, 0x10CF9}, {0x10D28, 0x10D2F}, {0x10D3A, 0x10D3F}, {0x10D66, 0x10D68}, {0x10D86, 0x10D8D}, + {0x10D90, 0x10E5F}, {0x10E7F, 0x10E7F}, {0x10EAA, 0x10EAA}, {0x10EAE, 0x10EAF}, {0x10EB2, 0x10EC1}, + {0x10EC5, 0x10EFB}, {0x10F28, 0x10F2F}, {0x10F5A, 0x10F6F}, {0x10F8A, 0x10FAF}, {0x10FCC, 0x10FDF}, + {0x10FF7, 0x10FFF}, {0x1104E, 0x11051}, {0x11076, 0x1107E}, {0x110C3, 0x110CC}, {0x110CE, 0x110CF}, + {0x110E9, 0x110EF}, {0x110FA, 0x110FF}, {0x11135, 0x11135}, {0x11148, 0x1114F}, {0x11177, 0x1117F}, + {0x111E0, 0x111E0}, {0x111F5, 0x111FF}, {0x11212, 0x11212}, {0x11242, 0x1127F}, {0x11287, 0x11287}, + {0x11289, 0x11289}, {0x1128E, 0x1128E}, {0x1129E, 0x1129E}, {0x112AA, 0x112AF}, {0x112EB, 0x112EF}, + {0x112FA, 0x112FF}, {0x11304, 0x11304}, {0x1130D, 0x1130E}, {0x11311, 0x11312}, {0x11329, 0x11329}, + {0x11331, 0x11331}, {0x11334, 0x11334}, {0x1133A, 0x1133A}, {0x11345, 0x11346}, {0x11349, 0x1134A}, + {0x1134E, 0x1134F}, {0x11351, 0x11356}, {0x11358, 0x1135C}, {0x11364, 0x11365}, {0x1136D, 0x1136F}, + {0x11375, 0x1137F}, {0x1138A, 0x1138A}, {0x1138C, 0x1138D}, {0x1138F, 0x1138F}, {0x113B6, 0x113B6}, + {0x113C1, 0x113C1}, {0x113C3, 0x113C4}, {0x113C6, 0x113C6}, {0x113CB, 0x113CB}, {0x113D6, 0x113D6}, + {0x113D9, 0x113E0}, {0x113E3, 0x113FF}, {0x1145C, 0x1145C}, {0x11462, 0x1147F}, {0x114C8, 0x114CF}, + {0x114DA, 0x1157F}, {0x115B6, 0x115B7}, {0x115DE, 0x115FF}, {0x11645, 0x1164F}, {0x1165A, 0x1165F}, + {0x1166D, 0x1167F}, {0x116BA, 0x116BF}, {0x116CA, 0x116CF}, {0x116E4, 0x116FF}, {0x1171B, 0x1171C}, + {0x1172C, 0x1172F}, {0x11747, 0x117FF}, {0x1183C, 0x1189F}, {0x118F3, 0x118FE}, 
{0x11907, 0x11908}, + {0x1190A, 0x1190B}, {0x11914, 0x11914}, {0x11917, 0x11917}, {0x11936, 0x11936}, {0x11939, 0x1193A}, + {0x11947, 0x1194F}, {0x1195A, 0x1199F}, {0x119A8, 0x119A9}, {0x119D8, 0x119D9}, {0x119E5, 0x119FF}, + {0x11A48, 0x11A4F}, {0x11AA3, 0x11AAF}, {0x11AF9, 0x11AFF}, {0x11B0A, 0x11BBF}, {0x11BE2, 0x11BEF}, + {0x11BFA, 0x11BFF}, {0x11C09, 0x11C09}, {0x11C37, 0x11C37}, {0x11C46, 0x11C4F}, {0x11C6D, 0x11C6F}, + {0x11C90, 0x11C91}, {0x11CA8, 0x11CA8}, {0x11CB7, 0x11CFF}, {0x11D07, 0x11D07}, {0x11D0A, 0x11D0A}, + {0x11D37, 0x11D39}, {0x11D3B, 0x11D3B}, {0x11D3E, 0x11D3E}, {0x11D48, 0x11D4F}, {0x11D5A, 0x11D5F}, + {0x11D66, 0x11D66}, {0x11D69, 0x11D69}, {0x11D8F, 0x11D8F}, {0x11D92, 0x11D92}, {0x11D99, 0x11D9F}, + {0x11DAA, 0x11EDF}, {0x11EF9, 0x11EFF}, {0x11F11, 0x11F11}, {0x11F3B, 0x11F3D}, {0x11F5B, 0x11FAF}, + {0x11FB1, 0x11FBF}, {0x11FF2, 0x11FFE}, {0x1239A, 0x123FF}, {0x1246F, 0x1246F}, {0x12475, 0x1247F}, + {0x12544, 0x12F8F}, {0x12FF3, 0x12FFF}, {0x13456, 0x1345F}, {0x143FB, 0x143FF}, {0x14647, 0x160FF}, + {0x1613A, 0x167FF}, {0x16A39, 0x16A3F}, {0x16A5F, 0x16A5F}, {0x16A6A, 0x16A6D}, {0x16ABF, 0x16ABF}, + {0x16ACA, 0x16ACF}, {0x16AEE, 0x16AEF}, {0x16AF6, 0x16AFF}, {0x16B46, 0x16B4F}, {0x16B5A, 0x16B5A}, + {0x16B62, 0x16B62}, {0x16B78, 0x16B7C}, {0x16B90, 0x16D3F}, {0x16D7A, 0x16E3F}, {0x16E9B, 0x16EFF}, + {0x16F4B, 0x16F4E}, {0x16F88, 0x16F8E}, {0x16FA0, 0x16FDF}, {0x16FE5, 0x16FEF}, {0x16FF2, 0x16FFF}, + {0x17001, 0x187F6}, {0x187F8, 0x187FF}, {0x18CD6, 0x18CFE}, {0x18D01, 0x18D07}, {0x18D09, 0x1AFEF}, + {0x1AFF4, 0x1AFF4}, {0x1AFFC, 0x1AFFC}, {0x1AFFF, 0x1AFFF}, {0x1B123, 0x1B131}, {0x1B133, 0x1B14F}, + {0x1B153, 0x1B154}, {0x1B156, 0x1B163}, {0x1B168, 0x1B16F}, {0x1B2FC, 0x1BBFF}, {0x1BC6B, 0x1BC6F}, + {0x1BC7D, 0x1BC7F}, {0x1BC89, 0x1BC8F}, {0x1BC9A, 0x1BC9B}, {0x1BCA4, 0x1CBFF}, {0x1CCFA, 0x1CCFF}, + {0x1CEB4, 0x1CEFF}, {0x1CF2E, 0x1CF2F}, {0x1CF47, 0x1CF4F}, {0x1CFC4, 0x1CFFF}, {0x1D0F6, 0x1D0FF}, + {0x1D127, 0x1D128}, {0x1D1EB, 0x1D1FF}, {0x1D246, 0x1D2BF}, {0x1D2D4, 0x1D2DF}, {0x1D2F4, 0x1D2FF}, + {0x1D357, 0x1D35F}, {0x1D379, 0x1D3FF}, {0x1D455, 0x1D455}, {0x1D49D, 0x1D49D}, {0x1D4A0, 0x1D4A1}, + {0x1D4A3, 0x1D4A4}, {0x1D4A7, 0x1D4A8}, {0x1D4AD, 0x1D4AD}, {0x1D4BA, 0x1D4BA}, {0x1D4BC, 0x1D4BC}, + {0x1D4C4, 0x1D4C4}, {0x1D506, 0x1D506}, {0x1D50B, 0x1D50C}, {0x1D515, 0x1D515}, {0x1D51D, 0x1D51D}, + {0x1D53A, 0x1D53A}, {0x1D53F, 0x1D53F}, {0x1D545, 0x1D545}, {0x1D547, 0x1D549}, {0x1D551, 0x1D551}, + {0x1D6A6, 0x1D6A7}, {0x1D7CC, 0x1D7CD}, {0x1DA8C, 0x1DA9A}, {0x1DAA0, 0x1DAA0}, {0x1DAB0, 0x1DEFF}, + {0x1DF1F, 0x1DF24}, {0x1DF2B, 0x1DFFF}, {0x1E007, 0x1E007}, {0x1E019, 0x1E01A}, {0x1E022, 0x1E022}, + {0x1E025, 0x1E025}, {0x1E02B, 0x1E02F}, {0x1E06E, 0x1E08E}, {0x1E090, 0x1E0FF}, {0x1E12D, 0x1E12F}, + {0x1E13E, 0x1E13F}, {0x1E14A, 0x1E14D}, {0x1E150, 0x1E28F}, {0x1E2AF, 0x1E2BF}, {0x1E2FA, 0x1E2FE}, + {0x1E300, 0x1E4CF}, {0x1E4FA, 0x1E5CF}, {0x1E5FB, 0x1E5FE}, {0x1E600, 0x1E7DF}, {0x1E7E7, 0x1E7E7}, + {0x1E7EC, 0x1E7EC}, {0x1E7EF, 0x1E7EF}, {0x1E7FF, 0x1E7FF}, {0x1E8C5, 0x1E8C6}, {0x1E8D7, 0x1E8FF}, + {0x1E94C, 0x1E94F}, {0x1E95A, 0x1E95D}, {0x1E960, 0x1EC70}, {0x1ECB5, 0x1ED00}, {0x1ED3E, 0x1EDFF}, + {0x1EE04, 0x1EE04}, {0x1EE20, 0x1EE20}, {0x1EE23, 0x1EE23}, {0x1EE25, 0x1EE26}, {0x1EE28, 0x1EE28}, + {0x1EE33, 0x1EE33}, {0x1EE38, 0x1EE38}, {0x1EE3A, 0x1EE3A}, {0x1EE3C, 0x1EE41}, {0x1EE43, 0x1EE46}, + {0x1EE48, 0x1EE48}, {0x1EE4A, 0x1EE4A}, {0x1EE4C, 0x1EE4C}, {0x1EE50, 0x1EE50}, {0x1EE53, 0x1EE53}, + {0x1EE55, 0x1EE56}, {0x1EE58, 0x1EE58}, {0x1EE5A, 0x1EE5A}, 
{0x1EE5C, 0x1EE5C}, {0x1EE5E, 0x1EE5E}, + {0x1EE60, 0x1EE60}, {0x1EE63, 0x1EE63}, {0x1EE65, 0x1EE66}, {0x1EE6B, 0x1EE6B}, {0x1EE73, 0x1EE73}, + {0x1EE78, 0x1EE78}, {0x1EE7D, 0x1EE7D}, {0x1EE7F, 0x1EE7F}, {0x1EE8A, 0x1EE8A}, {0x1EE9C, 0x1EEA0}, + {0x1EEA4, 0x1EEA4}, {0x1EEAA, 0x1EEAA}, {0x1EEBC, 0x1EEEF}, {0x1EEF2, 0x1EFFF}, {0x1F02C, 0x1F02F}, + {0x1F094, 0x1F09F}, {0x1F0AF, 0x1F0B0}, {0x1F0C0, 0x1F0C0}, {0x1F0D0, 0x1F0D0}, {0x1F0F6, 0x1F0FF}, + {0x1F1AE, 0x1F1E5}, {0x1F203, 0x1F20F}, {0x1F23C, 0x1F23F}, {0x1F249, 0x1F24F}, {0x1F252, 0x1F25F}, + {0x1F266, 0x1F2FF}, {0x1F6D8, 0x1F6DB}, {0x1F6ED, 0x1F6EF}, {0x1F6FD, 0x1F6FF}, {0x1F777, 0x1F77A}, + {0x1F7DA, 0x1F7DF}, {0x1F7EC, 0x1F7EF}, {0x1F7F1, 0x1F7FF}, {0x1F80C, 0x1F80F}, {0x1F848, 0x1F84F}, + {0x1F85A, 0x1F85F}, {0x1F888, 0x1F88F}, {0x1F8AE, 0x1F8AF}, {0x1F8BC, 0x1F8BF}, {0x1F8C2, 0x1F8FF}, + {0x1FA54, 0x1FA5F}, {0x1FA6E, 0x1FA6F}, {0x1FA7D, 0x1FA7F}, {0x1FA8A, 0x1FA8E}, {0x1FAC7, 0x1FACD}, + {0x1FADD, 0x1FADE}, {0x1FAEA, 0x1FAEF}, {0x1FAF9, 0x1FAFF}, {0x1FB93, 0x1FB93}, {0x1FBFA, 0x1FFFD}, + {0x20001, 0x2A6DE}, {0x2A6E0, 0x2A6FF}, {0x2A701, 0x2B738}, {0x2B73A, 0x2B73F}, {0x2B741, 0x2B81C}, + {0x2B81E, 0x2B81F}, {0x2B821, 0x2CEA0}, {0x2CEA2, 0x2CEAF}, {0x2CEB1, 0x2EBDF}, {0x2EBE1, 0x2EBEF}, + {0x2EBF1, 0x2EE5C}, {0x2EE5E, 0x2F7FF}, {0x2FA1E, 0x2FFFD}, {0x30001, 0x31349}, {0x3134B, 0x3134F}, + {0x31351, 0x323AE}, {0x323B0, 0x3FFFD}, {0x40000, 0x4FFFD}, {0x50000, 0x5FFFD}, {0x60000, 0x6FFFD}, + {0x70000, 0x7FFFD}, {0x80000, 0x8FFFD}, {0x90000, 0x9FFFD}, {0xA0000, 0xAFFFD}, {0xB0000, 0xBFFFD}, + {0xC0000, 0xCFFFD}, {0xD0000, 0xDFFFD}, {0xE0000, 0xE0000}, {0xE0002, 0xE001F}, {0xE0080, 0xE00FF}, + {0xE01F0, 0xEFFFD}}; /* Non-characters. */ static const struct widechar_range widechar_nonchar_table[] = { @@ -1450,88 +362,36 @@ static const struct widechar_range widechar_nonchar_table[] = { {0xDFFFE, 0xDFFFF}, {0xEFFFE, 0xEFFFF}, {0xFFFFE, 0xFFFFF}, - {0x10FFFE, 0x10FFFF} -}; + {0x10FFFE, 0x10FFFF}}; /* Characters that were widened from width 1 to 2 in Unicode 9. 
 */
 static const struct widechar_range widechar_widened_table[] = {
-    {0x0231A, 0x0231B},
-    {0x023E9, 0x023EC},
-    {0x023F0, 0x023F0},
-    {0x023F3, 0x023F3},
-    {0x025FD, 0x025FE},
-    {0x02614, 0x02615},
-    {0x02648, 0x02653},
-    {0x0267F, 0x0267F},
-    {0x02693, 0x02693},
-    {0x026A1, 0x026A1},
-    {0x026AA, 0x026AB},
-    {0x026BD, 0x026BE},
-    {0x026C4, 0x026C5},
-    {0x026CE, 0x026CE},
-    {0x026D4, 0x026D4},
-    {0x026EA, 0x026EA},
-    {0x026F2, 0x026F3},
-    {0x026F5, 0x026F5},
-    {0x026FA, 0x026FA},
-    {0x026FD, 0x026FD},
-    {0x02705, 0x02705},
-    {0x0270A, 0x0270B},
-    {0x02728, 0x02728},
-    {0x0274C, 0x0274C},
-    {0x0274E, 0x0274E},
-    {0x02753, 0x02755},
-    {0x02757, 0x02757},
-    {0x02795, 0x02797},
-    {0x027B0, 0x027B0},
-    {0x027BF, 0x027BF},
-    {0x02B1B, 0x02B1C},
-    {0x02B50, 0x02B50},
-    {0x02B55, 0x02B55},
-    {0x1F004, 0x1F004},
-    {0x1F0CF, 0x1F0CF},
-    {0x1F18E, 0x1F18E},
-    {0x1F191, 0x1F19A},
-    {0x1F201, 0x1F201},
-    {0x1F21A, 0x1F21A},
-    {0x1F22F, 0x1F22F},
-    {0x1F232, 0x1F236},
-    {0x1F238, 0x1F23A},
-    {0x1F250, 0x1F251},
-    {0x1F300, 0x1F320},
-    {0x1F32D, 0x1F335},
-    {0x1F337, 0x1F37C},
-    {0x1F37E, 0x1F393},
-    {0x1F3A0, 0x1F3CA},
-    {0x1F3CF, 0x1F3D3},
-    {0x1F3E0, 0x1F3F0},
-    {0x1F3F4, 0x1F3F4},
-    {0x1F3F8, 0x1F43E},
-    {0x1F440, 0x1F440},
-    {0x1F442, 0x1F4FC},
-    {0x1F4FF, 0x1F53D},
-    {0x1F54B, 0x1F54E},
-    {0x1F550, 0x1F567},
-    {0x1F595, 0x1F596},
-    {0x1F5FB, 0x1F64F},
-    {0x1F680, 0x1F6C5},
-    {0x1F6CC, 0x1F6CC},
-    {0x1F6D0, 0x1F6D0},
-    {0x1F6EB, 0x1F6EC},
-    {0x1F910, 0x1F918},
-    {0x1F980, 0x1F984},
-    {0x1F9C0, 0x1F9C0}
-};
+    {0x0231A, 0x0231B}, {0x023E9, 0x023EC}, {0x023F0, 0x023F0}, {0x023F3, 0x023F3}, {0x025FD, 0x025FE},
+    {0x02614, 0x02615}, {0x02648, 0x02653}, {0x0267F, 0x0267F}, {0x02693, 0x02693}, {0x026A1, 0x026A1},
+    {0x026AA, 0x026AB}, {0x026BD, 0x026BE}, {0x026C4, 0x026C5}, {0x026CE, 0x026CE}, {0x026D4, 0x026D4},
+    {0x026EA, 0x026EA}, {0x026F2, 0x026F3}, {0x026F5, 0x026F5}, {0x026FA, 0x026FA}, {0x026FD, 0x026FD},
+    {0x02705, 0x02705}, {0x0270A, 0x0270B}, {0x02728, 0x02728}, {0x0274C, 0x0274C}, {0x0274E, 0x0274E},
+    {0x02753, 0x02755}, {0x02757, 0x02757}, {0x02795, 0x02797}, {0x027B0, 0x027B0}, {0x027BF, 0x027BF},
+    {0x02B1B, 0x02B1C}, {0x02B50, 0x02B50}, {0x02B55, 0x02B55}, {0x1F004, 0x1F004}, {0x1F0CF, 0x1F0CF},
+    {0x1F18E, 0x1F18E}, {0x1F191, 0x1F19A}, {0x1F201, 0x1F201}, {0x1F21A, 0x1F21A}, {0x1F22F, 0x1F22F},
+    {0x1F232, 0x1F236}, {0x1F238, 0x1F23A}, {0x1F250, 0x1F251}, {0x1F300, 0x1F320}, {0x1F32D, 0x1F335},
+    {0x1F337, 0x1F37C}, {0x1F37E, 0x1F393}, {0x1F3A0, 0x1F3CA}, {0x1F3CF, 0x1F3D3}, {0x1F3E0, 0x1F3F0},
+    {0x1F3F4, 0x1F3F4}, {0x1F3F8, 0x1F43E}, {0x1F440, 0x1F440}, {0x1F442, 0x1F4FC}, {0x1F4FF, 0x1F53D},
+    {0x1F54B, 0x1F54E}, {0x1F550, 0x1F567}, {0x1F595, 0x1F596}, {0x1F5FB, 0x1F64F}, {0x1F680, 0x1F6C5},
+    {0x1F6CC, 0x1F6CC}, {0x1F6D0, 0x1F6D0}, {0x1F6EB, 0x1F6EC}, {0x1F910, 0x1F918}, {0x1F980, 0x1F984},
+    {0x1F9C0, 0x1F9C0}};
 
 template <typename Collection>
-bool widechar_in_table(const Collection &arr, uint32_t c) {
-    auto where = std::lower_bound(std::begin(arr), std::end(arr), c,
-        [](widechar_range p, uint32_t c) { return p.hi < c; });
+bool widechar_in_table(const Collection & arr, uint32_t c)
+{
+    auto where =
+        std::lower_bound(std::begin(arr), std::end(arr), c, [](widechar_range p, uint32_t c) { return p.hi < c; });
     return where != std::end(arr) && where->lo <= c;
 }
 
 /* Return the width of character c, or a special negative value.
*/ -int widechar_wcwidth(uint32_t c) { +int widechar_wcwidth(uint32_t c) +{ if (widechar_in_table(widechar_ascii_table, c)) return 1; if (widechar_in_table(widechar_private_table, c)) diff --git a/src/libutil/windows/environment-variables.cc b/src/libutil/windows/environment-variables.cc index d7cc7b488..c76c12345 100644 --- a/src/libutil/windows/environment-variables.cc +++ b/src/libutil/windows/environment-variables.cc @@ -45,5 +45,5 @@ int setEnvOs(const OsString & name, const OsString & value) return -SetEnvironmentVariableW(name.c_str(), value.c_str()); } -} +} // namespace nix #endif diff --git a/src/libutil/windows/file-descriptor.cc b/src/libutil/windows/file-descriptor.cc index 03d68232c..3c3e7ea45 100644 --- a/src/libutil/windows/file-descriptor.cc +++ b/src/libutil/windows/file-descriptor.cc @@ -6,12 +6,12 @@ #include "nix/util/file-path.hh" #ifdef _WIN32 -#include -#include -#include -#include -#define WIN32_LEAN_AND_MEAN -#include +# include +# include +# include +# include +# define WIN32_LEAN_AND_MEAN +# include namespace nix { @@ -26,7 +26,6 @@ std::string readFile(HANDLE handle) return drainFD(handle, true, li.QuadPart); } - void readFull(HANDLE handle, char * buf, size_t count) { while (count) { @@ -34,34 +33,34 @@ void readFull(HANDLE handle, char * buf, size_t count) DWORD res; if (!ReadFile(handle, (char *) buf, count, &res, NULL)) throw WinError("%s:%d reading from file", __FILE__, __LINE__); - if (res == 0) throw EndOfFile("unexpected end-of-file"); + if (res == 0) + throw EndOfFile("unexpected end-of-file"); count -= res; buf += res; } } - void writeFull(HANDLE handle, std::string_view s, bool allowInterrupts) { while (!s.empty()) { - if (allowInterrupts) checkInterrupt(); + if (allowInterrupts) + checkInterrupt(); DWORD res; -#if _WIN32_WINNT >= 0x0600 +# if _WIN32_WINNT >= 0x0600 auto path = handleToPath(handle); // debug; do it before because handleToPath changes lasterror if (!WriteFile(handle, s.data(), s.size(), &res, NULL)) { throw WinError("writing to file %1%:%2%", handle, path); } -#else +# else if (!WriteFile(handle, s.data(), s.size(), &res, NULL)) { throw WinError("writing to file %1%", handle); } -#endif +# endif if (res > 0) s.remove_prefix(res); } } - std::string readLine(HANDLE handle, bool eofOk) { std::string s; @@ -77,16 +76,15 @@ std::string readLine(HANDLE handle, bool eofOk) return s; else throw EndOfFile("unexpected EOF reading a line"); - } - else { - if (ch == '\n') return s; + } else { + if (ch == '\n') + return s; s += ch; } } } - -void drainFD(HANDLE handle, Sink & sink/*, bool block*/) +void drainFD(HANDLE handle, Sink & sink /*, bool block*/) { std::vector buf(64 * 1024); while (1) { @@ -97,16 +95,14 @@ void drainFD(HANDLE handle, Sink & sink/*, bool block*/) if (winError.lastError == ERROR_BROKEN_PIPE) break; throw winError; - } - else if (rd == 0) break; + } else if (rd == 0) + break; sink({(char *) buf.data(), (size_t) rd}); } } - ////////////////////////////////////////////////////////////////////// - void Pipe::create() { SECURITY_ATTRIBUTES saAttr = {0}; @@ -122,35 +118,38 @@ void Pipe::create() writeSide = hWritePipe; } - ////////////////////////////////////////////////////////////////////// -#if _WIN32_WINNT >= 0x0600 +# if _WIN32_WINNT >= 0x0600 -std::wstring windows::handleToFileName(HANDLE handle) { +std::wstring windows::handleToFileName(HANDLE handle) +{ std::vector buf(0x100); DWORD dw = GetFinalPathNameByHandleW(handle, buf.data(), buf.size(), FILE_NAME_OPENED); if (dw == 0) { - if (handle == 
GetStdHandle(STD_INPUT_HANDLE )) return L""; - if (handle == GetStdHandle(STD_OUTPUT_HANDLE)) return L""; - if (handle == GetStdHandle(STD_ERROR_HANDLE )) return L""; + if (handle == GetStdHandle(STD_INPUT_HANDLE)) + return L""; + if (handle == GetStdHandle(STD_OUTPUT_HANDLE)) + return L""; + if (handle == GetStdHandle(STD_ERROR_HANDLE)) + return L""; return (boost::wformat(L"") % handle).str(); } if (dw > buf.size()) { buf.resize(dw); - if (GetFinalPathNameByHandleW(handle, buf.data(), buf.size(), FILE_NAME_OPENED) != dw-1) + if (GetFinalPathNameByHandleW(handle, buf.data(), buf.size(), FILE_NAME_OPENED) != dw - 1) throw WinError("GetFinalPathNameByHandleW"); dw -= 1; } return std::wstring(buf.data(), dw); } - -Path windows::handleToPath(HANDLE handle) { +Path windows::handleToPath(HANDLE handle) +{ return os_string_to_string(handleToFileName(handle)); } -#endif +# endif -} +} // namespace nix #endif diff --git a/src/libutil/windows/file-path.cc b/src/libutil/windows/file-path.cc index 03cc5afe5..7913b3d5d 100644 --- a/src/libutil/windows/file-path.cc +++ b/src/libutil/windows/file-path.cc @@ -11,14 +11,15 @@ namespace nix { std::optional maybePath(PathView path) { - if (path.length() >= 3 && (('A' <= path[0] && path[0] <= 'Z') || ('a' <= path[0] && path[0] <= 'z')) && path[1] == ':' && WindowsPathTrait::isPathSep(path[2])) { - std::filesystem::path::string_type sw = string_to_os_string( - std::string { "\\\\?\\" } + path); + if (path.length() >= 3 && (('A' <= path[0] && path[0] <= 'Z') || ('a' <= path[0] && path[0] <= 'z')) + && path[1] == ':' && WindowsPathTrait::isPathSep(path[2])) { + std::filesystem::path::string_type sw = string_to_os_string(std::string{"\\\\?\\"} + path); std::replace(sw.begin(), sw.end(), '/', '\\'); return sw; } - if (path.length() >= 7 && path[0] == '\\' && path[1] == '\\' && (path[2] == '.' || path[2] == '?') && path[3] == '\\' && - ('A' <= path[4] && path[4] <= 'Z') && path[5] == ':' && WindowsPathTrait::isPathSep(path[6])) { + if (path.length() >= 7 && path[0] == '\\' && path[1] == '\\' && (path[2] == '.' || path[2] == '?') + && path[3] == '\\' && ('A' <= path[4] && path[4] <= 'Z') && path[5] == ':' + && WindowsPathTrait::isPathSep(path[6])) { std::filesystem::path::string_type sw = string_to_os_string(path); std::replace(sw.begin(), sw.end(), '/', '\\'); return sw; @@ -31,10 +32,10 @@ std::filesystem::path pathNG(PathView path) std::optional sw = maybePath(path); if (!sw) { // FIXME why are we not using the regular error handling? - std::cerr << "invalid path for WinAPI call ["< +# include -#include "nix/util/error.hh" +# include "nix/util/error.hh" namespace nix::windows { @@ -25,8 +25,9 @@ public: * information to the message. */ template - WinError(DWORD lastError, const Args & ... args) - : SystemError(""), lastError(lastError) + WinError(DWORD lastError, const Args &... args) + : SystemError("") + , lastError(lastError) { auto hf = HintFmt(args...); err.msg = HintFmt("%1%: %2%", Uncolored(hf.str()), renderError(lastError)); @@ -39,8 +40,8 @@ public: * before calling this constructor! */ template - WinError(const Args & ... args) - : WinError(GetLastError(), args ...) + WinError(const Args &... args) + : WinError(GetLastError(), args...) 
{ } @@ -49,5 +50,5 @@ private: std::string renderError(DWORD lastError); }; -} +} // namespace nix::windows #endif diff --git a/src/libutil/windows/muxable-pipe.cc b/src/libutil/windows/muxable-pipe.cc index 82ef40665..b2eff70e6 100644 --- a/src/libutil/windows/muxable-pipe.cc +++ b/src/libutil/windows/muxable-pipe.cc @@ -68,5 +68,5 @@ void MuxablePipePollState::iterate( } } -} +} // namespace nix #endif diff --git a/src/libutil/windows/os-string.cc b/src/libutil/windows/os-string.cc index 8c8a27a9f..d6f8e3670 100644 --- a/src/libutil/windows/os-string.cc +++ b/src/libutil/windows/os-string.cc @@ -23,6 +23,6 @@ std::filesystem::path::string_type string_to_os_string(std::string_view s) return converter.from_bytes(std::string{s}); } -} +} // namespace nix #endif diff --git a/src/libutil/windows/processes.cc b/src/libutil/windows/processes.cc index 099dff31b..f8f2900e5 100644 --- a/src/libutil/windows/processes.cc +++ b/src/libutil/windows/processes.cc @@ -25,8 +25,8 @@ #ifdef _WIN32 -#define WIN32_LEAN_AND_MEAN -#include +# define WIN32_LEAN_AND_MEAN +# include namespace nix { @@ -84,8 +84,13 @@ int Pid::wait() std::string runProgram( Path program, bool lookupPath, const Strings & args, const std::optional & input, bool isInteractive) { - auto res = runProgram(RunOptions{ - .program = program, .lookupPath = lookupPath, .args = args, .input = input, .isInteractive = isInteractive}); + auto res = runProgram( + RunOptions{ + .program = program, + .lookupPath = lookupPath, + .args = args, + .input = input, + .isInteractive = isInteractive}); if (!statusOk(res.first)) throw ExecError(res.first, "program '%1%' %2%", program, statusToString(res.first)); @@ -383,6 +388,6 @@ int execvpe(const wchar_t * file0, const wchar_t * const argv[], const wchar_t * return _wexecve(file.c_str(), argv, envp); } -} +} // namespace nix #endif diff --git a/src/libutil/windows/users.cc b/src/libutil/windows/users.cc index 90da0281f..6cc753cec 100644 --- a/src/libutil/windows/users.cc +++ b/src/libutil/windows/users.cc @@ -5,8 +5,8 @@ #include "nix/util/windows-error.hh" #ifdef _WIN32 -#define WIN32_LEAN_AND_MEAN -#include +# define WIN32_LEAN_AND_MEAN +# include namespace nix { @@ -37,8 +37,7 @@ std::string getUserName() Path getHome() { - static Path homeDir = []() - { + static Path homeDir = []() { Path homeDir = getEnv("USERPROFILE").value_or("C:\\Users\\Default"); assert(!homeDir.empty()); return canonPath(homeDir); @@ -46,9 +45,10 @@ Path getHome() return homeDir; } -bool isRootUser() { +bool isRootUser() +{ return false; } -} +} // namespace nix #endif diff --git a/src/libutil/windows/windows-async-pipe.cc b/src/libutil/windows/windows-async-pipe.cc index da47c37a8..f6a82a139 100644 --- a/src/libutil/windows/windows-async-pipe.cc +++ b/src/libutil/windows/windows-async-pipe.cc @@ -49,6 +49,6 @@ void AsyncPipe::close() writeSide.close(); } -} +} // namespace nix::windows #endif diff --git a/src/libutil/windows/windows-error.cc b/src/libutil/windows/windows-error.cc index 0761bdfd5..dd731dce2 100644 --- a/src/libutil/windows/windows-error.cc +++ b/src/libutil/windows/windows-error.cc @@ -1,8 +1,8 @@ #ifdef _WIN32 -#include "nix/util/windows-error.hh" -#include -#define WIN32_LEAN_AND_MEAN -#include +# include "nix/util/windows-error.hh" +# include +# define WIN32_LEAN_AND_MEAN +# include namespace nix::windows { @@ -10,23 +10,25 @@ std::string WinError::renderError(DWORD lastError) { LPSTR errorText = NULL; - FormatMessageA( FORMAT_MESSAGE_FROM_SYSTEM // use system message tables to retrieve error text - 
|FORMAT_MESSAGE_ALLOCATE_BUFFER // allocate buffer on local heap for error text - |FORMAT_MESSAGE_IGNORE_INSERTS, // Important! will fail otherwise, since we're not (and CANNOT) pass insertion parameters - NULL, // unused with FORMAT_MESSAGE_FROM_SYSTEM - lastError, - MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), - (LPTSTR)&errorText, // output - 0, // minimum size for output buffer - NULL); // arguments - see note + FormatMessageA( + FORMAT_MESSAGE_FROM_SYSTEM // use system message tables to retrieve error text + | FORMAT_MESSAGE_ALLOCATE_BUFFER // allocate buffer on local heap for error text + | FORMAT_MESSAGE_IGNORE_INSERTS, // Important! will fail otherwise, since we're not (and CANNOT) pass + // insertion parameters + NULL, // unused with FORMAT_MESSAGE_FROM_SYSTEM + lastError, + MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), + (LPTSTR) &errorText, // output + 0, // minimum size for output buffer + NULL); // arguments - see note - if (NULL != errorText ) { - std::string s2 { errorText }; + if (NULL != errorText) { + std::string s2{errorText}; LocalFree(errorText); return s2; } return fmt("CODE=%d", lastError); } -} +} // namespace nix::windows #endif diff --git a/src/libutil/xml-writer.cc b/src/libutil/xml-writer.cc index e460dd169..9b7ca969d 100644 --- a/src/libutil/xml-writer.cc +++ b/src/libutil/xml-writer.cc @@ -2,95 +2,95 @@ #include "nix/util/xml-writer.hh" - namespace nix { - XMLWriter::XMLWriter(bool indent, std::ostream & output) - : output(output), indent(indent) + : output(output) + , indent(indent) { output << "" << std::endl; closed = false; } - XMLWriter::~XMLWriter() { close(); } - void XMLWriter::close() { - if (closed) return; - while (!pendingElems.empty()) closeElement(); + if (closed) + return; + while (!pendingElems.empty()) + closeElement(); closed = true; } - void XMLWriter::indent_(size_t depth) { - if (!indent) return; + if (!indent) + return; output << std::string(depth * 2, ' '); } - -void XMLWriter::openElement( - std::string_view name, - const XMLAttrs & attrs) +void XMLWriter::openElement(std::string_view name, const XMLAttrs & attrs) { assert(!closed); indent_(pendingElems.size()); output << "<" << name; writeAttrs(attrs); output << ">"; - if (indent) output << std::endl; + if (indent) + output << std::endl; pendingElems.push_back(std::string(name)); } - void XMLWriter::closeElement() { assert(!pendingElems.empty()); indent_(pendingElems.size() - 1); output << ""; - if (indent) output << std::endl; + if (indent) + output << std::endl; pendingElems.pop_back(); - if (pendingElems.empty()) closed = true; + if (pendingElems.empty()) + closed = true; } - -void XMLWriter::writeEmptyElement( - std::string_view name, - const XMLAttrs & attrs) +void XMLWriter::writeEmptyElement(std::string_view name, const XMLAttrs & attrs) { assert(!closed); indent_(pendingElems.size()); output << "<" << name; writeAttrs(attrs); output << " />"; - if (indent) output << std::endl; + if (indent) + output << std::endl; } - void XMLWriter::writeAttrs(const XMLAttrs & attrs) { for (auto & i : attrs) { output << " " << i.first << "=\""; for (size_t j = 0; j < i.second.size(); ++j) { char c = i.second[j]; - if (c == '"') output << """; - else if (c == '<') output << "<"; - else if (c == '>') output << ">"; - else if (c == '&') output << "&"; + if (c == '"') + output << """; + else if (c == '<') + output << "<"; + else if (c == '>') + output << ">"; + else if (c == '&') + output << "&"; /* Escape newlines to prevent attribute normalisation (see XML spec, section 3.3.3. 
*/ - else if (c == '\n') output << " "; - else output << c; + else if (c == '\n') + output << " "; + else + output << c; } output << "\""; } } - -} +} // namespace nix diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 3977aaffe..d61a2f282 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -33,7 +33,7 @@ using namespace nix; using namespace std::string_literals; -extern char * * environ __attribute__((weak)); +extern char ** environ __attribute__((weak)); /* Recreate the effect of the perl shellwords function, breaking up a * string into arguments like a shell word, including escapes @@ -44,11 +44,9 @@ static std::vector shellwords(std::string_view s) auto begin = s.cbegin(); std::vector res; std::string cur; - enum state { - sBegin, - sSingleQuote, - sDoubleQuote - }; + + enum state { sBegin, sSingleQuote, sDoubleQuote }; + state st = sBegin; auto it = begin; for (; it != s.cend(); ++it) { @@ -58,36 +56,38 @@ static std::vector shellwords(std::string_view s) cur.append(begin, it); res.push_back(cur); it = match[0].second; - if (it == s.cend()) return res; + if (it == s.cend()) + return res; begin = it; cur.clear(); } } switch (*it) { - case '\'': - if (st != sDoubleQuote) { - cur.append(begin, it); - begin = it + 1; - st = st == sBegin ? sSingleQuote : sBegin; - } - break; - case '"': - if (st != sSingleQuote) { - cur.append(begin, it); - begin = it + 1; - st = st == sBegin ? sDoubleQuote : sBegin; - } - break; - case '\\': - if (st != sSingleQuote) { - /* perl shellwords mostly just treats the next char as part of the string with no special processing */ - cur.append(begin, it); - begin = ++it; - } - break; + case '\'': + if (st != sDoubleQuote) { + cur.append(begin, it); + begin = it + 1; + st = st == sBegin ? sSingleQuote : sBegin; + } + break; + case '"': + if (st != sSingleQuote) { + cur.append(begin, it); + begin = it + 1; + st = st == sBegin ? sDoubleQuote : sBegin; + } + break; + case '\\': + if (st != sSingleQuote) { + /* perl shellwords mostly just treats the next char as part of the string with no special processing */ + cur.append(begin, it); + begin = ++it; + } + break; } } - if (st != sBegin) throw Error("unterminated quote in shebang line"); + if (st != sBegin) + throw Error("unterminated quote in shebang line"); cur.append(begin, it); res.push_back(cur); return res; @@ -106,7 +106,8 @@ static SourcePath resolveShellExprPath(SourcePath path) if (compatibilitySettings.nixShellAlwaysLooksForShellNix) { return resolvedOrDir / "shell.nix"; } else { - warn("Skipping '%1%', because the setting '%2%' is disabled. This is a deprecated behavior. Consider enabling '%2%'.", + warn( + "Skipping '%1%', because the setting '%2%' is disabled. This is a deprecated behavior. 
Consider enabling '%2%'.", resolvedOrDir / "shell.nix", "nix-shell-always-looks-for-shell-nix"); } @@ -119,7 +120,7 @@ static SourcePath resolveShellExprPath(SourcePath path) return resolvedOrDir; } -static void main_nix_build(int argc, char * * argv) +static void main_nix_build(int argc, char ** argv) { auto dryRun = false; auto isNixShell = std::regex_search(argv[0], std::regex("nix-shell$")); @@ -148,9 +149,21 @@ static void main_nix_build(int argc, char * * argv) // List of environment variables kept for --pure StringSet keepVars{ - "HOME", "XDG_RUNTIME_DIR", "USER", "LOGNAME", "DISPLAY", - "WAYLAND_DISPLAY", "WAYLAND_SOCKET", "PATH", "TERM", "IN_NIX_SHELL", - "NIX_SHELL_PRESERVE_PROMPT", "TZ", "PAGER", "NIX_BUILD_SHELL", "SHLVL", + "HOME", + "XDG_RUNTIME_DIR", + "USER", + "LOGNAME", + "DISPLAY", + "WAYLAND_DISPLAY", + "WAYLAND_SOCKET", + "PATH", + "TERM", + "IN_NIX_SHELL", + "NIX_SHELL_PRESERVE_PROMPT", + "TZ", + "PAGER", + "NIX_BUILD_SHELL", + "SHLVL", }; keepVars.insert(networkProxyVariables.begin(), networkProxyVariables.end()); @@ -179,13 +192,16 @@ static void main_nix_build(int argc, char * * argv) args.push_back(word); } } - } catch (SystemError &) { } + } catch (SystemError &) { + } } struct MyArgs : LegacyArgs, MixEvalArgs { using LegacyArgs::LegacyArgs; - void setBaseDir(Path baseDir) { + + void setBaseDir(Path baseDir) + { commandBaseDir = baseDir; } }; @@ -235,8 +251,10 @@ static void main_nix_build(int argc, char * * argv) else if (*arg == "--expr" || *arg == "-E") fromArgs = true; - else if (*arg == "--pure") pure = true; - else if (*arg == "--impure") pure = false; + else if (*arg == "--pure") + pure = true; + else if (*arg == "--impure") + pure = false; else if (isNixShell && (*arg == "--packages" || *arg == "-p")) packages = true; @@ -262,9 +280,15 @@ static void main_nix_build(int argc, char * * argv) // read the shebang to understand which packages to read from. Since // this is handled via nix-shell -p, we wrap our ruby script execution // in ruby -e 'load' which ignores the shebangs. 
- envCommand = fmt("exec %1% %2% -e 'load(ARGV.shift)' -- %3% %4%", execArgs, interpreter, escapeShellArgAlways(script), toView(joined)); + envCommand = + fmt("exec %1% %2% -e 'load(ARGV.shift)' -- %3% %4%", + execArgs, + interpreter, + escapeShellArgAlways(script), + toView(joined)); } else { - envCommand = fmt("exec %1% %2% %3% %4%", execArgs, interpreter, escapeShellArgAlways(script), toView(joined)); + envCommand = + fmt("exec %1% %2% %3% %4%", execArgs, interpreter, escapeShellArgAlways(script), toView(joined)); } } @@ -293,7 +317,8 @@ static void main_nix_build(int argc, char * * argv) auto state = std::make_unique(myArgs.lookupPath, evalStore, fetchSettings, evalSettings, store); state->repair = myArgs.repair; - if (myArgs.repair) buildMode = bmRepair; + if (myArgs.repair) + buildMode = bmRepair; if (inShebang && compatibilitySettings.nixShellShebangArgumentsRelativeToScript) { myArgs.setBaseDir(absPath(dirOf(script))); @@ -304,20 +329,23 @@ static void main_nix_build(int argc, char * * argv) if (isNixShell) { auto newArgs = state->buildBindings(autoArgsWithInNixShell->size() + 1); newArgs.alloc("inNixShell").mkBool(true); - for (auto & i : *autoArgs) newArgs.insert(i); + for (auto & i : *autoArgs) + newArgs.insert(i); autoArgsWithInNixShell = newArgs.finish(); } if (packages) { std::ostringstream joined; - joined << "{...}@args: with import args; (pkgs.runCommandCC or pkgs.runCommand) \"shell\" { buildInputs = [ "; + joined + << "{...}@args: with import args; (pkgs.runCommandCC or pkgs.runCommand) \"shell\" { buildInputs = [ "; for (const auto & i : remainingArgs) joined << '(' << i << ") "; joined << "]; } \"\""; fromArgs = true; remainingArgs = {joined.str()}; } else if (!fromArgs && remainingArgs.empty()) { - if (isNixShell && !compatibilitySettings.nixShellAlwaysLooksForShellNix && std::filesystem::exists("shell.nix")) { + if (isNixShell && !compatibilitySettings.nixShellAlwaysLooksForShellNix + && std::filesystem::exists("shell.nix")) { // If we're in 2.3 compatibility mode, we need to look for shell.nix // now, because it won't be done later. remainingArgs = {"shell.nix"}; @@ -326,7 +354,10 @@ static void main_nix_build(int argc, char * * argv) // Instead of letting it throw later, we throw here to give a more relevant error message if (isNixShell && !std::filesystem::exists("shell.nix") && !std::filesystem::exists("default.nix")) - throw Error("no argument specified and no '%s' or '%s' file found in the working directory", "shell.nix", "default.nix"); + throw Error( + "no argument specified and no '%s' or '%s' file found in the working directory", + "shell.nix", + "default.nix"); } } @@ -348,14 +379,13 @@ static void main_nix_build(int argc, char * * argv) std::move(i), (inShebang && compatibilitySettings.nixShellShebangArgumentsRelativeToScript) ? lookupFileArg(*state, shebangBaseDir) - : state->rootPath(".") - )); - } - else { + : state->rootPath("."))); + } else { auto absolute = i; try { absolute = canonPath(absPath(i), true); - } catch (Error & e) {}; + } catch (Error & e) { + }; auto [path, outputNames] = parsePathWithOutputs(absolute); if (evalStore->isStorePath(path) && hasSuffix(path, ".drv")) drvs.push_back(PackageInfo(*state, evalStore, absolute)); @@ -364,10 +394,8 @@ static void main_nix_build(int argc, char * * argv) relative to the script. */ auto baseDir = inShebang && !packages ? absPath(i, absPath(dirOf(script))) : i; - auto sourcePath = lookupFileArg(*state, - baseDir); - auto resolvedPath = - isNixShell ? 
resolveShellExprPath(sourcePath) : resolveExprPath(sourcePath); + auto sourcePath = lookupFileArg(*state, baseDir); + auto resolvedPath = isNixShell ? resolveShellExprPath(sourcePath) : resolveExprPath(sourcePath); exprs.push_back(state->parseExprFromFile(resolvedPath)); } @@ -375,7 +403,8 @@ static void main_nix_build(int argc, char * * argv) } /* Evaluate them into derivations. */ - if (attrPaths.empty()) attrPaths = {""}; + if (attrPaths.empty()) + attrPaths = {""}; for (auto e : exprs) { Value vRoot; @@ -399,21 +428,11 @@ static void main_nix_build(int argc, char * * argv) }; for (auto & i : attrPaths) { - Value & v(*findAlongAttrPath( - *state, - i, - takesNixShellAttr(vRoot) ? *autoArgsWithInNixShell : *autoArgs, - vRoot - ).first); + Value & v( + *findAlongAttrPath(*state, i, takesNixShellAttr(vRoot) ? *autoArgsWithInNixShell : *autoArgs, vRoot) + .first); state->forceValue(v, v.determinePos(noPos)); - getDerivations( - *state, - v, - "", - takesNixShellAttr(v) ? *autoArgsWithInNixShell : *autoArgs, - drvs, - false - ); + getDerivations(*state, v, "", takesNixShellAttr(v) ? *autoArgsWithInNixShell : *autoArgs, drvs, false); } } @@ -446,9 +465,7 @@ static void main_nix_build(int argc, char * * argv) if (!shell) { try { - auto expr = state->parseExprFromString( - "(import {}).bashInteractive", - state->rootPath(".")); + auto expr = state->parseExprFromString("(import {}).bashInteractive", state->rootPath(".")); Value v; state->eval(expr, v); @@ -458,10 +475,11 @@ static void main_nix_build(int argc, char * * argv) throw Error("the 'bashInteractive' attribute in did not evaluate to a derivation"); auto bashDrv = drv->requireDrvPath(); - pathsToBuild.push_back(DerivedPath::Built { - .drvPath = makeConstantStorePathRef(bashDrv), - .outputs = OutputsSpec::Names {"out"}, - }); + pathsToBuild.push_back( + DerivedPath::Built{ + .drvPath = makeConstantStorePathRef(bashDrv), + .outputs = OutputsSpec::Names{"out"}, + }); pathsToCopy.insert(bashDrv); shellDrv = bashDrv; @@ -474,16 +492,17 @@ static void main_nix_build(int argc, char * * argv) std::function, const DerivedPathMap::ChildNode &)> accumDerivedPath; - accumDerivedPath = [&](ref inputDrv, const DerivedPathMap::ChildNode & inputNode) { + accumDerivedPath = [&](ref inputDrv, + const DerivedPathMap::ChildNode & inputNode) { if (!inputNode.value.empty()) - pathsToBuild.push_back(DerivedPath::Built { - .drvPath = inputDrv, - .outputs = OutputsSpec::Names { inputNode.value }, - }); + pathsToBuild.push_back( + DerivedPath::Built{ + .drvPath = inputDrv, + .outputs = OutputsSpec::Names{inputNode.value}, + }); for (const auto & [outputName, childNode] : inputNode.childMap) accumDerivedPath( - make_ref(SingleDerivedPath::Built { inputDrv, outputName }), - childNode); + make_ref(SingleDerivedPath::Built{inputDrv, outputName}), childNode); }; // Build or fetch all dependencies of the derivation. @@ -491,11 +510,9 @@ static void main_nix_build(int argc, char * * argv) // To get around lambda capturing restrictions in the // standard. 
const auto & inputDrv = inputDrv0; - if (std::all_of(envExclude.cbegin(), envExclude.cend(), - [&](const std::string & exclude) { - return !std::regex_search(store->printStorePath(inputDrv), std::regex(exclude)); - })) - { + if (std::all_of(envExclude.cbegin(), envExclude.cend(), [&](const std::string & exclude) { + return !std::regex_search(store->printStorePath(inputDrv), std::regex(exclude)); + })) { accumDerivedPath(makeConstantStorePathRef(inputDrv), inputNode); pathsToCopy.insert(inputDrv); } @@ -507,7 +524,8 @@ static void main_nix_build(int argc, char * * argv) buildPaths(pathsToBuild); - if (dryRun) return; + if (dryRun) + return; if (shellDrv) { auto shellDrvOutputs = store->queryPartialDerivationOutputMap(shellDrv.value(), &*evalStore); @@ -540,9 +558,7 @@ static void main_nix_build(int argc, char * * argv) auto parsedDrv = StructuredAttrs::tryParse(drv.env); DerivationOptions drvOptions; try { - drvOptions = DerivationOptions::fromStructuredAttrs( - drv.env, - parsedDrv ? &*parsedDrv : nullptr); + drvOptions = DerivationOptions::fromStructuredAttrs(drv.env, parsedDrv ? &*parsedDrv : nullptr); } catch (Error & e) { e.addTrace({}, "while parsing derivation '%s'", store->printStorePath(packageInfo.requireDrvPath())); throw; @@ -566,7 +582,8 @@ static void main_nix_build(int argc, char * * argv) std::function::ChildNode &)> accumInputClosure; - accumInputClosure = [&](const StorePath & inputDrv, const DerivedPathMap::ChildNode & inputNode) { + accumInputClosure = [&](const StorePath & inputDrv, + const DerivedPathMap::ChildNode & inputNode) { auto outputs = store->queryPartialDerivationOutputMap(inputDrv, &*evalStore); for (auto & i : inputNode.value) { auto o = outputs.at(i); @@ -579,11 +596,7 @@ static void main_nix_build(int argc, char * * argv) for (const auto & [inputDrv, inputNode] : drv.inputDrvs.map) accumInputClosure(inputDrv, inputNode); - auto json = parsedDrv->prepareStructuredAttrs( - *store, - drvOptions, - inputs, - drv.outputs); + auto json = parsedDrv->prepareStructuredAttrs(*store, drvOptions, inputs, drv.outputs); structuredAttrsRC = StructuredAttrs::writeShell(json); @@ -644,9 +657,7 @@ static void main_nix_build(int argc, char * * argv) for (auto & i : env) envStrs.push_back(i.first + "=" + i.second); - auto args = interactive - ? Strings{"bash", "--rcfile", rcfile} - : Strings{"bash", rcfile}; + auto args = interactive ? 
Strings{"bash", "--rcfile", rcfile} : Strings{"bash", rcfile}; auto envPtrs = stringsToCharPtrs(envStrs); @@ -678,10 +689,11 @@ static void main_nix_build(int argc, char * * argv) if (outputName == "") throw Error("derivation '%s' lacks an 'outputName' attribute", store->printStorePath(drvPath)); - pathsToBuild.push_back(DerivedPath::Built{ - .drvPath = makeConstantStorePathRef(drvPath), - .outputs = OutputsSpec::Names{outputName}, - }); + pathsToBuild.push_back( + DerivedPath::Built{ + .drvPath = makeConstantStorePathRef(drvPath), + .outputs = OutputsSpec::Names{outputName}, + }); pathsToBuildOrdered.push_back({drvPath, {outputName}}); drvsToCopy.insert(drvPath); @@ -694,7 +706,8 @@ static void main_nix_build(int argc, char * * argv) buildPaths(pathsToBuild); - if (dryRun) return; + if (dryRun) + return; std::vector outPaths; @@ -712,7 +725,8 @@ static void main_nix_build(int argc, char * * argv) if (auto store2 = store.dynamic_pointer_cast()) { std::string symlink = drvPrefix; - if (outputName != "out") symlink += "-" + outputName; + if (outputName != "out") + symlink += "-" + outputName; store2->addPermRoot(outputPath, absPath(symlink)); } diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc index c4a058658..f047dce8f 100644 --- a/src/nix-channel/nix-channel.cc +++ b/src/nix-channel/nix-channel.cc @@ -26,7 +26,8 @@ static std::filesystem::path channelsList; // Reads the list of channels. static void readChannels() { - if (!pathExists(channelsList)) return; + if (!pathExists(channelsList)) + return; auto channelsFile = readFile(channelsList); for (const auto & line : tokenizeString>(channelsFile, "\n")) { @@ -71,7 +72,7 @@ static void removeChannel(const std::string & name) channels.erase(name); writeChannels(); - runProgram(getNixBin("nix-env").string(), true, { "--profile", profile, "--uninstall", name }); + runProgram(getNixBin("nix-env").string(), true, {"--profile", profile, "--uninstall", name}); } static Path nixDefExpr; @@ -84,9 +85,10 @@ static void update(const StringSet & channelNames) auto store = openStore(); auto [fd, unpackChannelPath] = createTempFile(); - writeFull(fd.get(), - #include "unpack-channel.nix.gen.hh" - ); + writeFull( + fd.get(), +#include "unpack-channel.nix.gen.hh" + ); fd = -1; AutoDelete del(unpackChannelPath, false); @@ -111,7 +113,10 @@ static void update(const StringSet & channelNames) // no need to update this channel, reuse the existing store path Path symlink = profile + "/" + name; Path storepath = dirOf(readLink(symlink)); - exprs.push_back("f: rec { name = \"" + cname + "\"; type = \"derivation\"; outputs = [\"out\"]; system = \"builtin\"; outPath = builtins.storePath \"" + storepath + "\"; out = { inherit outPath; };}"); + exprs.push_back( + "f: rec { name = \"" + cname + + "\"; type = \"derivation\"; outputs = [\"out\"]; system = \"builtin\"; outPath = builtins.storePath \"" + + storepath + "\"; out = { inherit outPath; };}"); } else { // We want to download the url to a file to see if it's a tarball while also checking if we // got redirected in the process, so that we can grab the various parts of a nix channel @@ -122,28 +127,40 @@ static void update(const StringSet & channelNames) bool unpacked = false; if (std::regex_search(filename, std::regex("\\.tar\\.(gz|bz2|xz)$"))) { - runProgram(getNixBin("nix-build").string(), false, { "--no-out-link", "--expr", "import " + unpackChannelPath + - "{ name = \"" + cname + "\"; channelName = \"" + name + "\"; src = builtins.storePath \"" + filename + "\"; }" }); + runProgram( + 
getNixBin("nix-build").string(), + false, + {"--no-out-link", + "--expr", + "import " + unpackChannelPath + "{ name = \"" + cname + "\"; channelName = \"" + name + + "\"; src = builtins.storePath \"" + filename + "\"; }"}); unpacked = true; } if (!unpacked) { // Download the channel tarball. try { - filename = store->toRealPath(fetchers::downloadFile(store, fetchSettings, url + "/nixexprs.tar.xz", "nixexprs.tar.xz").storePath); + filename = store->toRealPath( + fetchers::downloadFile(store, fetchSettings, url + "/nixexprs.tar.xz", "nixexprs.tar.xz") + .storePath); } catch (FileTransferError & e) { - filename = store->toRealPath(fetchers::downloadFile(store, fetchSettings, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2").storePath); + filename = store->toRealPath( + fetchers::downloadFile(store, fetchSettings, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2") + .storePath); } } // Regardless of where it came from, add the expression representing this channel to accumulated expression - exprs.push_back("f: f { name = \"" + cname + "\"; channelName = \"" + name + "\"; src = builtins.storePath \"" + filename + "\"; " + extraAttrs + " }"); + exprs.push_back( + "f: f { name = \"" + cname + "\"; channelName = \"" + name + "\"; src = builtins.storePath \"" + + filename + "\"; " + extraAttrs + " }"); } } // Unpack the channel tarballs into the Nix store and install them // into the channels profile. std::cerr << "unpacking " << exprs.size() << " channels...\n"; - Strings envArgs{ "--profile", profile, "--file", unpackChannelPath, "--install", "--remove-all", "--from-expression" }; + Strings envArgs{ + "--profile", profile, "--file", unpackChannelPath, "--install", "--remove-all", "--from-expression"}; for (auto & expr : exprs) envArgs.push_back(std::move(expr)); envArgs.push_back("--quiet"); @@ -173,18 +190,11 @@ static int main_nix_channel(int argc, char ** argv) nixDefExpr = getNixDefExpr(); // Figure out the name of the channels profile. 
- profile = profilesDir() + "/channels"; + profile = profilesDir() + "/channels"; createDirs(dirOf(profile)); - enum { - cNone, - cAdd, - cRemove, - cList, - cUpdate, - cListGenerations, - cRollback - } cmd = cNone; + enum { cNone, cAdd, cRemove, cList, cUpdate, cListGenerations, cRollback } cmd = cNone; + std::vector args; parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) { if (*arg == "--help") { @@ -212,12 +222,12 @@ static int main_nix_channel(int argc, char ** argv) }); switch (cmd) { - case cNone: - throw UsageError("no command specified"); - case cAdd: - if (args.size() < 1 || args.size() > 2) - throw UsageError("'--add' requires one or two arguments"); - { + case cNone: + throw UsageError("no command specified"); + case cAdd: + if (args.size() < 1 || args.size() > 2) + throw UsageError("'--add' requires one or two arguments"); + { auto url = args[0]; std::string name; if (args.size() == 2) { @@ -228,40 +238,41 @@ static int main_nix_channel(int argc, char ** argv) name = std::regex_replace(name, std::regex("-stable$"), ""); } addChannel(url, name); - } - break; - case cRemove: - if (args.size() != 1) - throw UsageError("'--remove' requires one argument"); - removeChannel(args[0]); - break; - case cList: - if (!args.empty()) - throw UsageError("'--list' expects no arguments"); - readChannels(); - for (const auto & channel : channels) - std::cout << channel.first << ' ' << channel.second << '\n'; - break; - case cUpdate: - update(StringSet(args.begin(), args.end())); - break; - case cListGenerations: - if (!args.empty()) - throw UsageError("'--list-generations' expects no arguments"); - std::cout << runProgram(getNixBin("nix-env").string(), false, {"--profile", profile, "--list-generations"}) << std::flush; - break; - case cRollback: - if (args.size() > 1) - throw UsageError("'--rollback' has at most one argument"); - Strings envArgs{"--profile", profile}; - if (args.size() == 1) { - envArgs.push_back("--switch-generation"); - envArgs.push_back(args[0]); - } else { - envArgs.push_back("--rollback"); - } - runProgram(getNixBin("nix-env").string(), false, envArgs); - break; + } + break; + case cRemove: + if (args.size() != 1) + throw UsageError("'--remove' requires one argument"); + removeChannel(args[0]); + break; + case cList: + if (!args.empty()) + throw UsageError("'--list' expects no arguments"); + readChannels(); + for (const auto & channel : channels) + std::cout << channel.first << ' ' << channel.second << '\n'; + break; + case cUpdate: + update(StringSet(args.begin(), args.end())); + break; + case cListGenerations: + if (!args.empty()) + throw UsageError("'--list-generations' expects no arguments"); + std::cout << runProgram(getNixBin("nix-env").string(), false, {"--profile", profile, "--list-generations"}) + << std::flush; + break; + case cRollback: + if (args.size() > 1) + throw UsageError("'--rollback' has at most one argument"); + Strings envArgs{"--profile", profile}; + if (args.size() == 1) { + envArgs.push_back("--switch-generation"); + envArgs.push_back(args[0]); + } else { + envArgs.push_back("--rollback"); + } + runProgram(getNixBin("nix-env").string(), false, envArgs); + break; } return 0; diff --git a/src/nix-collect-garbage/nix-collect-garbage.cc b/src/nix-collect-garbage/nix-collect-garbage.cc index 7f86b2b5c..4d6e60bf3 100644 --- a/src/nix-collect-garbage/nix-collect-garbage.cc +++ b/src/nix-collect-garbage/nix-collect-garbage.cc @@ -12,21 +12,23 @@ #include #include -namespace nix::fs { using namespace std::filesystem; } 
+namespace nix::fs { +using namespace std::filesystem; +} using namespace nix; std::string deleteOlderThan; bool dryRun = false; - /* If `-d' was specified, remove all old generations of all profiles. * Of course, this makes rollbacks to before this point in time * impossible. */ void removeOldGenerations(std::filesystem::path dir) { - if (access(dir.string().c_str(), R_OK) != 0) return; + if (access(dir.string().c_str(), R_OK) != 0) + return; bool canWrite = access(dir.string().c_str(), W_OK) == 0; @@ -41,7 +43,8 @@ void removeOldGenerations(std::filesystem::path dir) try { link = readLink(path); } catch (std::filesystem::filesystem_error & e) { - if (e.code() == std::errc::no_such_file_or_directory) continue; + if (e.code() == std::errc::no_such_file_or_directory) + continue; throw; } if (link.find("link") != std::string::npos) { @@ -58,7 +61,7 @@ void removeOldGenerations(std::filesystem::path dir) } } -static int main_nix_collect_garbage(int argc, char * * argv) +static int main_nix_collect_garbage(int argc, char ** argv) { { bool removeOld = false; @@ -70,12 +73,13 @@ static int main_nix_collect_garbage(int argc, char * * argv) showManPage("nix-collect-garbage"); else if (*arg == "--version") printVersion("nix-collect-garbage"); - else if (*arg == "--delete-old" || *arg == "-d") removeOld = true; + else if (*arg == "--delete-old" || *arg == "-d") + removeOld = true; else if (*arg == "--delete-older-than") { removeOld = true; deleteOlderThan = getArg(*arg, arg, end); - } - else if (*arg == "--dry-run") dryRun = true; + } else if (*arg == "--dry-run") + dryRun = true; else if (*arg == "--max-freed") options.maxFreed = std::max(getIntArg(*arg, arg, end, true), (int64_t) 0); else diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index fd48e67dc..f165c069c 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -33,27 +33,17 @@ using namespace nix; using std::cout; - -typedef enum { - srcNixExprDrvs, - srcNixExprs, - srcStorePaths, - srcProfile, - srcAttrPath, - srcUnknown -} InstallSourceType; - +typedef enum { srcNixExprDrvs, srcNixExprs, srcStorePaths, srcProfile, srcAttrPath, srcUnknown } InstallSourceType; struct InstallSourceInfo { InstallSourceType type; std::shared_ptr nixExprPath; /* for srcNixExprDrvs, srcNixExprs */ - Path profile; /* for srcProfile */ - std::string systemFilter; /* for srcNixExprDrvs */ + Path profile; /* for srcProfile */ + std::string systemFilter; /* for srcNixExprDrvs */ Bindings * autoArgs; }; - struct Globals { InstallSourceInfo instSource; @@ -66,57 +56,49 @@ struct Globals bool prebuiltOnly; }; +typedef void (*Operation)(Globals & globals, Strings opFlags, Strings opArgs); -typedef void (* Operation) (Globals & globals, - Strings opFlags, Strings opArgs); - - -static std::string needArg(Strings::iterator & i, - Strings & args, const std::string & arg) +static std::string needArg(Strings::iterator & i, Strings & args, const std::string & arg) { - if (i == args.end()) throw UsageError("'%1%' requires an argument", arg); + if (i == args.end()) + throw UsageError("'%1%' requires an argument", arg); return *i++; } - -static bool parseInstallSourceOptions(Globals & globals, - Strings::iterator & i, Strings & args, const std::string & arg) +static bool parseInstallSourceOptions(Globals & globals, Strings::iterator & i, Strings & args, const std::string & arg) { if (arg == "--from-expression" || arg == "-E") globals.instSource.type = srcNixExprs; else if (arg == "--from-profile") { globals.instSource.type = srcProfile; 
globals.instSource.profile = needArg(i, args, arg); - } - else if (arg == "--attr" || arg == "-A") + } else if (arg == "--attr" || arg == "-A") globals.instSource.type = srcAttrPath; - else return false; + else + return false; return true; } - static bool isNixExpr(const SourcePath & path, struct SourceAccessor::Stat & st) { - return - st.type == SourceAccessor::tRegular - || (st.type == SourceAccessor::tDirectory && (path / "default.nix").resolveSymlinks().pathExists()); + return st.type == SourceAccessor::tRegular + || (st.type == SourceAccessor::tDirectory && (path / "default.nix").resolveSymlinks().pathExists()); } - static constexpr size_t maxAttrs = 1024; - -static void getAllExprs(EvalState & state, - const SourcePath & path, StringSet & seen, BindingsBuilder & attrs) +static void getAllExprs(EvalState & state, const SourcePath & path, StringSet & seen, BindingsBuilder & attrs) { StringSet namesSorted; - for (auto & [name, _] : path.resolveSymlinks().readDirectory()) namesSorted.insert(name); + for (auto & [name, _] : path.resolveSymlinks().readDirectory()) + namesSorted.insert(name); for (auto & i : namesSorted) { /* Ignore the manifest.nix used by profiles. This is necessary to prevent it from showing up in channels (which are implemented using profiles). */ - if (i == "manifest.nix") continue; + if (i == "manifest.nix") + continue; auto path2 = (path / i).resolveSymlinks(); @@ -137,10 +119,15 @@ static void getAllExprs(EvalState & state, attrName = std::string(attrName, 0, attrName.size() - 4); if (!seen.insert(attrName).second) { std::string suggestionMessage = ""; - if (path2.path.abs().find("channels") != std::string::npos && path.path.abs().find("channels") != std::string::npos) - suggestionMessage = fmt("\nsuggestion: remove '%s' from either the root channels or the user channels", attrName); - printError("warning: name collision in input Nix expressions, skipping '%1%'" - "%2%", path2, suggestionMessage); + if (path2.path.abs().find("channels") != std::string::npos + && path.path.abs().find("channels") != std::string::npos) + suggestionMessage = + fmt("\nsuggestion: remove '%s' from either the root channels or the user channels", attrName); + printError( + "warning: name collision in input Nix expressions, skipping '%1%'" + "%2%", + path2, + suggestionMessage); continue; } /* Load the expression on demand. */ @@ -149,16 +136,13 @@ static void getAllExprs(EvalState & state, if (seen.size() == maxAttrs) throw Error("too many Nix expressions in directory '%1%'", path); attrs.alloc(attrName).mkApp(&state.getBuiltin("import"), vArg); - } - else if (st.type == SourceAccessor::tDirectory) + } else if (st.type == SourceAccessor::tDirectory) /* `path2' is a directory (with no default.nix in it); recurse into it. 
*/ getAllExprs(state, path2, seen, attrs); } } - - static void loadSourceExpr(EvalState & state, const SourcePath & path, Value & v) { auto st = path.resolveSymlinks().lstat(); @@ -180,13 +164,17 @@ static void loadSourceExpr(EvalState & state, const SourcePath & path, Value & v v.mkAttrs(attrs); } - else throw Error("path '%s' is not a directory or a Nix expression", path); + else + throw Error("path '%s' is not a directory or a Nix expression", path); } - -static void loadDerivations(EvalState & state, const SourcePath & nixExprPath, - std::string systemFilter, Bindings & autoArgs, - const std::string & pathPrefix, PackageInfos & elems) +static void loadDerivations( + EvalState & state, + const SourcePath & nixExprPath, + std::string systemFilter, + Bindings & autoArgs, + const std::string & pathPrefix, + PackageInfos & elems) { Value vRoot; loadSourceExpr(state, nixExprPath, vRoot); @@ -198,35 +186,33 @@ static void loadDerivations(EvalState & state, const SourcePath & nixExprPath, /* Filter out all derivations not applicable to the current system. */ for (PackageInfos::iterator i = elems.begin(), j; i != elems.end(); i = j) { - j = i; j++; + j = i; + j++; if (systemFilter != "*" && i->querySystem() != systemFilter) elems.erase(i); } } - static NixInt getPriority(EvalState & state, PackageInfo & drv) { return drv.queryMetaInt("priority", NixInt(0)); } - static std::strong_ordering comparePriorities(EvalState & state, PackageInfo & drv1, PackageInfo & drv2) { return getPriority(state, drv2) <=> getPriority(state, drv1); } - // FIXME: this function is rather slow since it checks a single path // at a time. static bool isPrebuilt(EvalState & state, PackageInfo & elem) { auto path = elem.queryOutPath(); - if (state.store->isValidPath(path)) return true; + if (state.store->isValidPath(path)) + return true; return state.store->querySubstitutablePaths({path}).count(path); } - static void checkSelectorUse(DrvNames & selectors) { /* Check that all selectors have been used. */ @@ -235,14 +221,14 @@ static void checkSelectorUse(DrvNames & selectors) throw Error("selector '%1%' matches no derivations", i.fullName); } - namespace { -StringSet searchByPrefix(const PackageInfos & allElems, std::string_view prefix) { +StringSet searchByPrefix(const PackageInfos & allElems, std::string_view prefix) +{ constexpr std::size_t maxResults = 3; StringSet result; for (const auto & packageInfo : allElems) { - const auto drvName = DrvName { packageInfo.queryName() }; + const auto drvName = DrvName{packageInfo.queryName()}; if (hasPrefix(drvName.name, prefix)) { result.emplace(drvName.name); @@ -260,9 +246,10 @@ struct Match std::size_t index; Match(PackageInfo packageInfo_, std::size_t index_) - : packageInfo{std::move(packageInfo_)} - , index{index_} - {} + : packageInfo{std::move(packageInfo_)} + , index{index_} + { + } }; /* If a selector matches multiple derivations @@ -272,7 +259,8 @@ struct Match derivations, pick the one with the highest version. Finally, if there are still multiple derivations, arbitrarily pick the first one. */ -std::vector pickNewestOnly(EvalState & state, std::vector matches) { +std::vector pickNewestOnly(EvalState & state, std::vector matches) +{ /* Map from package names to derivations. 
*/ std::map newest; StringSet multiple; @@ -280,7 +268,7 @@ std::vector pickNewestOnly(EvalState & state, std::vector matches) for (auto & match : matches) { auto & oneDrv = match.packageInfo; - const auto drvName = DrvName { oneDrv.queryName() }; + const auto drvName = DrvName{oneDrv.queryName()}; std::strong_ordering comparison = std::strong_ordering::greater; const auto itOther = newest.find(drvName.name); @@ -288,14 +276,14 @@ std::vector pickNewestOnly(EvalState & state, std::vector matches) if (itOther != newest.end()) { auto & newestDrv = itOther->second.packageInfo; - comparison = - oneDrv.querySystem() == newestDrv.querySystem() ? std::strong_ordering::equal : - oneDrv.querySystem() == settings.thisSystem ? std::strong_ordering::greater : - newestDrv.querySystem() == settings.thisSystem ? std::strong_ordering::less : std::strong_ordering::equal; + comparison = oneDrv.querySystem() == newestDrv.querySystem() ? std::strong_ordering::equal + : oneDrv.querySystem() == settings.thisSystem ? std::strong_ordering::greater + : newestDrv.querySystem() == settings.thisSystem ? std::strong_ordering::less + : std::strong_ordering::equal; if (comparison == 0) comparison = comparePriorities(state, oneDrv, newestDrv); if (comparison == 0) - comparison = compareVersions(drvName.version, DrvName { newestDrv.queryName() }.version); + comparison = compareVersions(drvName.version, DrvName{newestDrv.queryName()}.version); } if (comparison > 0) { @@ -310,9 +298,7 @@ std::vector pickNewestOnly(EvalState & state, std::vector matches) matches.clear(); for (auto & [name, match] : newest) { if (multiple.find(name) != multiple.end()) - warn( - "there are multiple derivations named '%1%'; using the first one", - name); + warn("there are multiple derivations named '%1%'; using the first one", name); matches.push_back(match); } @@ -321,8 +307,8 @@ std::vector pickNewestOnly(EvalState & state, std::vector matches) } // end namespace -static PackageInfos filterBySelector(EvalState & state, const PackageInfos & allElems, - const Strings & args, bool newestOnly) +static PackageInfos +filterBySelector(EvalState & state, const PackageInfos & allElems, const Strings & args, bool newestOnly) { DrvNames selectors = drvNamesFromArgs(args); if (selectors.empty()) @@ -334,7 +320,7 @@ static PackageInfos filterBySelector(EvalState & state, const PackageInfos & all for (auto & selector : selectors) { std::vector matches; for (const auto & [index, packageInfo] : enumerate(allElems)) { - const auto drvName = DrvName { packageInfo.queryName() }; + const auto drvName = DrvName{packageInfo.queryName()}; if (selector.matches(drvName)) { ++selector.hits; matches.emplace_back(packageInfo, index); @@ -369,16 +355,13 @@ static PackageInfos filterBySelector(EvalState & state, const PackageInfos & all return elems; } - static bool isPath(std::string_view s) { return s.find('/') != std::string_view::npos; } - -static void queryInstSources(EvalState & state, - InstallSourceInfo & instSource, const Strings & args, - PackageInfos & elems, bool newestOnly) +static void queryInstSources( + EvalState & state, InstallSourceInfo & instSource, const Strings & args, PackageInfos & elems, bool newestOnly) { InstallSourceType type = instSource.type; if (type == srcUnknown && args.size() > 0 && isPath(args.front())) @@ -386,98 +369,93 @@ static void queryInstSources(EvalState & state, switch (type) { - /* Get the available user environment elements from the - derivations specified in a Nix expression, including only - those with names matching any of 
the names in `args'. */ - case srcUnknown: - case srcNixExprDrvs: { + /* Get the available user environment elements from the + derivations specified in a Nix expression, including only + those with names matching any of the names in `args'. */ + case srcUnknown: + case srcNixExprDrvs: { - /* Load the derivations from the (default or specified) - Nix expression. */ - PackageInfos allElems; - loadDerivations(state, *instSource.nixExprPath, - instSource.systemFilter, *instSource.autoArgs, "", allElems); + /* Load the derivations from the (default or specified) + Nix expression. */ + PackageInfos allElems; + loadDerivations(state, *instSource.nixExprPath, instSource.systemFilter, *instSource.autoArgs, "", allElems); - elems = filterBySelector(state, allElems, args, newestOnly); + elems = filterBySelector(state, allElems, args, newestOnly); - break; + break; + } + + /* Get the available user environment elements from the Nix + expressions specified on the command line; these should be + functions that take the default Nix expression file as + argument, e.g., if the file is `./foo.nix', then the + argument `x: x.bar' is equivalent to `(x: x.bar) + (import ./foo.nix)' = `(import ./foo.nix).bar'. */ + case srcNixExprs: { + + Value vArg; + loadSourceExpr(state, *instSource.nixExprPath, vArg); + + for (auto & i : args) { + Expr * eFun = state.parseExprFromString(i, state.rootPath(".")); + Value vFun, vTmp; + state.eval(eFun, vFun); + vTmp.mkApp(&vFun, &vArg); + getDerivations(state, vTmp, "", *instSource.autoArgs, elems, true); } - /* Get the available user environment elements from the Nix - expressions specified on the command line; these should be - functions that take the default Nix expression file as - argument, e.g., if the file is `./foo.nix', then the - argument `x: x.bar' is equivalent to `(x: x.bar) - (import ./foo.nix)' = `(import ./foo.nix).bar'. */ - case srcNixExprs: { + break; + } - Value vArg; - loadSourceExpr(state, *instSource.nixExprPath, vArg); + /* The available user environment elements are specified as a + list of store paths (which may or may not be + derivations). */ + case srcStorePaths: { - for (auto & i : args) { - Expr * eFun = state.parseExprFromString(i, state.rootPath(".")); - Value vFun, vTmp; - state.eval(eFun, vFun); - vTmp.mkApp(&vFun, &vArg); - getDerivations(state, vTmp, "", *instSource.autoArgs, elems, true); - } + for (auto & i : args) { + auto path = state.store->followLinksToStorePath(i); - break; + std::string name(path.name()); + + PackageInfo elem(state, "", nullptr); + elem.setName(name); + + if (path.isDerivation()) { + elem.setDrvPath(path); + auto outputs = state.store->queryDerivationOutputMap(path); + elem.setOutPath(outputs.at("out")); + if (name.size() >= drvExtension.size() + && std::string(name, name.size() - drvExtension.size()) == drvExtension) + name = name.substr(0, name.size() - drvExtension.size()); + } else + elem.setOutPath(path); + + elems.push_back(elem); } - /* The available user environment elements are specified as a - list of store paths (which may or may not be - derivations). */ - case srcStorePaths: { + break; + } - for (auto & i : args) { - auto path = state.store->followLinksToStorePath(i); + /* Get the available user environment elements from another + user environment. These are then filtered as in the + `srcNixExprDrvs' case. 
*/ + case srcProfile: { + elems = filterBySelector(state, queryInstalled(state, instSource.profile), args, newestOnly); + break; + } - std::string name(path.name()); - - PackageInfo elem(state, "", nullptr); - elem.setName(name); - - if (path.isDerivation()) { - elem.setDrvPath(path); - auto outputs = state.store->queryDerivationOutputMap(path); - elem.setOutPath(outputs.at("out")); - if (name.size() >= drvExtension.size() && - std::string(name, name.size() - drvExtension.size()) == drvExtension) - name = name.substr(0, name.size() - drvExtension.size()); - } - else - elem.setOutPath(path); - - elems.push_back(elem); - } - - break; - } - - /* Get the available user environment elements from another - user environment. These are then filtered as in the - `srcNixExprDrvs' case. */ - case srcProfile: { - elems = filterBySelector(state, - queryInstalled(state, instSource.profile), - args, newestOnly); - break; - } - - case srcAttrPath: { - Value vRoot; - loadSourceExpr(state, *instSource.nixExprPath, vRoot); - for (auto & i : args) { - Value & v(*findAlongAttrPath(state, i, *instSource.autoArgs, vRoot).first); - getDerivations(state, v, "", *instSource.autoArgs, elems, true); - } - break; + case srcAttrPath: { + Value vRoot; + loadSourceExpr(state, *instSource.nixExprPath, vRoot); + for (auto & i : args) { + Value & v(*findAlongAttrPath(state, i, *instSource.autoArgs, vRoot).first); + getDerivations(state, v, "", *instSource.autoArgs, elems, true); } + break; + } } } - static void printMissing(EvalState & state, PackageInfos & elems) { std::vector targets; @@ -485,34 +463,32 @@ static void printMissing(EvalState & state, PackageInfos & elems) if (auto drvPath = i.queryDrvPath()) { auto path = DerivedPath::Built{ .drvPath = makeConstantStorePathRef(*drvPath), - .outputs = OutputsSpec::All { }, + .outputs = OutputsSpec::All{}, }; targets.emplace_back(std::move(path)); } else - targets.emplace_back(DerivedPath::Opaque{ - .path = i.queryOutPath(), - }); + targets.emplace_back( + DerivedPath::Opaque{ + .path = i.queryOutPath(), + }); printMissing(state.store, targets); } - static bool keep(PackageInfo & drv) { return drv.queryMetaBool("keep", false); } -static void setMetaFlag(EvalState & state, PackageInfo & drv, - const std::string & name, const std::string & value) +static void setMetaFlag(EvalState & state, PackageInfo & drv, const std::string & name, const std::string & value) { auto v = state.allocValue(); v->mkString(value); drv.setMeta(name, v); } - -static void installDerivations(Globals & globals, - const Strings & args, const Path & profile, std::optional priority) +static void +installDerivations(Globals & globals, const Strings & args, const Path & profile, std::optional priority) { debug("installing derivations"); @@ -554,9 +530,7 @@ static void installDerivations(Globals & globals, for (auto & i : installedElems) { DrvName drvName(i.queryName()); - if (!globals.preserveInstalled && - newNames.find(drvName.name) != newNames.end() && - !keep(i)) + if (!globals.preserveInstalled && newNames.find(drvName.name) != newNames.end() && !keep(i)) printInfo("replacing old '%s'", i.queryName()); else allElems.push_back(i); @@ -568,20 +542,21 @@ static void installDerivations(Globals & globals, printMissing(*globals.state, newElems); - if (globals.dryRun) return; + if (globals.dryRun) + return; - if (createUserEnv(*globals.state, allElems, - profile, settings.envKeepDerivations, lockToken)) break; + if (createUserEnv(*globals.state, allElems, profile, settings.envKeepDerivations, lockToken)) + 
break; } } - static void opInstall(Globals & globals, Strings opFlags, Strings opArgs) { std::optional priority; - for (Strings::iterator i = opFlags.begin(); i != opFlags.end(); ) { + for (Strings::iterator i = opFlags.begin(); i != opFlags.end();) { auto arg = *i++; - if (parseInstallSourceOptions(globals, i, opFlags, arg)) ; + if (parseInstallSourceOptions(globals, i, opFlags, arg)) + ; else if (arg == "--preserve-installed" || arg == "-P") globals.preserveInstalled = true; else if (arg == "--remove-all" || arg == "-r") @@ -592,19 +567,16 @@ static void opInstall(Globals & globals, Strings opFlags, Strings opArgs) priority = string2Int(*i++); if (!priority) throw UsageError("'--priority' requires an integer argument"); - } - else throw UsageError("unknown flag '%1%'", arg); + } else + throw UsageError("unknown flag '%1%'", arg); } installDerivations(globals, opArgs, globals.profile, priority); } - typedef enum { utLt, utLeq, utEq, utAlways } UpgradeType; - -static void upgradeDerivations(Globals & globals, - const Strings & args, UpgradeType upgradeType) +static void upgradeDerivations(Globals & globals, const Strings & args, UpgradeType upgradeType) { debug("upgrading derivations"); @@ -649,15 +621,13 @@ static void upgradeDerivations(Globals & globals, DrvName newName(j->queryName()); if (newName.name == drvName.name) { std::strong_ordering d = compareVersions(drvName.version, newName.version); - if ((upgradeType == utLt && d < 0) || - (upgradeType == utLeq && d <= 0) || - (upgradeType == utEq && d == 0) || - upgradeType == utAlways) - { + if ((upgradeType == utLt && d < 0) || (upgradeType == utLeq && d <= 0) + || (upgradeType == utEq && d == 0) || upgradeType == utAlways) { std::strong_ordering d2 = std::strong_ordering::less; if (bestElem != availElems.end()) { d2 = comparePriorities(*globals.state, *bestElem, *j); - if (d2 == 0) d2 = compareVersions(bestVersion, newName.version); + if (d2 == 0) + d2 = compareVersions(bestVersion, newName.version); } if (d2 < 0 && (!globals.prebuiltOnly || isPrebuilt(*globals.state, *j))) { bestElem = j; @@ -667,16 +637,13 @@ static void upgradeDerivations(Globals & globals, } } - if (bestElem != availElems.end() && - i.queryOutPath() != - bestElem->queryOutPath()) - { - const char * action = compareVersions(drvName.version, bestVersion) <= 0 - ? "upgrading" : "downgrading"; - printInfo("%1% '%2%' to '%3%'", - action, i.queryName(), bestElem->queryName()); + if (bestElem != availElems.end() && i.queryOutPath() != bestElem->queryOutPath()) { + const char * action = + compareVersions(drvName.version, bestVersion) <= 0 ? 
"upgrading" : "downgrading"; + printInfo("%1% '%2%' to '%3%'", action, i.queryName(), bestElem->queryName()); newElems.push_back(*bestElem); - } else newElems.push_back(i); + } else + newElems.push_back(i); } catch (Error & e) { e.addTrace(nullptr, "while trying to find an upgrade for '%s'", i.queryName()); @@ -686,31 +653,36 @@ static void upgradeDerivations(Globals & globals, printMissing(*globals.state, newElems); - if (globals.dryRun) return; + if (globals.dryRun) + return; - if (createUserEnv(*globals.state, newElems, - globals.profile, settings.envKeepDerivations, lockToken)) break; + if (createUserEnv(*globals.state, newElems, globals.profile, settings.envKeepDerivations, lockToken)) + break; } } - static void opUpgrade(Globals & globals, Strings opFlags, Strings opArgs) { UpgradeType upgradeType = utLt; - for (Strings::iterator i = opFlags.begin(); i != opFlags.end(); ) { + for (Strings::iterator i = opFlags.begin(); i != opFlags.end();) { std::string arg = *i++; - if (parseInstallSourceOptions(globals, i, opFlags, arg)) ; - else if (arg == "--lt") upgradeType = utLt; - else if (arg == "--leq") upgradeType = utLeq; - else if (arg == "--eq") upgradeType = utEq; - else if (arg == "--always") upgradeType = utAlways; - else throw UsageError("unknown flag '%1%'", arg); + if (parseInstallSourceOptions(globals, i, opFlags, arg)) + ; + else if (arg == "--lt") + upgradeType = utLt; + else if (arg == "--leq") + upgradeType = utLeq; + else if (arg == "--eq") + upgradeType = utEq; + else if (arg == "--always") + upgradeType = utAlways; + else + throw UsageError("unknown flag '%1%'", arg); } upgradeDerivations(globals, opArgs, upgradeType); } - static void opSetFlag(Globals & globals, Strings opFlags, Strings opArgs) { if (opFlags.size() > 0) @@ -743,21 +715,23 @@ static void opSetFlag(Globals & globals, Strings opFlags, Strings opArgs) checkSelectorUse(selectors); /* Write the new user environment. */ - if (createUserEnv(*globals.state, installedElems, - globals.profile, settings.envKeepDerivations, lockToken)) break; + if (createUserEnv(*globals.state, installedElems, globals.profile, settings.envKeepDerivations, lockToken)) + break; } } - static void opSet(Globals & globals, Strings opFlags, Strings opArgs) { auto store2 = globals.state->store.dynamic_pointer_cast(); - if (!store2) throw Error("--set is not supported for this Nix store"); + if (!store2) + throw Error("--set is not supported for this Nix store"); - for (Strings::iterator i = opFlags.begin(); i != opFlags.end(); ) { + for (Strings::iterator i = opFlags.begin(); i != opFlags.end();) { std::string arg = *i++; - if (parseInstallSourceOptions(globals, i, opFlags, arg)) ; - else throw UsageError("unknown flag '%1%'", arg); + if (parseInstallSourceOptions(globals, i, opFlags, arg)) + ; + else + throw UsageError("unknown flag '%1%'", arg); } PackageInfos elems; @@ -772,31 +746,26 @@ static void opSet(Globals & globals, Strings opFlags, Strings opArgs) drv.setName(globals.forceName); auto drvPath = drv.queryDrvPath(); - std::vector paths { - drvPath - ? (DerivedPath) (DerivedPath::Built { - .drvPath = makeConstantStorePathRef(*drvPath), - .outputs = OutputsSpec::All { }, - }) - : (DerivedPath) (DerivedPath::Opaque { - .path = drv.queryOutPath(), - }), + std::vector paths{ + drvPath ? 
(DerivedPath) (DerivedPath::Built{ + .drvPath = makeConstantStorePathRef(*drvPath), + .outputs = OutputsSpec::All{}, + }) + : (DerivedPath) (DerivedPath::Opaque{ + .path = drv.queryOutPath(), + }), }; printMissing(globals.state->store, paths); - if (globals.dryRun) return; + if (globals.dryRun) + return; globals.state->store->buildPaths(paths, globals.state->repair ? bmRepair : bmNormal); debug("switching to new user environment"); - Path generation = createGeneration( - *store2, - globals.profile, - drv.queryOutPath()); + Path generation = createGeneration(*store2, globals.profile, drv.queryOutPath()); switchLink(globals.profile, generation); } - -static void uninstallDerivations(Globals & globals, Strings & selectors, - Path & profile) +static void uninstallDerivations(Globals & globals, Strings & selectors, Path & profile) { while (true) { auto lockToken = optimisticLockProfile(profile); @@ -808,20 +777,15 @@ static void uninstallDerivations(Globals & globals, Strings & selectors, if (isPath(selector)) { StorePath selectorStorePath = globals.state->store->followLinksToStorePath(selector); split = std::partition( - workingElems.begin(), workingElems.end(), - [&selectorStorePath, globals](auto &elem) { + workingElems.begin(), workingElems.end(), [&selectorStorePath, globals](auto & elem) { return selectorStorePath != elem.queryOutPath(); - } - ); + }); } else { DrvName selectorName(selector); - split = std::partition( - workingElems.begin(), workingElems.end(), - [&selectorName](auto &elem){ - DrvName elemName(elem.queryName()); - return !selectorName.matches(elemName); - } - ); + split = std::partition(workingElems.begin(), workingElems.end(), [&selectorName](auto & elem) { + DrvName elemName(elem.queryName()); + return !selectorName.matches(elemName); + }); } if (split == workingElems.end()) warn("selector '%s' matched no installed derivations", selector); @@ -831,14 +795,14 @@ static void uninstallDerivations(Globals & globals, Strings & selectors, workingElems.erase(split, workingElems.end()); } - if (globals.dryRun) return; + if (globals.dryRun) + return; - if (createUserEnv(*globals.state, workingElems, - profile, settings.envKeepDerivations, lockToken)) break; + if (createUserEnv(*globals.state, workingElems, profile, settings.envKeepDerivations, lockToken)) + break; } } - static void opUninstall(Globals & globals, Strings opFlags, Strings opArgs) { if (opFlags.size() > 0) @@ -846,26 +810,20 @@ static void opUninstall(Globals & globals, Strings opFlags, Strings opArgs) uninstallDerivations(globals, opArgs, globals.profile); } - static bool cmpChars(char a, char b) { return toupper(a) < toupper(b); } - static bool cmpElemByName(const PackageInfo & a, const PackageInfo & b) { auto a_name = a.queryName(); auto b_name = b.queryName(); - return lexicographical_compare( - a_name.begin(), a_name.end(), - b_name.begin(), b_name.end(), cmpChars); + return lexicographical_compare(a_name.begin(), a_name.end(), b_name.begin(), b_name.end(), cmpChars); } - typedef std::list Table; - void printTable(Table & table) { auto nrColumns = table.size() > 0 ? 
table.front().size() : 0; @@ -878,7 +836,8 @@ void printTable(Table & table) Strings::iterator j; size_t column; for (j = i.begin(), column = 0; j != i.end(); ++j, ++column) - if (j->size() > widths[column]) widths[column] = j->size(); + if (j->size() > widths[column]) + widths[column] = j->size(); } for (auto & i : table) { @@ -895,7 +854,6 @@ void printTable(Table & table) } } - /* This function compares the version of an element against the versions in the given set of elements. `cvLess' means that only lower versions are in the set, `cvEqual' means that at most an @@ -905,8 +863,7 @@ void printTable(Table & table) typedef enum { cvLess, cvEqual, cvGreater, cvUnavail } VersionDiff; -static VersionDiff compareVersionAgainstSet( - const PackageInfo & elem, const PackageInfos & elems, std::string & version) +static VersionDiff compareVersionAgainstSet(const PackageInfo & elem, const PackageInfos & elems, std::string & version) { DrvName name(elem.queryName()); @@ -920,12 +877,10 @@ static VersionDiff compareVersionAgainstSet( if (d < 0) { diff = cvGreater; version = name2.version; - } - else if (diff != cvGreater && d == 0) { + } else if (diff != cvGreater && d == 0) { diff = cvEqual; version = name2.version; - } - else if (diff != cvGreater && diff != cvEqual && d > 0) { + } else if (diff != cvGreater && diff != cvEqual && d > 0) { diff = cvLess; if (version == "" || compareVersions(version, name2.version) < 0) version = name2.version; @@ -936,18 +891,18 @@ static VersionDiff compareVersionAgainstSet( return diff; } - -static void queryJSON(Globals & globals, std::vector & elems, bool printOutPath, bool printDrvPath, bool printMeta) +static void +queryJSON(Globals & globals, std::vector & elems, bool printOutPath, bool printDrvPath, bool printMeta) { using nlohmann::json; json topObj = json::object(); for (auto & i : elems) { try { - if (i.hasFailed()) continue; - + if (i.hasFailed()) + continue; auto drvName = DrvName(i.queryName()); - json &pkgObj = topObj[i.attrPath]; + json & pkgObj = topObj[i.attrPath]; pkgObj = { {"name", drvName.fullName}, {"pname", drvName.name}, @@ -958,7 +913,7 @@ static void queryJSON(Globals & globals, std::vector & elems, bool { PackageInfo::Outputs outputs = i.queryOutputs(printOutPath); - json &outputObj = pkgObj["outputs"]; + json & outputObj = pkgObj["outputs"]; outputObj = json::object(); for (auto & j : outputs) { if (j.second) @@ -970,11 +925,12 @@ static void queryJSON(Globals & globals, std::vector & elems, bool if (printDrvPath) { auto drvPath = i.queryDrvPath(); - if (drvPath) pkgObj["drvPath"] = globals.state->store->printStorePath(*drvPath); + if (drvPath) + pkgObj["drvPath"] = globals.state->store->printStorePath(*drvPath); } if (printMeta) { - json &metaObj = pkgObj["meta"]; + json & metaObj = pkgObj["meta"]; metaObj = json::object(); StringSet metaNames = i.queryMetaNames(); for (auto & j : metaNames) { @@ -998,10 +954,9 @@ static void queryJSON(Globals & globals, std::vector & elems, bool std::cout << topObj.dump(2); } - static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) { - auto & store { *globals.state->store }; + auto & store{*globals.state->store}; Strings remaining; std::string attrPath; @@ -1022,21 +977,34 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) settings.readOnlyMode = true; /* makes evaluation a bit faster */ - for (Strings::iterator i = opFlags.begin(); i != opFlags.end(); ) { + for (Strings::iterator i = opFlags.begin(); i != opFlags.end();) { auto arg = *i++; - if (arg == 
"--status" || arg == "-s") printStatus = true; - else if (arg == "--no-name") printName = false; - else if (arg == "--system") printSystem = true; - else if (arg == "--description") printDescription = true; - else if (arg == "--compare-versions" || arg == "-c") compareVersions = true; - else if (arg == "--drv-path") printDrvPath = true; - else if (arg == "--out-path") printOutPath = true; - else if (arg == "--meta") printMeta = true; - else if (arg == "--installed") source = sInstalled; - else if (arg == "--available" || arg == "-a") source = sAvailable; - else if (arg == "--xml") xmlOutput = true; - else if (arg == "--json") jsonOutput = true; - else if (arg == "--attr-path" || arg == "-P") printAttrPath = true; + if (arg == "--status" || arg == "-s") + printStatus = true; + else if (arg == "--no-name") + printName = false; + else if (arg == "--system") + printSystem = true; + else if (arg == "--description") + printDescription = true; + else if (arg == "--compare-versions" || arg == "-c") + compareVersions = true; + else if (arg == "--drv-path") + printDrvPath = true; + else if (arg == "--out-path") + printOutPath = true; + else if (arg == "--meta") + printMeta = true; + else if (arg == "--installed") + source = sInstalled; + else if (arg == "--available" || arg == "-a") + source = sAvailable; + else if (arg == "--xml") + xmlOutput = true; + else if (arg == "--json") + jsonOutput = true; + else if (arg == "--attr-path" || arg == "-P") + printAttrPath = true; else if (arg == "--attr" || arg == "-A") attrPath = needArg(i, opFlags, arg); else @@ -1053,24 +1021,26 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) installedElems = queryInstalled(*globals.state, globals.profile); if (source == sAvailable || compareVersions) - loadDerivations(*globals.state, *globals.instSource.nixExprPath, - globals.instSource.systemFilter, *globals.instSource.autoArgs, - attrPath, availElems); + loadDerivations( + *globals.state, + *globals.instSource.nixExprPath, + globals.instSource.systemFilter, + *globals.instSource.autoArgs, + attrPath, + availElems); - PackageInfos elems_ = filterBySelector(*globals.state, - source == sInstalled ? installedElems : availElems, - opArgs, false); + PackageInfos elems_ = + filterBySelector(*globals.state, source == sInstalled ? installedElems : availElems, opArgs, false); PackageInfos & otherElems(source == sInstalled ? availElems : installedElems); - /* Sort them by name. */ /* !!! */ std::vector elems; - for (auto & i : elems_) elems.push_back(i); + for (auto & i : elems_) + elems.push_back(i); sort(elems.begin(), elems.end(), cmpElemByName); - /* We only need to know the installed paths when we are querying the status of the derivation. */ StorePathSet installed; /* installed paths */ @@ -1079,7 +1049,6 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) for (auto & i : installedElems) installed.insert(i.queryOutPath()); - /* Query which paths have substitutes. 
*/ StorePathSet validPaths; StorePathSet substitutablePaths; @@ -1089,14 +1058,14 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) try { paths.insert(i.queryOutPath()); } catch (AssertionError & e) { - printMsg(lvlTalkative, "skipping derivation named '%s' which gives an assertion failure", i.queryName()); + printMsg( + lvlTalkative, "skipping derivation named '%s' which gives an assertion failure", i.queryName()); i.setFailed(); } validPaths = store.queryValidPaths(paths); substitutablePaths = store.querySubstitutablePaths(paths); } - /* Print the desired columns, or XML output. */ if (jsonOutput) { queryJSON(globals, elems, printOutPath, printDrvPath, printMeta); @@ -1114,13 +1083,13 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) for (auto & i : elems) { try { - if (i.hasFailed()) continue; + if (i.hasFailed()) + continue; - //Activity act(*logger, lvlDebug, "outputting query result '%1%'", i.attrPath); + // Activity act(*logger, lvlDebug, "outputting query result '%1%'", i.attrPath); - if (globals.prebuiltOnly && - !validPaths.count(i.queryOutPath()) && - !substitutablePaths.count(i.queryOutPath())) + if (globals.prebuiltOnly && !validPaths.count(i.queryOutPath()) + && !substitutablePaths.count(i.queryOutPath())) continue; /* For table output. */ @@ -1140,9 +1109,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) attrs["substitutable"] = hasSubs ? "1" : "0"; } else columns.push_back( - (std::string) (isInstalled ? "I" : "-") - + (isValid ? "P" : "-") - + (hasSubs ? "S" : "-")); + (std::string) (isInstalled ? "I" : "-") + (isValid ? "P" : "-") + (hasSubs ? "S" : "-")); } if (xmlOutput) @@ -1169,11 +1136,20 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) char ch; switch (diff) { - case cvLess: ch = '>'; break; - case cvEqual: ch = '='; break; - case cvGreater: ch = '<'; break; - case cvUnavail: ch = '-'; break; - default: unreachable(); + case cvLess: + ch = '>'; + break; + case cvEqual: + ch = '='; + break; + case cvGreater: + ch = '<'; + break; + case cvUnavail: + ch = '-'; + break; + default: + unreachable(); } if (xmlOutput) { @@ -1190,15 +1166,16 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) } if (xmlOutput) { - if (i.querySystem() != "") attrs["system"] = i.querySystem(); - } - else if (printSystem) + if (i.querySystem() != "") + attrs["system"] = i.querySystem(); + } else if (printSystem) columns.push_back(i.querySystem()); if (printDrvPath) { auto drvPath = i.queryDrvPath(); if (xmlOutput) { - if (drvPath) attrs["drvPath"] = store.printStorePath(*drvPath); + if (drvPath) + attrs["drvPath"] = store.printStorePath(*drvPath); } else columns.push_back(drvPath ? 
store.printStorePath(*drvPath) : "-"); } @@ -1210,8 +1187,12 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) PackageInfo::Outputs outputs = i.queryOutputs(); std::string s; for (auto & j : outputs) { - if (!s.empty()) s += ';'; - if (j.first != "out") { s += j.first; s += "="; } + if (!s.empty()) + s += ';'; + if (j.first != "out") { + s += j.first; + s += "="; + } s += store.printStorePath(*j.second); } columns.push_back(s); @@ -1220,7 +1201,8 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) if (printDescription) { auto descr = i.queryMetaString("description"); if (xmlOutput) { - if (descr != "") attrs["description"] = descr; + if (descr != "") + attrs["description"] = descr; } else columns.push_back(descr); } @@ -1242,9 +1224,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) attrs2["name"] = j; Value * v = i.queryMeta(j); if (!v) - printError( - "derivation '%s' has invalid meta attribute '%s'", - i.queryName(), j); + printError("derivation '%s' has invalid meta attribute '%s'", i.queryName(), j); else { if (v->type() == nString) { attrs2["type"] = "string"; @@ -1266,7 +1246,8 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) attrs2["type"] = "strings"; XMLOpenElement m(xml, "meta", attrs2); for (auto elem : v->listView()) { - if (elem->type() != nString) continue; + if (elem->type() != nString) + continue; XMLAttrs attrs3; attrs3["value"] = elem->c_str(); xml.writeEmptyElement("string", attrs3); @@ -1275,12 +1256,13 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) attrs2["type"] = "strings"; XMLOpenElement m(xml, "meta", attrs2); for (auto & i : *v->attrs()) { - if (i.value->type() != nString) continue; + if (i.value->type() != nString) + continue; XMLAttrs attrs3; attrs3["type"] = globals.state->symbols[i.name]; attrs3["value"] = i.value->c_str(); xml.writeEmptyElement("string", attrs3); - } + } } } } @@ -1298,10 +1280,10 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) } } - if (!xmlOutput) printTable(table); + if (!xmlOutput) + printTable(table); } - static void opSwitchProfile(Globals & globals, Strings opFlags, Strings opArgs) { if (opFlags.size() > 0) @@ -1315,7 +1297,6 @@ static void opSwitchProfile(Globals & globals, Strings opFlags, Strings opArgs) switchLink(profileLink, profile); } - static void opSwitchGeneration(Globals & globals, Strings opFlags, Strings opArgs) { if (opFlags.size() > 0) @@ -1329,7 +1310,6 @@ static void opSwitchGeneration(Globals & globals, Strings opFlags, Strings opArg throw UsageError("expected a generation number"); } - static void opRollback(Globals & globals, Strings opFlags, Strings opArgs) { if (opFlags.size() > 0) @@ -1340,7 +1320,6 @@ static void opRollback(Globals & globals, Strings opFlags, Strings opArgs) switchGeneration(globals.profile, {}, globals.dryRun); } - static void opListGenerations(Globals & globals, Strings opFlags, Strings opArgs) { if (opFlags.size() > 0) @@ -1366,15 +1345,19 @@ static void opListGenerations(Globals & globals, Strings opFlags, Strings opArgs if (!localtime_r(&i.creationTime, &t)) throw Error("cannot convert time"); #endif - logger->cout("%|4| %|4|-%|02|-%|02| %|02|:%|02|:%|02| %||", + logger->cout( + "%|4| %|4|-%|02|-%|02| %|02|:%|02|:%|02| %||", i.number, - t.tm_year + 1900, t.tm_mon + 1, t.tm_mday, - t.tm_hour, t.tm_min, t.tm_sec, + t.tm_year + 1900, + t.tm_mon + 1, + t.tm_mday, + t.tm_hour, + t.tm_min, + t.tm_sec, i.number == curGen ? 
"(current)" : ""); } } - static void opDeleteGenerations(Globals & globals, Strings opFlags, Strings opArgs) { if (opFlags.size() > 0) @@ -1405,14 +1388,12 @@ static void opDeleteGenerations(Globals & globals, Strings opFlags, Strings opAr } } - static void opVersion(Globals & globals, Strings opFlags, Strings opArgs) { printVersion("nix-env"); } - -static int main_nix_env(int argc, char * * argv) +static int main_nix_env(int argc, char ** argv) { { Strings opFlags, opArgs; @@ -1431,14 +1412,11 @@ static int main_nix_env(int argc, char * * argv) if (!pathExists(nixExprPath)) { try { createDirs(nixExprPath); - replaceSymlink( - defaultChannelsDir(), - nixExprPath + "/channels"); + replaceSymlink(defaultChannelsDir(), nixExprPath + "/channels"); if (!isRootUser()) - replaceSymlink( - rootChannelsDir(), - nixExprPath + "/channels_root"); - } catch (Error &) { } + replaceSymlink(rootChannelsDir(), nixExprPath + "/channels_root"); + } catch (Error &) { + } } globals.dryRun = false; @@ -1461,70 +1439,56 @@ static int main_nix_env(int argc, char * * argv) else if (*arg == "--install" || *arg == "-i") { op = opInstall; opName = "-install"; - } - else if (*arg == "--force-name") // undocumented flag for nix-install-package + } else if (*arg == "--force-name") // undocumented flag for nix-install-package globals.forceName = getArg(*arg, arg, end); else if (*arg == "--uninstall" || *arg == "-e") { op = opUninstall; opName = "-uninstall"; - } - else if (*arg == "--upgrade" || *arg == "-u") { + } else if (*arg == "--upgrade" || *arg == "-u") { op = opUpgrade; opName = "-upgrade"; - } - else if (*arg == "--set-flag") { + } else if (*arg == "--set-flag") { op = opSetFlag; opName = arg->substr(1); - } - else if (*arg == "--set") { + } else if (*arg == "--set") { op = opSet; opName = arg->substr(1); - } - else if (*arg == "--query" || *arg == "-q") { + } else if (*arg == "--query" || *arg == "-q") { op = opQuery; opName = "-query"; - } - else if (*arg == "--profile" || *arg == "-p") + } else if (*arg == "--profile" || *arg == "-p") globals.profile = absPath(getArg(*arg, arg, end)); else if (*arg == "--file" || *arg == "-f") file = getArg(*arg, arg, end); else if (*arg == "--switch-profile" || *arg == "-S") { op = opSwitchProfile; opName = "-switch-profile"; - } - else if (*arg == "--switch-generation" || *arg == "-G") { + } else if (*arg == "--switch-generation" || *arg == "-G") { op = opSwitchGeneration; opName = "-switch-generation"; - } - else if (*arg == "--rollback") { + } else if (*arg == "--rollback") { op = opRollback; opName = arg->substr(1); - } - else if (*arg == "--list-generations") { + } else if (*arg == "--list-generations") { op = opListGenerations; opName = arg->substr(1); - } - else if (*arg == "--delete-generations") { + } else if (*arg == "--delete-generations") { op = opDeleteGenerations; opName = arg->substr(1); - } - else if (*arg == "--dry-run") { + } else if (*arg == "--dry-run") { printInfo("(dry run; not doing anything)"); globals.dryRun = true; - } - else if (*arg == "--system-filter") + } else if (*arg == "--system-filter") globals.instSource.systemFilter = getArg(*arg, arg, end); else if (*arg == "--prebuilt-only" || *arg == "-b") globals.prebuiltOnly = true; else if (*arg != "" && arg->at(0) == '-') { opFlags.push_back(*arg); /* FIXME: hacky */ - if (*arg == "--from-profile" || - (op == opQuery && (*arg == "--attr" || *arg == "-A")) || - (op == opInstall && (*arg == "--priority"))) + if (*arg == "--from-profile" || (op == opQuery && (*arg == "--attr" || *arg == "-A")) + || 
(op == opInstall && (*arg == "--priority"))) opFlags.push_back(getArg(*arg, arg, end)); - } - else + } else opArgs.push_back(*arg); if (oldOp && oldOp != op) @@ -1535,18 +1499,19 @@ static int main_nix_env(int argc, char * * argv) myArgs.parseCmdline(argvToStrings(argc, argv)); - if (showHelp) showManPage("nix-env" + opName); - if (!op) throw UsageError("no operation specified"); + if (showHelp) + showManPage("nix-env" + opName); + if (!op) + throw UsageError("no operation specified"); auto store = openStore(); - globals.state = std::shared_ptr(new EvalState(myArgs.lookupPath, store, fetchSettings, evalSettings)); + globals.state = + std::shared_ptr(new EvalState(myArgs.lookupPath, store, fetchSettings, evalSettings)); globals.state->repair = myArgs.repair; globals.instSource.nixExprPath = std::make_shared( - file != "" - ? lookupFileArg(*globals.state, file) - : globals.state->rootPath(CanonPath(nixExprPath))); + file != "" ? lookupFileArg(*globals.state, file) : globals.state->rootPath(CanonPath(nixExprPath))); globals.instSource.autoArgs = myArgs.getAutoArgs(*globals.state); diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index e149b6aeb..1b6e552f7 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -15,7 +15,6 @@ namespace nix { - PackageInfos queryInstalled(EvalState & state, const Path & userEnv) { PackageInfos elems; @@ -31,10 +30,8 @@ PackageInfos queryInstalled(EvalState & state, const Path & userEnv) return elems; } - -bool createUserEnv(EvalState & state, PackageInfos & elems, - const Path & profile, bool keepDerivations, - const std::string & lockToken) +bool createUserEnv( + EvalState & state, PackageInfos & elems, const Path & profile, bool keepDerivations, const std::string & lockToken) { /* Build the components in the user environment, if they don't exist already. */ @@ -44,9 +41,7 @@ bool createUserEnv(EvalState & state, PackageInfos & elems, drvsToBuild.push_back({*drvPath}); debug("building user environment dependencies"); - state.store->buildPaths( - toDerivedPaths(drvsToBuild), - state.repair ? bmRepair : bmNormal); + state.store->buildPaths(toDerivedPaths(drvsToBuild), state.repair ? bmRepair : bmNormal); /* Construct the whole top level derivation. */ StorePathSet references; @@ -91,7 +86,8 @@ bool createUserEnv(EvalState & state, PackageInfos & elems, auto meta = state.buildBindings(metaNames.size()); for (auto & j : metaNames) { Value * v = i.queryMeta(j); - if (!v) continue; + if (!v) + continue; meta.insert(state.symbols.create(j), v); } @@ -99,7 +95,8 @@ bool createUserEnv(EvalState & state, PackageInfos & elems, (list[n] = state.allocValue())->mkAttrs(attrs); - if (drvPath) references.insert(*drvPath); + if (drvPath) + references.insert(*drvPath); } Value manifest; @@ -111,16 +108,23 @@ bool createUserEnv(EvalState & state, PackageInfos & elems, auto manifestFile = ({ std::ostringstream str; printAmbiguous(manifest, state.symbols, str, nullptr, std::numeric_limits::max()); - StringSource source { toView(str) }; + StringSource source{toView(str)}; state.store->addToStoreFromDump( - source, "env-manifest.nix", FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256, references); + source, + "env-manifest.nix", + FileSerialisationMethod::Flat, + ContentAddressMethod::Raw::Text, + HashAlgorithm::SHA256, + references); }); /* Get the environment builder expression. 
*/ Value envBuilder; - state.eval(state.parseExprFromString( - #include "buildenv.nix.gen.hh" - , state.rootPath(CanonPath::root)), envBuilder); + state.eval( + state.parseExprFromString( +#include "buildenv.nix.gen.hh" + , state.rootPath(CanonPath::root)), + envBuilder); /* Construct a Nix expression that calls the user environment builder with the manifest as argument. */ @@ -147,9 +151,7 @@ bool createUserEnv(EvalState & state, PackageInfos & elems, debug("building user environment"); std::vector topLevelDrvs; topLevelDrvs.push_back({topLevelDrv}); - state.store->buildPaths( - toDerivedPaths(topLevelDrvs), - state.repair ? bmRepair : bmNormal); + state.store->buildPaths(toDerivedPaths(topLevelDrvs), state.repair ? bmRepair : bmNormal); /* Switch the current user environment to the output path. */ auto store2 = state.store.dynamic_pointer_cast(); @@ -172,5 +174,4 @@ bool createUserEnv(EvalState & state, PackageInfos & elems, return true; } - -} +} // namespace nix diff --git a/src/nix-env/user-env.hh b/src/nix-env/user-env.hh index 0a19b8f32..abe25af65 100644 --- a/src/nix-env/user-env.hh +++ b/src/nix-env/user-env.hh @@ -7,8 +7,7 @@ namespace nix { PackageInfos queryInstalled(EvalState & state, const Path & userEnv); -bool createUserEnv(EvalState & state, PackageInfos & elems, - const Path & profile, bool keepDerivations, - const std::string & lockToken); +bool createUserEnv( + EvalState & state, PackageInfos & elems, const Path & profile, bool keepDerivations, const std::string & lockToken); -} +} // namespace nix diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index f7b218efc..3d5c3e26a 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -17,19 +17,23 @@ #include #include - using namespace nix; - static Path gcRoot; static int rootNr = 0; - enum OutputKind { okPlain, okRaw, okXML, okJSON }; -void processExpr(EvalState & state, const Strings & attrPaths, - bool parseOnly, bool strict, Bindings & autoArgs, - bool evalOnly, OutputKind output, bool location, Expr * e) +void processExpr( + EvalState & state, + const Strings & attrPaths, + bool parseOnly, + bool strict, + Bindings & autoArgs, + bool evalOnly, + OutputKind output, + bool location, + Expr * e) { if (parseOnly) { e->show(state.symbols, std::cout); @@ -53,15 +57,16 @@ void processExpr(EvalState & state, const Strings & attrPaths, state.autoCallFunction(autoArgs, v, vRes); if (output == okRaw) std::cout << *state.coerceToString(noPos, vRes, context, "while generating the nix-instantiate output"); - // We intentionally don't output a newline here. The default PS1 for Bash in NixOS starts with a newline - // and other interactive shells like Zsh are smart enough to print a missing newline before the prompt. + // We intentionally don't output a newline here. The default PS1 for Bash in NixOS starts with a newline + // and other interactive shells like Zsh are smart enough to print a missing newline before the prompt. 
else if (output == okXML) printValueAsXML(state, strict, location, vRes, std::cout, context, noPos); else if (output == okJSON) { printValueAsJSON(state, strict, vRes, v.determinePos(noPos), std::cout, context); std::cout << std::endl; } else { - if (strict) state.forceValueDeep(vRes); + if (strict) + state.forceValueDeep(vRes); std::set seen; printAmbiguous(vRes, state.symbols, std::cout, &seen, std::numeric_limits::max()); std::cout << std::endl; @@ -82,7 +87,8 @@ void processExpr(EvalState & state, const Strings & attrPaths, printGCWarning(); else { Path rootName = absPath(gcRoot); - if (++rootNr > 1) rootName += "-" + std::to_string(rootNr); + if (++rootNr > 1) + rootName += "-" + std::to_string(rootNr); auto store2 = state.store.dynamic_pointer_cast(); if (store2) drvPathS = store2->addPermRoot(drvPath, rootName); @@ -93,8 +99,7 @@ void processExpr(EvalState & state, const Strings & attrPaths, } } - -static int main_nix_instantiate(int argc, char * * argv) +static int main_nix_instantiate(int argc, char ** argv) { { Strings files; @@ -169,7 +174,8 @@ static int main_nix_instantiate(int argc, char * * argv) Bindings & autoArgs = *myArgs.getAutoArgs(*state); - if (attrPaths.empty()) attrPaths = {""}; + if (attrPaths.empty()) + attrPaths = {""}; if (findFile) { for (auto & i : files) { @@ -184,17 +190,16 @@ static int main_nix_instantiate(int argc, char * * argv) if (readStdin) { Expr * e = state->parseStdin(); - processExpr(*state, attrPaths, parseOnly, strict, autoArgs, - evalOnly, outputKind, xmlOutputSourceLocation, e); + processExpr( + *state, attrPaths, parseOnly, strict, autoArgs, evalOnly, outputKind, xmlOutputSourceLocation, e); } else if (files.empty() && !fromArgs) files.push_back("./default.nix"); for (auto & i : files) { - Expr * e = fromArgs - ? state->parseExprFromString(i, state->rootPath(".")) - : state->parseExprFromFile(resolveExprPath(lookupFileArg(*state, i))); - processExpr(*state, attrPaths, parseOnly, strict, autoArgs, - evalOnly, outputKind, xmlOutputSourceLocation, e); + Expr * e = fromArgs ? 
state->parseExprFromString(i, state->rootPath(".")) + : state->parseExprFromFile(resolveExprPath(lookupFileArg(*state, i))); + processExpr( + *state, attrPaths, parseOnly, strict, autoArgs, evalOnly, outputKind, xmlOutputSourceLocation, e); } state->maybePrintStats(); diff --git a/src/nix-store/dotgraph.cc b/src/nix-store/dotgraph.cc index f8054b554..e2963b4bb 100644 --- a/src/nix-store/dotgraph.cc +++ b/src/nix-store/dotgraph.cc @@ -3,44 +3,37 @@ #include - using std::cout; namespace nix { - static std::string dotQuote(std::string_view s) { return "\"" + std::string(s) + "\""; } - static const std::string & nextColour() { static int n = 0; - static std::vector colours - { "black", "red", "green", "blue" - , "magenta", "burlywood" }; + static std::vector colours{"black", "red", "green", "blue", "magenta", "burlywood"}; return colours[n++ % colours.size()]; } - static std::string makeEdge(std::string_view src, std::string_view dst) { - return fmt("%1% -> %2% [color = %3%];\n", - dotQuote(src), dotQuote(dst), dotQuote(nextColour())); + return fmt("%1% -> %2% [color = %3%];\n", dotQuote(src), dotQuote(dst), dotQuote(nextColour())); } - -static std::string makeNode(std::string_view id, std::string_view label, - std::string_view colour) +static std::string makeNode(std::string_view id, std::string_view label, std::string_view colour) { - return fmt("%1% [label = %2%, shape = box, " + return fmt( + "%1% [label = %2%, shape = box, " "style = filled, fillcolor = %3%];\n", - dotQuote(id), dotQuote(label), dotQuote(colour)); + dotQuote(id), + dotQuote(label), + dotQuote(colour)); } - void printDotGraph(ref store, StorePathSet && roots) { StorePathSet workList(std::move(roots)); @@ -51,7 +44,8 @@ void printDotGraph(ref store, StorePathSet && roots) while (!workList.empty()) { auto path = std::move(workList.extract(workList.begin()).value()); - if (!doneSet.insert(path).second) continue; + if (!doneSet.insert(path).second) + continue; cout << makeNode(std::string(path.to_string()), path.name(), "#ff0000"); @@ -66,5 +60,4 @@ void printDotGraph(ref store, StorePathSet && roots) cout << "}\n"; } - -} +} // namespace nix diff --git a/src/nix-store/graphml.cc b/src/nix-store/graphml.cc index 3b3188a41..009db05d4 100644 --- a/src/nix-store/graphml.cc +++ b/src/nix-store/graphml.cc @@ -4,12 +4,10 @@ #include - using std::cout; namespace nix { - static inline std::string_view xmlQuote(std::string_view s) { // Luckily, store paths shouldn't contain any character that needs to be @@ -17,20 +15,16 @@ static inline std::string_view xmlQuote(std::string_view s) return s; } - static std::string symbolicName(std::string_view p) { return std::string(p.substr(0, p.find('-') + 1)); } - static std::string makeEdge(std::string_view src, std::string_view dst) { - return fmt(" \n", - xmlQuote(src), xmlQuote(dst)); + return fmt(" \n", xmlQuote(src), xmlQuote(dst)); } - static std::string makeNode(const ValidPathInfo & info) { return fmt( @@ -45,7 +39,6 @@ static std::string makeNode(const ValidPathInfo & info) (info.path.isDerivation() ? 
"derivation" : "output-path")); } - void printGraphML(ref store, StorePathSet && roots) { StorePathSet workList(std::move(roots)); @@ -65,7 +58,8 @@ void printGraphML(ref store, StorePathSet && roots) auto path = std::move(workList.extract(workList.begin()).value()); ret = doneSet.insert(path); - if (ret.second == false) continue; + if (ret.second == false) + continue; auto info = store->queryPathInfo(path); cout << makeNode(*info); @@ -76,12 +70,10 @@ void printGraphML(ref store, StorePathSet && roots) cout << makeEdge(path.to_string(), p.to_string()); } } - } cout << "\n"; cout << "\n"; } - -} +} // namespace nix diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 3da7a8ac1..5ada44949 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -16,9 +16,9 @@ #include "man-pages.hh" #ifndef _WIN32 // TODO implement on Windows or provide allowed-to-noop interface -# include "nix/store/local-store.hh" -# include "nix/util/monitor-fd.hh" -# include "nix/store/posix-fs-canonicalise.hh" +# include "nix/store/local-store.hh" +# include "nix/util/monitor-fd.hh" +# include "nix/store/posix-fs-canonicalise.hh" #endif #include @@ -34,41 +34,37 @@ namespace nix_store { - using namespace nix; using std::cin; using std::cout; - -typedef void (* Operation) (Strings opFlags, Strings opArgs); - +typedef void (*Operation)(Strings opFlags, Strings opArgs); static Path gcRoot; static int rootNr = 0; static bool noOutput = false; static std::shared_ptr store; - #ifndef _WIN32 // TODO reenable on Windows once we have `LocalStore` there ref ensureLocalStore() { auto store2 = std::dynamic_pointer_cast(store); - if (!store2) throw Error("you don't have sufficient rights to use this command"); + if (!store2) + throw Error("you don't have sufficient rights to use this command"); return ref(store2); } #endif - static StorePath useDeriver(const StorePath & path) { - if (path.isDerivation()) return path; + if (path.isDerivation()) + return path; auto info = store->queryPathInfo(path); if (!info->deriver) throw Error("deriver of path '%s' is not known", store->printStorePath(path)); return *info->deriver; } - /* Realise the given path. For a derivation that means build it; for other paths it means ensure their validity. */ static PathSet realisePath(StorePathWithOutputs path, bool build = true) @@ -76,22 +72,23 @@ static PathSet realisePath(StorePathWithOutputs path, bool build = true) auto store2 = std::dynamic_pointer_cast(store); if (path.path.isDerivation()) { - if (build) store->buildPaths({path.toDerivedPath()}); + if (build) + store->buildPaths({path.toDerivedPath()}); auto outputPaths = store->queryDerivationOutputMap(path.path); Derivation drv = store->derivationFromPath(path.path); rootNr++; /* FIXME: Encode this empty special case explicitly in the type. */ if (path.outputs.empty()) - for (auto & i : drv.outputs) path.outputs.insert(i.first); + for (auto & i : drv.outputs) + path.outputs.insert(i.first); PathSet outputs; for (auto & j : path.outputs) { /* Match outputs of a store path with outputs of the derivation that produces it. 
*/ DerivationOutputs::iterator i = drv.outputs.find(j); if (i == drv.outputs.end()) - throw Error("derivation '%s' does not have an output named '%s'", - store2->printStorePath(path.path), j); + throw Error("derivation '%s' does not have an output named '%s'", store2->printStorePath(path.path), j); auto outPath = outputPaths.at(i->first); auto retPath = store->printStorePath(outPath); if (store2) { @@ -99,8 +96,10 @@ static PathSet realisePath(StorePathWithOutputs path, bool build = true) printGCWarning(); else { Path rootName = gcRoot; - if (rootNr > 1) rootName += "-" + std::to_string(rootNr); - if (i->first != "out") rootName += "-" + i->first; + if (rootNr > 1) + rootName += "-" + std::to_string(rootNr); + if (i->first != "out") + rootName += "-" + i->first; retPath = store2->addPermRoot(outPath, rootName); } } @@ -110,7 +109,8 @@ static PathSet realisePath(StorePathWithOutputs path, bool build = true) } else { - if (build) store->ensurePath(path.path); + if (build) + store->ensurePath(path.path); else if (!store->isValidPath(path.path)) throw Error("path '%s' does not exist and cannot be created", store->printStorePath(path.path)); if (store2) { @@ -119,7 +119,8 @@ static PathSet realisePath(StorePathWithOutputs path, bool build = true) else { Path rootName = gcRoot; rootNr++; - if (rootNr > 1) rootName += "-" + std::to_string(rootNr); + if (rootNr > 1) + rootName += "-" + std::to_string(rootNr); return {store2->addPermRoot(path.path, rootName)}; } } @@ -127,7 +128,6 @@ static PathSet realisePath(StorePathWithOutputs path, bool build = true) } } - /* Realise the given paths. */ static void opRealise(Strings opFlags, Strings opArgs) { @@ -136,11 +136,16 @@ static void opRealise(Strings opFlags, Strings opArgs) bool ignoreUnknown = false; for (auto & i : opFlags) - if (i == "--dry-run") dryRun = true; - else if (i == "--repair") buildMode = bmRepair; - else if (i == "--check") buildMode = bmCheck; - else if (i == "--ignore-unknown") ignoreUnknown = true; - else throw UsageError("unknown flag '%1%'", i); + if (i == "--dry-run") + dryRun = true; + else if (i == "--repair") + buildMode = bmRepair; + else if (i == "--check") + buildMode = bmCheck; + else if (i == "--ignore-unknown") + ignoreUnknown = true; + else + throw UsageError("unknown flag '%1%'", i); std::vector paths; for (auto & i : opArgs) @@ -152,7 +157,8 @@ static void opRealise(Strings opFlags, Strings opArgs) if (ignoreUnknown) { std::vector paths2; for (auto & i : paths) - if (!missing.unknown.count(i.path)) paths2.push_back(i); + if (!missing.unknown.count(i.path)) + paths2.push_back(i); paths = std::move(paths2); missing.unknown = StorePathSet(); } @@ -160,7 +166,8 @@ static void opRealise(Strings opFlags, Strings opArgs) if (settings.printMissing) printMissing(ref(store), missing); - if (dryRun) return; + if (dryRun) + return; /* Build all paths at the same time to exploit parallelism. */ store->buildPaths(toDerivedPaths(paths), buildMode); @@ -174,20 +181,18 @@ static void opRealise(Strings opFlags, Strings opArgs) } } - /* Add files to the Nix store and print the resulting paths. 
*/ static void opAdd(Strings opFlags, Strings opArgs) { - if (!opFlags.empty()) throw UsageError("unknown flag"); + if (!opFlags.empty()) + throw UsageError("unknown flag"); for (auto & i : opArgs) { auto sourcePath = PosixSourceAccessor::createAtRoot(makeParentCanonical(i)); - cout << fmt("%s\n", store->printStorePath(store->addToStore( - std::string(baseNameOf(i)), sourcePath))); + cout << fmt("%s\n", store->printStorePath(store->addToStore(std::string(baseNameOf(i)), sourcePath))); } } - /* Preload the output of a fixed-output derivation into the Nix store. */ static void opAddFixed(Strings opFlags, Strings opArgs) @@ -195,8 +200,10 @@ static void opAddFixed(Strings opFlags, Strings opArgs) ContentAddressMethod method = ContentAddressMethod::Raw::Flat; for (auto & i : opFlags) - if (i == "--recursive") method = ContentAddressMethod::Raw::NixArchive; - else throw UsageError("unknown flag '%1%'", i); + if (i == "--recursive") + method = ContentAddressMethod::Raw::NixArchive; + else + throw UsageError("unknown flag '%1%'", i); if (opArgs.empty()) throw UsageError("first argument must be hash algorithm"); @@ -206,23 +213,21 @@ static void opAddFixed(Strings opFlags, Strings opArgs) for (auto & i : opArgs) { auto sourcePath = PosixSourceAccessor::createAtRoot(makeParentCanonical(i)); - std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow( - baseNameOf(i), - sourcePath, - method, - hashAlgo).path)); + std::cout << fmt( + "%s\n", store->printStorePath(store->addToStoreSlow(baseNameOf(i), sourcePath, method, hashAlgo).path)); } } - /* Hack to support caching in `nix-prefetch-url'. */ static void opPrintFixedPath(Strings opFlags, Strings opArgs) { auto method = FileIngestionMethod::Flat; for (const auto & i : opFlags) - if (i == "--recursive") method = FileIngestionMethod::NixArchive; - else throw UsageError("unknown flag '%1%'", i); + if (i == "--recursive") + method = FileIngestionMethod::NixArchive; + else + throw UsageError("unknown flag '%1%'", i); if (opArgs.size() != 3) throw UsageError("'--print-fixed-path' requires three arguments"); @@ -232,17 +237,21 @@ static void opPrintFixedPath(Strings opFlags, Strings opArgs) std::string hash = *i++; std::string name = *i++; - cout << fmt("%s\n", store->printStorePath(store->makeFixedOutputPath(name, FixedOutputInfo { - .method = method, - .hash = Hash::parseAny(hash, hashAlgo), - .references = {}, - }))); + cout << fmt( + "%s\n", + store->printStorePath(store->makeFixedOutputPath( + name, + FixedOutputInfo{ + .method = method, + .hash = Hash::parseAny(hash, hashAlgo), + .references = {}, + }))); } - static StorePathSet maybeUseOutputs(const StorePath & storePath, bool useOutput, bool forceRealise) { - if (forceRealise) realisePath({storePath}); + if (forceRealise) + realisePath({storePath}); if (useOutput && storePath.isDerivation()) { auto drv = store->derivationFromPath(storePath); StorePathSet outputs; @@ -250,20 +259,20 @@ static StorePathSet maybeUseOutputs(const StorePath & storePath, bool useOutput, return store->queryDerivationOutputs(storePath); for (auto & i : drv.outputsAndOptPaths(*store)) { if (!i.second.second) - throw UsageError("Cannot use output path of floating content-addressing derivation until we know what it is (e.g. by building it)"); + throw UsageError( + "Cannot use output path of floating content-addressing derivation until we know what it is (e.g. 
by building it)"); outputs.insert(*i.second.second); } return outputs; - } - else return {storePath}; + } else + return {storePath}; } - /* Some code to print a tree representation of a derivation dependency graph. Topological sorting is used to keep the tree relatively flat. */ -static void printTree(const StorePath & path, - const std::string & firstPad, const std::string & tailPad, StorePathSet & done) +static void +printTree(const StorePath & path, const std::string & firstPad, const std::string & tailPad, StorePathSet & done) { if (!done.insert(path).second) { cout << fmt("%s%s [...]\n", firstPad, store->printStorePath(path)); @@ -281,23 +290,33 @@ static void printTree(const StorePath & path, auto sorted = store->topoSortPaths(info->references); reverse(sorted.begin(), sorted.end()); - for (const auto &[n, i] : enumerate(sorted)) { + for (const auto & [n, i] : enumerate(sorted)) { bool last = n + 1 == sorted.size(); - printTree(i, - tailPad + (last ? treeLast : treeConn), - tailPad + (last ? treeNull : treeLine), - done); + printTree(i, tailPad + (last ? treeLast : treeConn), tailPad + (last ? treeNull : treeLine), done); } } - /* Perform various sorts of queries. */ static void opQuery(Strings opFlags, Strings opArgs) { - enum QueryType - { qOutputs, qRequisites, qReferences, qReferrers - , qReferrersClosure, qDeriver, qValidDerivers, qBinding, qHash, qSize - , qTree, qGraph, qGraphML, qResolve, qRoots }; + enum QueryType { + qOutputs, + qRequisites, + qReferences, + qReferrers, + qReferrersClosure, + qDeriver, + qValidDerivers, + qBinding, + qHash, + qSize, + qTree, + qGraph, + qGraphML, + qResolve, + qRoots + }; + std::optional query; bool useOutput = false; bool includeOutputs = false; @@ -306,187 +325,203 @@ static void opQuery(Strings opFlags, Strings opArgs) for (auto & i : opFlags) { std::optional prev = query; - if (i == "--outputs") query = qOutputs; - else if (i == "--requisites" || i == "-R") query = qRequisites; - else if (i == "--references") query = qReferences; - else if (i == "--referrers" || i == "--referers") query = qReferrers; - else if (i == "--referrers-closure" || i == "--referers-closure") query = qReferrersClosure; - else if (i == "--deriver" || i == "-d") query = qDeriver; - else if (i == "--valid-derivers") query = qValidDerivers; + if (i == "--outputs") + query = qOutputs; + else if (i == "--requisites" || i == "-R") + query = qRequisites; + else if (i == "--references") + query = qReferences; + else if (i == "--referrers" || i == "--referers") + query = qReferrers; + else if (i == "--referrers-closure" || i == "--referers-closure") + query = qReferrersClosure; + else if (i == "--deriver" || i == "-d") + query = qDeriver; + else if (i == "--valid-derivers") + query = qValidDerivers; else if (i == "--binding" || i == "-b") { if (opArgs.size() == 0) throw UsageError("expected binding name"); bindingName = opArgs.front(); opArgs.pop_front(); query = qBinding; - } - else if (i == "--hash") query = qHash; - else if (i == "--size") query = qSize; - else if (i == "--tree") query = qTree; - else if (i == "--graph") query = qGraph; - else if (i == "--graphml") query = qGraphML; - else if (i == "--resolve") query = qResolve; - else if (i == "--roots") query = qRoots; - else if (i == "--use-output" || i == "-u") useOutput = true; - else if (i == "--force-realise" || i == "--force-realize" || i == "-f") forceRealise = true; - else if (i == "--include-outputs") includeOutputs = true; - else throw UsageError("unknown flag '%1%'", i); + } else if (i == "--hash") + query 
= qHash; + else if (i == "--size") + query = qSize; + else if (i == "--tree") + query = qTree; + else if (i == "--graph") + query = qGraph; + else if (i == "--graphml") + query = qGraphML; + else if (i == "--resolve") + query = qResolve; + else if (i == "--roots") + query = qRoots; + else if (i == "--use-output" || i == "-u") + useOutput = true; + else if (i == "--force-realise" || i == "--force-realize" || i == "-f") + forceRealise = true; + else if (i == "--include-outputs") + includeOutputs = true; + else + throw UsageError("unknown flag '%1%'", i); if (prev && prev != query) throw UsageError("query type '%1%' conflicts with earlier flag", i); } - if (!query) query = qOutputs; + if (!query) + query = qOutputs; RunPager pager; switch (*query) { - case qOutputs: { - for (auto & i : opArgs) { - auto outputs = maybeUseOutputs(store->followLinksToStorePath(i), true, forceRealise); - for (auto & outputPath : outputs) - cout << fmt("%1%\n", store->printStorePath(outputPath)); + case qOutputs: { + for (auto & i : opArgs) { + auto outputs = maybeUseOutputs(store->followLinksToStorePath(i), true, forceRealise); + for (auto & outputPath : outputs) + cout << fmt("%1%\n", store->printStorePath(outputPath)); + } + break; + } + + case qRequisites: + case qReferences: + case qReferrers: + case qReferrersClosure: { + StorePathSet paths; + for (auto & i : opArgs) { + auto ps = maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise); + for (auto & j : ps) { + if (query == qRequisites) + store->computeFSClosure(j, paths, false, includeOutputs); + else if (query == qReferences) { + for (auto & p : store->queryPathInfo(j)->references) + paths.insert(p); + } else if (query == qReferrers) { + StorePathSet tmp; + store->queryReferrers(j, tmp); + for (auto & i : tmp) + paths.insert(i); + } else if (query == qReferrersClosure) + store->computeFSClosure(j, paths, true); } - break; } + auto sorted = store->topoSortPaths(paths); + for (StorePaths::reverse_iterator i = sorted.rbegin(); i != sorted.rend(); ++i) + cout << fmt("%s\n", store->printStorePath(*i)); + break; + } - case qRequisites: - case qReferences: - case qReferrers: - case qReferrersClosure: { - StorePathSet paths; - for (auto & i : opArgs) { - auto ps = maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise); - for (auto & j : ps) { - if (query == qRequisites) store->computeFSClosure(j, paths, false, includeOutputs); - else if (query == qReferences) { - for (auto & p : store->queryPathInfo(j)->references) - paths.insert(p); - } - else if (query == qReferrers) { - StorePathSet tmp; - store->queryReferrers(j, tmp); - for (auto & i : tmp) - paths.insert(i); - } - else if (query == qReferrersClosure) store->computeFSClosure(j, paths, true); - } + case qDeriver: + for (auto & i : opArgs) { + auto info = store->queryPathInfo(store->followLinksToStorePath(i)); + cout << fmt("%s\n", info->deriver ? 
store->printStorePath(*info->deriver) : "unknown-deriver"); + } + break; + + case qValidDerivers: { + StorePathSet result; + for (auto & i : opArgs) { + auto derivers = store->queryValidDerivers(store->followLinksToStorePath(i)); + for (const auto & i : derivers) { + result.insert(i); } - auto sorted = store->topoSortPaths(paths); - for (StorePaths::reverse_iterator i = sorted.rbegin(); - i != sorted.rend(); ++i) - cout << fmt("%s\n", store->printStorePath(*i)); - break; } + auto sorted = store->topoSortPaths(result); + for (StorePaths::reverse_iterator i = sorted.rbegin(); i != sorted.rend(); ++i) + cout << fmt("%s\n", store->printStorePath(*i)); + break; + } - case qDeriver: - for (auto & i : opArgs) { - auto info = store->queryPathInfo(store->followLinksToStorePath(i)); - cout << fmt("%s\n", info->deriver ? store->printStorePath(*info->deriver) : "unknown-deriver"); + case qBinding: + for (auto & i : opArgs) { + auto path = useDeriver(store->followLinksToStorePath(i)); + Derivation drv = store->derivationFromPath(path); + StringPairs::iterator j = drv.env.find(bindingName); + if (j == drv.env.end()) + throw Error( + "derivation '%s' has no environment binding named '%s'", store->printStorePath(path), bindingName); + cout << fmt("%s\n", j->second); + } + break; + + case qHash: + case qSize: + for (auto & i : opArgs) { + for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) { + auto info = store->queryPathInfo(j); + if (query == qHash) { + assert(info->narHash.algo == HashAlgorithm::SHA256); + cout << fmt("%s\n", info->narHash.to_string(HashFormat::Nix32, true)); + } else if (query == qSize) + cout << fmt("%d\n", info->narSize); } - break; - - case qValidDerivers: { - StorePathSet result; - for (auto & i : opArgs) { - auto derivers = store->queryValidDerivers(store->followLinksToStorePath(i)); - for (const auto &i: derivers) { - result.insert(i); - } - } - auto sorted = store->topoSortPaths(result); - for (StorePaths::reverse_iterator i = sorted.rbegin(); - i != sorted.rend(); ++i) - cout << fmt("%s\n", store->printStorePath(*i)); - break; } + break; - case qBinding: - for (auto & i : opArgs) { - auto path = useDeriver(store->followLinksToStorePath(i)); - Derivation drv = store->derivationFromPath(path); - StringPairs::iterator j = drv.env.find(bindingName); - if (j == drv.env.end()) - throw Error("derivation '%s' has no environment binding named '%s'", - store->printStorePath(path), bindingName); - cout << fmt("%s\n", j->second); - } - break; + case qTree: { + StorePathSet done; + for (auto & i : opArgs) + printTree(store->followLinksToStorePath(i), "", "", done); + break; + } - case qHash: - case qSize: - for (auto & i : opArgs) { - for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) { - auto info = store->queryPathInfo(j); - if (query == qHash) { - assert(info->narHash.algo == HashAlgorithm::SHA256); - cout << fmt("%s\n", info->narHash.to_string(HashFormat::Nix32, true)); - } else if (query == qSize) - cout << fmt("%d\n", info->narSize); - } - } - break; + case qGraph: { + StorePathSet roots; + for (auto & i : opArgs) + for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) + roots.insert(j); + printDotGraph(ref(store), std::move(roots)); + break; + } - case qTree: { - StorePathSet done; - for (auto & i : opArgs) - printTree(store->followLinksToStorePath(i), "", "", done); - break; - } + case qGraphML: { + StorePathSet roots; + for (auto & i : opArgs) + for (auto & j : 
maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) + roots.insert(j); + printGraphML(ref(store), std::move(roots)); + break; + } - case qGraph: { - StorePathSet roots; - for (auto & i : opArgs) - for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) - roots.insert(j); - printDotGraph(ref(store), std::move(roots)); - break; - } + case qResolve: { + for (auto & i : opArgs) + cout << fmt("%s\n", store->printStorePath(store->followLinksToStorePath(i))); + break; + } - case qGraphML: { - StorePathSet roots; - for (auto & i : opArgs) - for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) - roots.insert(j); - printGraphML(ref(store), std::move(roots)); - break; - } + case qRoots: { + StorePathSet args; + for (auto & i : opArgs) + for (auto & p : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) + args.insert(p); - case qResolve: { - for (auto & i : opArgs) - cout << fmt("%s\n", store->printStorePath(store->followLinksToStorePath(i))); - break; - } + StorePathSet referrers; + store->computeFSClosure(args, referrers, true, settings.gcKeepOutputs, settings.gcKeepDerivations); - case qRoots: { - StorePathSet args; - for (auto & i : opArgs) - for (auto & p : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) - args.insert(p); + auto & gcStore = require(*store); + Roots roots = gcStore.findRoots(false); + for (auto & [target, links] : roots) + if (referrers.find(target) != referrers.end()) + for (auto & link : links) + cout << fmt("%1% -> %2%\n", link, gcStore.printStorePath(target)); + break; + } - StorePathSet referrers; - store->computeFSClosure( - args, referrers, true, settings.gcKeepOutputs, settings.gcKeepDerivations); - - auto & gcStore = require(*store); - Roots roots = gcStore.findRoots(false); - for (auto & [target, links] : roots) - if (referrers.find(target) != referrers.end()) - for (auto & link : links) - cout << fmt("%1% -> %2%\n", link, gcStore.printStorePath(target)); - break; - } - - default: - unreachable(); + default: + unreachable(); } } - static void opPrintEnv(Strings opFlags, Strings opArgs) { - if (!opFlags.empty()) throw UsageError("unknown flag"); - if (opArgs.size() != 1) throw UsageError("'--print-env' requires one derivation store path"); + if (!opFlags.empty()) + throw UsageError("unknown flag"); + if (opArgs.size() != 1) + throw UsageError("'--print-env' requires one derivation store path"); Path drvPath = opArgs.front(); Derivation drv = store->derivationFromPath(store->parseStorePath(drvPath)); @@ -501,17 +536,18 @@ static void opPrintEnv(Strings opFlags, Strings opArgs) cout << "export _args; _args='"; bool first = true; for (auto & i : drv.args) { - if (!first) cout << ' '; + if (!first) + cout << ' '; first = false; cout << escapeShellArgAlways(i); } cout << "'\n"; } - static void opReadLog(Strings opFlags, Strings opArgs) { - if (!opFlags.empty()) throw UsageError("unknown flag"); + if (!opFlags.empty()) + throw UsageError("unknown flag"); auto & logStore = require(*store); @@ -526,10 +562,10 @@ static void opReadLog(Strings opFlags, Strings opArgs) } } - static void opDumpDB(Strings opFlags, Strings opArgs) { - if (!opFlags.empty()) throw UsageError("unknown flag"); + if (!opFlags.empty()) + throw UsageError("unknown flag"); if (!opArgs.empty()) { for (auto & i : opArgs) cout << store->makeValidityRegistration({store->followLinksToStorePath(i)}, true, true); @@ -539,7 +575,6 @@ static void opDumpDB(Strings 
opFlags, Strings opArgs) } } - static void registerValidity(bool reregister, bool hashGiven, bool canonicalise) { ValidPathInfos infos; @@ -547,9 +582,10 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise) while (1) { // We use a dummy value because we'll set it below. FIXME be correct by // construction and avoid dummy value. - auto hashResultOpt = !hashGiven ? std::optional { {Hash::dummy, -1} } : std::nullopt; + auto hashResultOpt = !hashGiven ? std::optional{{Hash::dummy, -1}} : std::nullopt; auto info = decodeValidPathInfo(*store, cin, hashResultOpt); - if (!info) break; + if (!info) + break; if (!store->isValidPath(info->path) || reregister) { /* !!! races */ if (canonicalise) @@ -560,8 +596,9 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise) #endif if (!hashGiven) { HashResult hash = hashPath( - {store->getFSAccessor(false), CanonPath { info->path.to_string() }}, - FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256); + {store->getFSAccessor(false), CanonPath{info->path.to_string()}}, + FileSerialisationMethod::NixArchive, + HashAlgorithm::SHA256); info->narHash = hash.first; info->narSize = hash.second; } @@ -574,39 +611,43 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise) #endif } - static void opLoadDB(Strings opFlags, Strings opArgs) { - if (!opFlags.empty()) throw UsageError("unknown flag"); + if (!opFlags.empty()) + throw UsageError("unknown flag"); if (!opArgs.empty()) throw UsageError("no arguments expected"); registerValidity(true, true, false); } - static void opRegisterValidity(Strings opFlags, Strings opArgs) { bool reregister = false; // !!! maybe this should be the default bool hashGiven = false; for (auto & i : opFlags) - if (i == "--reregister") reregister = true; - else if (i == "--hash-given") hashGiven = true; - else throw UsageError("unknown flag '%1%'", i); + if (i == "--reregister") + reregister = true; + else if (i == "--hash-given") + hashGiven = true; + else + throw UsageError("unknown flag '%1%'", i); - if (!opArgs.empty()) throw UsageError("no arguments expected"); + if (!opArgs.empty()) + throw UsageError("no arguments expected"); registerValidity(reregister, hashGiven, true); } - static void opCheckValidity(Strings opFlags, Strings opArgs) { bool printInvalid = false; for (auto & i : opFlags) - if (i == "--print-invalid") printInvalid = true; - else throw UsageError("unknown flag '%1%'", i); + if (i == "--print-invalid") + printInvalid = true; + else + throw UsageError("unknown flag '%1%'", i); for (auto & i : opArgs) { auto path = store->followLinksToStorePath(i); @@ -619,7 +660,6 @@ static void opCheckValidity(Strings opFlags, Strings opArgs) } } - static void opGC(Strings opFlags, Strings opArgs) { bool printRoots = false; @@ -630,14 +670,19 @@ static void opGC(Strings opFlags, Strings opArgs) /* Do what? 
*/ for (auto i = opFlags.begin(); i != opFlags.end(); ++i) - if (*i == "--print-roots") printRoots = true; - else if (*i == "--print-live") options.action = GCOptions::gcReturnLive; - else if (*i == "--print-dead") options.action = GCOptions::gcReturnDead; + if (*i == "--print-roots") + printRoots = true; + else if (*i == "--print-live") + options.action = GCOptions::gcReturnLive; + else if (*i == "--print-dead") + options.action = GCOptions::gcReturnDead; else if (*i == "--max-freed") options.maxFreed = std::max(getIntArg(*i, i, opFlags.end(), true), (int64_t) 0); - else throw UsageError("bad sub-operation '%1%' in GC", *i); + else + throw UsageError("bad sub-operation '%1%' in GC", *i); - if (!opArgs.empty()) throw UsageError("no arguments expected"); + if (!opArgs.empty()) + throw UsageError("no arguments expected"); auto & gcStore = require(*store); @@ -662,7 +707,6 @@ static void opGC(Strings opFlags, Strings opArgs) } } - /* Remove paths from the Nix store if possible (i.e., if they do not have any remaining referrers and are not reachable from any GC roots). */ @@ -672,8 +716,10 @@ static void opDelete(Strings opFlags, Strings opArgs) options.action = GCOptions::gcDeleteSpecific; for (auto & i : opFlags) - if (i == "--ignore-liveness") options.ignoreLiveness = true; - else throw UsageError("unknown flag '%1%'", i); + if (i == "--ignore-liveness") + options.ignoreLiveness = true; + else + throw UsageError("unknown flag '%1%'", i); for (auto & i : opArgs) options.pathsToDelete.insert(store->followLinksToStorePath(i)); @@ -685,12 +731,13 @@ static void opDelete(Strings opFlags, Strings opArgs) gcStore.collectGarbage(options, results); } - /* Dump a path as a Nix archive. The archive is written to stdout */ static void opDump(Strings opFlags, Strings opArgs) { - if (!opFlags.empty()) throw UsageError("unknown flag"); - if (opArgs.size() != 1) throw UsageError("only one argument allowed"); + if (!opFlags.empty()) + throw UsageError("unknown flag"); + if (opArgs.size() != 1) + throw UsageError("only one argument allowed"); FdSink sink(getStandardOutput()); std::string path = *opArgs.begin(); @@ -698,18 +745,18 @@ static void opDump(Strings opFlags, Strings opArgs) sink.flush(); } - /* Restore a value from a Nix archive. The archive is read from stdin. */ static void opRestore(Strings opFlags, Strings opArgs) { - if (!opFlags.empty()) throw UsageError("unknown flag"); - if (opArgs.size() != 1) throw UsageError("only one argument allowed"); + if (!opFlags.empty()) + throw UsageError("unknown flag"); + if (opArgs.size() != 1) + throw UsageError("only one argument allowed"); FdSource source(STDIN_FILENO); restorePath(*opArgs.begin(), source); } - static void opExport(Strings opFlags, Strings opArgs) { for (auto & i : opFlags) @@ -725,13 +772,13 @@ static void opExport(Strings opFlags, Strings opArgs) sink.flush(); } - static void opImport(Strings opFlags, Strings opArgs) { for (auto & i : opFlags) throw UsageError("unknown flag '%1%'", i); - if (!opArgs.empty()) throw UsageError("no arguments expected"); + if (!opArgs.empty()) + throw UsageError("no arguments expected"); FdSource source(STDIN_FILENO); auto paths = store->importPaths(source, NoCheckSigs); @@ -740,18 +787,17 @@ static void opImport(Strings opFlags, Strings opArgs) cout << fmt("%s\n", store->printStorePath(i)) << std::flush; } - /* Initialise the Nix databases. 
*/ static void opInit(Strings opFlags, Strings opArgs) { - if (!opFlags.empty()) throw UsageError("unknown flag"); + if (!opFlags.empty()) + throw UsageError("unknown flag"); if (!opArgs.empty()) throw UsageError("no arguments expected"); /* Doesn't do anything right now; database tables are initialised automatically. */ } - /* Verify the consistency of the Nix environment. */ static void opVerify(Strings opFlags, Strings opArgs) { @@ -762,9 +808,12 @@ static void opVerify(Strings opFlags, Strings opArgs) RepairFlag repair = NoRepair; for (auto & i : opFlags) - if (i == "--check-contents") checkContents = true; - else if (i == "--repair") repair = Repair; - else throw UsageError("unknown flag '%1%'", i); + if (i == "--check-contents") + checkContents = true; + else if (i == "--repair") + repair = Repair; + else + throw UsageError("unknown flag '%1%'", i); if (store->verifyStore(checkContents, repair)) { warn("not all store errors were fixed"); @@ -772,7 +821,6 @@ static void opVerify(Strings opFlags, Strings opArgs) } } - /* Verify whether the contents of the given store path have not changed. */ static void opVerifyPath(Strings opFlags, Strings opArgs) { @@ -789,7 +837,8 @@ static void opVerifyPath(Strings opFlags, Strings opArgs) store->narFromPath(path, sink); auto current = sink.finish(); if (current.first != info->narHash) { - printError("path '%s' was modified! expected hash '%s', got '%s'", + printError( + "path '%s' was modified! expected hash '%s', got '%s'", store->printStorePath(path), info->narHash.to_string(HashFormat::Nix32, true), current.first.to_string(HashFormat::Nix32, true)); @@ -800,7 +849,6 @@ static void opVerifyPath(Strings opFlags, Strings opArgs) throw Exit(status); } - /* Repair the contents of the given path by redownloading it using a substituter (if available). */ static void opRepairPath(Strings opFlags, Strings opArgs) @@ -827,24 +875,25 @@ static void opServe(Strings opFlags, Strings opArgs) { bool writeAllowed = false; for (auto & i : opFlags) - if (i == "--write") writeAllowed = true; - else throw UsageError("unknown flag '%1%'", i); + if (i == "--write") + writeAllowed = true; + else + throw UsageError("unknown flag '%1%'", i); - if (!opArgs.empty()) throw UsageError("no arguments expected"); + if (!opArgs.empty()) + throw UsageError("no arguments expected"); FdSource in(STDIN_FILENO); FdSink out(getStandardOutput()); /* Exchange the greeting. 
*/ - ServeProto::Version clientVersion = - ServeProto::BasicServerConnection::handshake( - out, in, SERVE_PROTOCOL_VERSION); + ServeProto::Version clientVersion = ServeProto::BasicServerConnection::handshake(out, in, SERVE_PROTOCOL_VERSION); - ServeProto::ReadConn rconn { + ServeProto::ReadConn rconn{ .from = in, .version = clientVersion, }; - ServeProto::WriteConn wconn { + ServeProto::WriteConn wconn{ .to = out, .version = clientVersion, }; @@ -895,151 +944,155 @@ static void opServe(Strings opFlags, Strings opArgs) switch (cmd) { - case ServeProto::Command::QueryValidPaths: { - bool lock = readInt(in); - bool substitute = readInt(in); - auto paths = ServeProto::Serialise::read(*store, rconn); - if (lock && writeAllowed) - for (auto & path : paths) - store->addTempRoot(path); + case ServeProto::Command::QueryValidPaths: { + bool lock = readInt(in); + bool substitute = readInt(in); + auto paths = ServeProto::Serialise::read(*store, rconn); + if (lock && writeAllowed) + for (auto & path : paths) + store->addTempRoot(path); - if (substitute && writeAllowed) { - store->substitutePaths(paths); - } - - ServeProto::write(*store, wconn, store->queryValidPaths(paths)); - break; + if (substitute && writeAllowed) { + store->substitutePaths(paths); } - case ServeProto::Command::QueryPathInfos: { - auto paths = ServeProto::Serialise::read(*store, rconn); - // !!! Maybe we want a queryPathInfos? - for (auto & i : paths) { - try { - auto info = store->queryPathInfo(i); - out << store->printStorePath(info->path); - ServeProto::write(*store, wconn, static_cast(*info)); - } catch (InvalidPath &) { - } - } - out << ""; - break; - } - - case ServeProto::Command::DumpStorePath: - store->narFromPath(store->parseStorePath(readString(in)), out); - break; - - case ServeProto::Command::ImportPaths: { - if (!writeAllowed) throw Error("importing paths is not allowed"); - store->importPaths(in, NoCheckSigs); // FIXME: should we skip sig checking? - out << 1; // indicate success - break; - } - - case ServeProto::Command::ExportPaths: { - readInt(in); // obsolete - store->exportPaths(ServeProto::Serialise::read(*store, rconn), out); - break; - } - - case ServeProto::Command::BuildPaths: { - - if (!writeAllowed) throw Error("building paths is not allowed"); - - std::vector paths; - for (auto & s : readStrings(in)) - paths.push_back(parsePathWithOutputs(*store, s)); - - getBuildSettings(); + ServeProto::write(*store, wconn, store->queryValidPaths(paths)); + break; + } + case ServeProto::Command::QueryPathInfos: { + auto paths = ServeProto::Serialise::read(*store, rconn); + // !!! Maybe we want a queryPathInfos? + for (auto & i : paths) { try { -#ifndef _WIN32 // TODO figure out if Windows needs something similar - MonitorFdHup monitor(in.fd); -#endif - store->buildPaths(toDerivedPaths(paths)); - out << 0; - } catch (Error & e) { - assert(e.info().status); - out << e.info().status << e.msg(); + auto info = store->queryPathInfo(i); + out << store->printStorePath(info->path); + ServeProto::write(*store, wconn, static_cast(*info)); + } catch (InvalidPath &) { } - break; } + out << ""; + break; + } - case ServeProto::Command::BuildDerivation: { /* Used by hydra-queue-runner. 
*/ + case ServeProto::Command::DumpStorePath: + store->narFromPath(store->parseStorePath(readString(in)), out); + break; - if (!writeAllowed) throw Error("building paths is not allowed"); + case ServeProto::Command::ImportPaths: { + if (!writeAllowed) + throw Error("importing paths is not allowed"); + store->importPaths(in, NoCheckSigs); // FIXME: should we skip sig checking? + out << 1; // indicate success + break; + } - auto drvPath = store->parseStorePath(readString(in)); - BasicDerivation drv; - readDerivation(in, *store, drv, Derivation::nameFromPath(drvPath)); + case ServeProto::Command::ExportPaths: { + readInt(in); // obsolete + store->exportPaths(ServeProto::Serialise::read(*store, rconn), out); + break; + } - getBuildSettings(); + case ServeProto::Command::BuildPaths: { + if (!writeAllowed) + throw Error("building paths is not allowed"); + + std::vector paths; + for (auto & s : readStrings(in)) + paths.push_back(parsePathWithOutputs(*store, s)); + + getBuildSettings(); + + try { #ifndef _WIN32 // TODO figure out if Windows needs something similar MonitorFdHup monitor(in.fd); #endif - auto status = store->buildDerivation(drvPath, drv); - - ServeProto::write(*store, wconn, status); - break; + store->buildPaths(toDerivedPaths(paths)); + out << 0; + } catch (Error & e) { + assert(e.info().status); + out << e.info().status << e.msg(); } + break; + } - case ServeProto::Command::QueryClosure: { - bool includeOutputs = readInt(in); - StorePathSet closure; - store->computeFSClosure(ServeProto::Serialise::read(*store, rconn), - closure, false, includeOutputs); - ServeProto::write(*store, wconn, closure); - break; - } + case ServeProto::Command::BuildDerivation: { /* Used by hydra-queue-runner. */ - case ServeProto::Command::AddToStoreNar: { - if (!writeAllowed) throw Error("importing paths is not allowed"); + if (!writeAllowed) + throw Error("building paths is not allowed"); - auto path = readString(in); - auto deriver = readString(in); - ValidPathInfo info { - store->parseStorePath(path), - Hash::parseAny(readString(in), HashAlgorithm::SHA256), - }; - if (deriver != "") - info.deriver = store->parseStorePath(deriver); - info.references = ServeProto::Serialise::read(*store, rconn); - in >> info.registrationTime >> info.narSize >> info.ultimate; - info.sigs = readStrings(in); - info.ca = ContentAddress::parseOpt(readString(in)); + auto drvPath = store->parseStorePath(readString(in)); + BasicDerivation drv; + readDerivation(in, *store, drv, Derivation::nameFromPath(drvPath)); - if (info.narSize == 0) - throw Error("narInfo is too old and missing the narSize field"); + getBuildSettings(); - SizedSource sizedSource(in, info.narSize); +#ifndef _WIN32 // TODO figure out if Windows needs something similar + MonitorFdHup monitor(in.fd); +#endif + auto status = store->buildDerivation(drvPath, drv); - store->addToStore(info, sizedSource, NoRepair, NoCheckSigs); + ServeProto::write(*store, wconn, status); + break; + } - // consume all the data that has been sent before continuing. 
- sizedSource.drainAll(); + case ServeProto::Command::QueryClosure: { + bool includeOutputs = readInt(in); + StorePathSet closure; + store->computeFSClosure( + ServeProto::Serialise::read(*store, rconn), closure, false, includeOutputs); + ServeProto::write(*store, wconn, closure); + break; + } - out << 1; // indicate success + case ServeProto::Command::AddToStoreNar: { + if (!writeAllowed) + throw Error("importing paths is not allowed"); - break; - } + auto path = readString(in); + auto deriver = readString(in); + ValidPathInfo info{ + store->parseStorePath(path), + Hash::parseAny(readString(in), HashAlgorithm::SHA256), + }; + if (deriver != "") + info.deriver = store->parseStorePath(deriver); + info.references = ServeProto::Serialise::read(*store, rconn); + in >> info.registrationTime >> info.narSize >> info.ultimate; + info.sigs = readStrings(in); + info.ca = ContentAddress::parseOpt(readString(in)); - default: - throw Error("unknown serve command %1%", cmd); + if (info.narSize == 0) + throw Error("narInfo is too old and missing the narSize field"); + + SizedSource sizedSource(in, info.narSize); + + store->addToStore(info, sizedSource, NoRepair, NoCheckSigs); + + // consume all the data that has been sent before continuing. + sizedSource.drainAll(); + + out << 1; // indicate success + + break; + } + + default: + throw Error("unknown serve command %1%", cmd); } out.flush(); } } - static void opGenerateBinaryCacheKey(Strings opFlags, Strings opArgs) { for (auto & i : opFlags) throw UsageError("unknown flag '%1%'", i); - if (opArgs.size() != 3) throw UsageError("three arguments expected"); + if (opArgs.size() != 3) + throw UsageError("three arguments expected"); auto i = opArgs.begin(); std::string keyName = *i++; std::string secretKeyFile = *i++; @@ -1052,17 +1105,15 @@ static void opGenerateBinaryCacheKey(Strings opFlags, Strings opArgs) writeFile(secretKeyFile, secretKey.to_string()); } - static void opVersion(Strings opFlags, Strings opArgs) { printVersion("nix-store"); } - /* Scan the arguments; find the operation, set global flags, put all other flags in a list, and put all other arguments in another list. 
*/ -static int main_nix_store(int argc, char * * argv) +static int main_nix_store(int argc, char ** argv) { { Strings opFlags, opArgs; @@ -1081,92 +1132,72 @@ static int main_nix_store(int argc, char * * argv) else if (*arg == "--realise" || *arg == "--realize" || *arg == "-r") { op = opRealise; opName = "-realise"; - } - else if (*arg == "--add" || *arg == "-A"){ + } else if (*arg == "--add" || *arg == "-A") { op = opAdd; opName = "-add"; - } - else if (*arg == "--add-fixed") { + } else if (*arg == "--add-fixed") { op = opAddFixed; opName = arg->substr(1); - } - else if (*arg == "--print-fixed-path") + } else if (*arg == "--print-fixed-path") op = opPrintFixedPath; else if (*arg == "--delete") { op = opDelete; opName = arg->substr(1); - } - else if (*arg == "--query" || *arg == "-q") { + } else if (*arg == "--query" || *arg == "-q") { op = opQuery; opName = "-query"; - } - else if (*arg == "--print-env") { + } else if (*arg == "--print-env") { op = opPrintEnv; opName = arg->substr(1); - } - else if (*arg == "--read-log" || *arg == "-l") { + } else if (*arg == "--read-log" || *arg == "-l") { op = opReadLog; opName = "-read-log"; - } - else if (*arg == "--dump-db") { + } else if (*arg == "--dump-db") { op = opDumpDB; opName = arg->substr(1); - } - else if (*arg == "--load-db") { + } else if (*arg == "--load-db") { op = opLoadDB; opName = arg->substr(1); - } - else if (*arg == "--register-validity") + } else if (*arg == "--register-validity") op = opRegisterValidity; else if (*arg == "--check-validity") op = opCheckValidity; else if (*arg == "--gc") { op = opGC; opName = arg->substr(1); - } - else if (*arg == "--dump") { + } else if (*arg == "--dump") { op = opDump; opName = arg->substr(1); - } - else if (*arg == "--restore") { + } else if (*arg == "--restore") { op = opRestore; opName = arg->substr(1); - } - else if (*arg == "--export") { + } else if (*arg == "--export") { op = opExport; opName = arg->substr(1); - } - else if (*arg == "--import") { + } else if (*arg == "--import") { op = opImport; opName = arg->substr(1); - } - else if (*arg == "--init") + } else if (*arg == "--init") op = opInit; else if (*arg == "--verify") { op = opVerify; opName = arg->substr(1); - } - else if (*arg == "--verify-path") { + } else if (*arg == "--verify-path") { op = opVerifyPath; opName = arg->substr(1); - } - else if (*arg == "--repair-path") { + } else if (*arg == "--repair-path") { op = opRepairPath; opName = arg->substr(1); - } - else if (*arg == "--optimise" || *arg == "--optimize") { + } else if (*arg == "--optimise" || *arg == "--optimize") { op = opOptimise; opName = "-optimise"; - } - else if (*arg == "--serve") { + } else if (*arg == "--serve") { op = opServe; opName = arg->substr(1); - } - else if (*arg == "--generate-binary-cache-key") { + } else if (*arg == "--generate-binary-cache-key") { op = opGenerateBinaryCacheKey; opName = arg->substr(1); - } - else if (*arg == "--add-root") + } else if (*arg == "--add-root") gcRoot = absPath(getArg(*arg, arg, end)); else if (*arg == "--stdin" && !isatty(STDIN_FILENO)) readFromStdIn = true; @@ -1178,15 +1209,14 @@ static int main_nix_store(int argc, char * * argv) opFlags.push_back(*arg); if (*arg == "--max-freed" || *arg == "--max-links" || *arg == "--max-atime") /* !!! 
hack */ opFlags.push_back(getArg(*arg, arg, end)); - } - else + } else opArgs.push_back(*arg); if (readFromStdIn && op != opImport && op != opRestore && op != opServe) { - std::string word; - while (std::cin >> word) { - opArgs.emplace_back(std::move(word)); - }; + std::string word; + while (std::cin >> word) { + opArgs.emplace_back(std::move(word)); + }; } if (oldOp && oldOp != op) @@ -1195,8 +1225,10 @@ static int main_nix_store(int argc, char * * argv) return true; }); - if (showHelp) showManPage("nix-store" + opName); - if (!op) throw UsageError("no operation specified"); + if (showHelp) + showManPage("nix-store" + opName); + if (!op) + throw UsageError("no operation specified"); if (op != opDump && op != opRestore) /* !!! hack */ store = openStore(); @@ -1209,4 +1241,4 @@ static int main_nix_store(int argc, char * * argv) static RegisterLegacyCommand r_nix_store("nix-store", main_nix_store); -} +} // namespace nix_store diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc index 9b7306fdd..e87f49546 100644 --- a/src/nix/add-to-store.cc +++ b/src/nix/add-to-store.cc @@ -35,15 +35,13 @@ struct CmdAddToStore : MixDryRun, StoreCommand void run(ref store) override { - if (!namePart) namePart = baseNameOf(path); + if (!namePart) + namePart = baseNameOf(path); auto sourcePath = PosixSourceAccessor::createAtRoot(makeParentCanonical(path)); - auto storePath = dryRun - ? store->computeStorePath( - *namePart, sourcePath, caMethod, hashAlgo, {}).first - : store->addToStoreSlow( - *namePart, sourcePath, caMethod, hashAlgo, {}).path; + auto storePath = dryRun ? store->computeStorePath(*namePart, sourcePath, caMethod, hashAlgo, {}).first + : store->addToStoreSlow(*namePart, sourcePath, caMethod, hashAlgo, {}).path; logger->cout("%s", store->printStorePath(storePath)); } @@ -59,8 +57,8 @@ struct CmdAdd : CmdAddToStore std::string doc() override { return - #include "add.md" - ; +#include "add.md" + ; } }; diff --git a/src/nix/app.cc b/src/nix/app.cc index c9a9f9caf..412b53817 100644 --- a/src/nix/app.cc +++ b/src/nix/app.cc @@ -15,29 +15,27 @@ namespace nix { * Return the rewrites that are needed to resolve a string whose context is * included in `dependencies`. */ -StringPairs resolveRewrites( - Store & store, - const std::vector & dependencies) +StringPairs resolveRewrites(Store & store, const std::vector & dependencies) { StringPairs res; if (!experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { return res; } - for (auto &dep: dependencies) { + for (auto & dep : dependencies) { auto drvDep = std::get_if(&dep.path); if (!drvDep) { continue; } - for (const auto & [ outputName, outputPath ] : drvDep->outputs) { + for (const auto & [outputName, outputPath] : drvDep->outputs) { res.emplace( DownstreamPlaceholder::fromSingleDerivedPathBuilt( - SingleDerivedPath::Built { + SingleDerivedPath::Built{ .drvPath = make_ref(drvDep->drvPath->discardOutputPath()), .output = outputName, - }).render(), - store.printStorePath(outputPath) - ); + }) + .render(), + store.printStorePath(outputPath)); } } return res; @@ -46,10 +44,8 @@ StringPairs resolveRewrites( /** * Resolve the given string assuming the given context. 
*/ -std::string resolveString( - Store & store, - const std::string & toResolve, - const std::vector & dependencies) +std::string +resolveString(Store & store, const std::string & toResolve, const std::vector & dependencies) { auto rewrites = resolveRewrites(store, dependencies); return rewriteStrings(toResolve, rewrites); @@ -62,9 +58,10 @@ UnresolvedApp InstallableValue::toApp(EvalState & state) auto type = cursor->getAttr("type")->getString(); - std::string expectedType = !attrPath.empty() && - (state.symbols[attrPath[0]] == "apps" || state.symbols[attrPath[0]] == "defaultApp") - ? "app" : "derivation"; + std::string expectedType = + !attrPath.empty() && (state.symbols[attrPath[0]] == "apps" || state.symbols[attrPath[0]] == "defaultApp") + ? "app" + : "derivation"; if (type != expectedType) throw Error("attribute '%s' should have type '%s'", cursor->getAttrPathStr(), expectedType); @@ -73,29 +70,32 @@ UnresolvedApp InstallableValue::toApp(EvalState & state) std::vector context2; for (auto & c : context) { - context2.emplace_back(std::visit(overloaded { - [&](const NixStringContextElem::DrvDeep & d) -> DerivedPath { - /* We want all outputs of the drv */ - return DerivedPath::Built { - .drvPath = makeConstantStorePathRef(d.drvPath), - .outputs = OutputsSpec::All {}, - }; - }, - [&](const NixStringContextElem::Built & b) -> DerivedPath { - return DerivedPath::Built { - .drvPath = b.drvPath, - .outputs = OutputsSpec::Names { b.output }, - }; - }, - [&](const NixStringContextElem::Opaque & o) -> DerivedPath { - return DerivedPath::Opaque { - .path = o.path, - }; - }, - }, c.raw)); + context2.emplace_back( + std::visit( + overloaded{ + [&](const NixStringContextElem::DrvDeep & d) -> DerivedPath { + /* We want all outputs of the drv */ + return DerivedPath::Built{ + .drvPath = makeConstantStorePathRef(d.drvPath), + .outputs = OutputsSpec::All{}, + }; + }, + [&](const NixStringContextElem::Built & b) -> DerivedPath { + return DerivedPath::Built{ + .drvPath = b.drvPath, + .outputs = OutputsSpec::Names{b.output}, + }; + }, + [&](const NixStringContextElem::Opaque & o) -> DerivedPath { + return DerivedPath::Opaque{ + .path = o.path, + }; + }, + }, + c.raw)); } - return UnresolvedApp { App { + return UnresolvedApp{App{ .context = std::move(context2), .program = program, }}; @@ -109,18 +109,13 @@ UnresolvedApp InstallableValue::toApp(EvalState & state) auto aPname = cursor->maybeGetAttr("pname"); auto aMeta = cursor->maybeGetAttr(state.sMeta); auto aMainProgram = aMeta ? aMeta->maybeGetAttr("mainProgram") : nullptr; - auto mainProgram = - aMainProgram - ? aMainProgram->getString() - : aPname - ? aPname->getString() - : DrvName(name).name; + auto mainProgram = aMainProgram ? aMainProgram->getString() : aPname ? 
aPname->getString() : DrvName(name).name; auto program = outPath + "/bin/" + mainProgram; - return UnresolvedApp { App { - .context = { DerivedPath::Built { + return UnresolvedApp{App{ + .context = {DerivedPath::Built{ .drvPath = makeConstantStorePathRef(drvPath), - .outputs = OutputsSpec::Names { outputName }, - } }, + .outputs = OutputsSpec::Names{outputName}, + }}, .program = program, }}; } @@ -134,8 +129,7 @@ std::vector UnresolvedApp::build(ref evalStore, ref< Installables installableContext; for (auto & ctxElt : unresolved.context) - installableContext.push_back( - make_ref(store, DerivedPath { ctxElt })); + installableContext.push_back(make_ref(store, DerivedPath{ctxElt})); return Installable::build(evalStore, store, Realise::Outputs, installableContext); } @@ -153,4 +147,4 @@ App UnresolvedApp::resolve(ref evalStore, ref store) return res; } -} +} // namespace nix diff --git a/src/nix/build.cc b/src/nix/build.cc index bd0c8862b..eb47c3133 100644 --- a/src/nix/build.cc +++ b/src/nix/build.cc @@ -12,31 +12,32 @@ static nlohmann::json derivedPathsToJSON(const DerivedPaths & paths, Store & sto { auto res = nlohmann::json::array(); for (auto & t : paths) { - std::visit([&](const auto & t) { - res.push_back(t.toJSON(store)); - }, t.raw()); + std::visit([&](const auto & t) { res.push_back(t.toJSON(store)); }, t.raw()); } return res; } -static nlohmann::json builtPathsWithResultToJSON(const std::vector & buildables, const Store & store) +static nlohmann::json +builtPathsWithResultToJSON(const std::vector & buildables, const Store & store) { auto res = nlohmann::json::array(); for (auto & b : buildables) { - std::visit([&](const auto & t) { - auto j = t.toJSON(store); - if (b.result) { - if (b.result->startTime) - j["startTime"] = b.result->startTime; - if (b.result->stopTime) - j["stopTime"] = b.result->stopTime; - if (b.result->cpuUser) - j["cpuUser"] = ((double) b.result->cpuUser->count()) / 1000000; - if (b.result->cpuSystem) - j["cpuSystem"] = ((double) b.result->cpuSystem->count()) / 1000000; - } - res.push_back(j); - }, b.path.raw()); + std::visit( + [&](const auto & t) { + auto j = t.toJSON(store); + if (b.result) { + if (b.result->startTime) + j["startTime"] = b.result->startTime; + if (b.result->stopTime) + j["stopTime"] = b.result->stopTime; + if (b.result->cpuUser) + j["cpuUser"] = ((double) b.result->cpuUser->count()) / 1000000; + if (b.result->cpuSystem) + j["cpuSystem"] = ((double) b.result->cpuSystem->count()) / 1000000; + } + res.push_back(j); + }, + b.path.raw()); } return res; } @@ -69,8 +70,8 @@ struct CmdBuild : InstallablesCommand, MixOutLinkByDefault, MixDryRun, MixJSON, std::string doc() override { return - #include "build.md" - ; +#include "build.md" + ; } void run(ref store, Installables && installables) override @@ -90,29 +91,27 @@ struct CmdBuild : InstallablesCommand, MixOutLinkByDefault, MixDryRun, MixJSON, return; } - auto buildables = Installable::build( - getEvalStore(), store, - Realise::Outputs, - installables, - repair ? bmRepair : buildMode); + auto buildables = + Installable::build(getEvalStore(), store, Realise::Outputs, installables, repair ? 
bmRepair : buildMode); - if (json) logger->cout("%s", builtPathsWithResultToJSON(buildables, *store).dump()); + if (json) + logger->cout("%s", builtPathsWithResultToJSON(buildables, *store).dump()); createOutLinksMaybe(buildables, store); if (printOutputPaths) { logger->stop(); for (auto & buildable : buildables) { - std::visit(overloaded { - [&](const BuiltPath::Opaque & bo) { - logger->cout(store->printStorePath(bo.path)); + std::visit( + overloaded{ + [&](const BuiltPath::Opaque & bo) { logger->cout(store->printStorePath(bo.path)); }, + [&](const BuiltPath::Built & bfd) { + for (auto & output : bfd.outputs) { + logger->cout(store->printStorePath(output.second)); + } + }, }, - [&](const BuiltPath::Built & bfd) { - for (auto & output : bfd.outputs) { - logger->cout(store->printStorePath(output.second)); - } - }, - }, buildable.path.raw()); + buildable.path.raw()); } } diff --git a/src/nix/bundle.cc b/src/nix/bundle.cc index 50d7bf6a3..ed70ba47e 100644 --- a/src/nix/bundle.cc +++ b/src/nix/bundle.cc @@ -6,7 +6,9 @@ #include "nix/store/local-fs-store.hh" #include "nix/expr/eval-inline.hh" -namespace nix::fs { using namespace std::filesystem; } +namespace nix::fs { +using namespace std::filesystem; +} using namespace nix; @@ -30,12 +32,12 @@ struct CmdBundle : InstallableValueCommand addFlag({ .longName = "out-link", .shortName = 'o', - .description = "Override the name of the symlink to the build result. It defaults to the base name of the app.", + .description = + "Override the name of the symlink to the build result. It defaults to the base name of the app.", .labels = {"path"}, .handler = {&outLink}, .completer = completePath, }); - } std::string description() override @@ -46,19 +48,19 @@ struct CmdBundle : InstallableValueCommand std::string doc() override { return - #include "bundle.md" - ; +#include "bundle.md" + ; } - Category category() override { return catSecondary; } + Category category() override + { + return catSecondary; + } // FIXME: cut&paste from CmdRun. Strings getDefaultFlakeAttrPaths() override { - Strings res{ - "apps." + settings.thisSystem.get() + ".default", - "defaultApp." + settings.thisSystem.get() - }; + Strings res{"apps." + settings.thisSystem.get() + ".default", "defaultApp." + settings.thisSystem.get()}; for (auto & s : SourceExprCommand::getDefaultFlakeAttrPaths()) res.push_back(s); return res; @@ -78,18 +80,18 @@ struct CmdBundle : InstallableValueCommand auto val = installable->toValue(*evalState).first; - auto [bundlerFlakeRef, bundlerName, extendedOutputsSpec] = - parseFlakeRefWithFragmentAndExtendedOutputsSpec( - fetchSettings, bundler, std::filesystem::current_path().string()); - const flake::LockFlags lockFlags{ .writeLockFile = false }; - InstallableFlake bundler{this, - evalState, std::move(bundlerFlakeRef), bundlerName, std::move(extendedOutputsSpec), - {"bundlers." + settings.thisSystem.get() + ".default", - "defaultBundler." + settings.thisSystem.get() - }, + auto [bundlerFlakeRef, bundlerName, extendedOutputsSpec] = parseFlakeRefWithFragmentAndExtendedOutputsSpec( + fetchSettings, bundler, std::filesystem::current_path().string()); + const flake::LockFlags lockFlags{.writeLockFile = false}; + InstallableFlake bundler{ + this, + evalState, + std::move(bundlerFlakeRef), + bundlerName, + std::move(extendedOutputsSpec), + {"bundlers." + settings.thisSystem.get() + ".default", "defaultBundler." + settings.thisSystem.get()}, {"bundlers." 
+ settings.thisSystem.get() + "."}, - lockFlags - }; + lockFlags}; auto vRes = evalState->allocValue(); evalState->callFunction(*bundler.toValue(*evalState).first, *val, *vRes, noPos); @@ -113,9 +115,9 @@ struct CmdBundle : InstallableValueCommand auto outPath = evalState->coerceToStorePath(attr2->pos, *attr2->value, context2, ""); store->buildPaths({ - DerivedPath::Built { + DerivedPath::Built{ .drvPath = makeConstantStorePathRef(drvPath), - .outputs = OutputsSpec::All { }, + .outputs = OutputsSpec::All{}, }, }); diff --git a/src/nix/cat.cc b/src/nix/cat.cc index aa27446d2..276e01f5d 100644 --- a/src/nix/cat.cc +++ b/src/nix/cat.cc @@ -23,11 +23,7 @@ struct CmdCatStore : StoreCommand, MixCat CmdCatStore() { - expectArgs({ - .label = "path", - .handler = {&path}, - .completer = completePath - }); + expectArgs({.label = "path", .handler = {&path}, .completer = completePath}); } std::string description() override @@ -38,8 +34,8 @@ struct CmdCatStore : StoreCommand, MixCat std::string doc() override { return - #include "store-cat.md" - ; +#include "store-cat.md" + ; } void run(ref store) override @@ -57,11 +53,7 @@ struct CmdCatNar : StoreCommand, MixCat CmdCatNar() { - expectArgs({ - .label = "nar", - .handler = {&narPath}, - .completer = completePath - }); + expectArgs({.label = "nar", .handler = {&narPath}, .completer = completePath}); expectArg("path", &path); } @@ -73,8 +65,8 @@ struct CmdCatNar : StoreCommand, MixCat std::string doc() override { return - #include "nar-cat.md" - ; +#include "nar-cat.md" + ; } void run(ref store) override diff --git a/src/nix/config-check.cc b/src/nix/config-check.cc index 27d053b9f..7fcb7be7e 100644 --- a/src/nix/config-check.cc +++ b/src/nix/config-check.cc @@ -10,7 +10,9 @@ #include "nix/store/worker-protocol.hh" #include "nix/util/executable-path.hh" -namespace nix::fs { using namespace std::filesystem; } +namespace nix::fs { +using namespace std::filesystem; +} using namespace nix; @@ -26,21 +28,24 @@ std::string formatProtocol(unsigned int proto) return "unknown"; } -bool checkPass(std::string_view msg) { +bool checkPass(std::string_view msg) +{ notice(ANSI_GREEN "[PASS] " ANSI_NORMAL + msg); return true; } -bool checkFail(std::string_view msg) { +bool checkFail(std::string_view msg) +{ notice(ANSI_RED "[FAIL] " ANSI_NORMAL + msg); return false; } -void checkInfo(std::string_view msg) { +void checkInfo(std::string_view msg) +{ notice(ANSI_BLUE "[INFO] " ANSI_NORMAL + msg); } -} +} // namespace struct CmdConfigCheck : StoreCommand { @@ -59,7 +64,10 @@ struct CmdConfigCheck : StoreCommand return "check your system for potential problems and print a PASS or FAIL for each check"; } - Category category() override { return catNixInstallation; } + Category category() override + { + return catNixInstallation; + } void run(ref store) override { @@ -83,7 +91,7 @@ struct CmdConfigCheck : StoreCommand for (auto & dir : ExecutablePath::load().directories) { auto candidate = dir / "nix-env"; if (std::filesystem::exists(candidate)) - dirs.insert(std::filesystem::canonical(candidate).parent_path() ); + dirs.insert(std::filesystem::canonical(candidate).parent_path()); } if (dirs.size() != 1) { @@ -106,9 +114,10 @@ struct CmdConfigCheck : StoreCommand try { auto userEnv = std::filesystem::weakly_canonical(profileDir); - auto noContainsProfiles = [&]{ + auto noContainsProfiles = [&] { for (auto && part : profileDir) - if (part == "profiles") return false; + if (part == "profiles") + return false; return true; }; @@ -121,7 +130,8 @@ struct CmdConfigCheck : StoreCommand 
dirs.insert(dir); } } catch (SystemError &) { - } catch (std::filesystem::filesystem_error &) {} + } catch (std::filesystem::filesystem_error &) { + } } if (!dirs.empty()) { @@ -141,8 +151,8 @@ struct CmdConfigCheck : StoreCommand bool checkStoreProtocol(unsigned int storeProto) { unsigned int clientProto = GET_PROTOCOL_MAJOR(SERVE_PROTOCOL_VERSION) == GET_PROTOCOL_MAJOR(storeProto) - ? SERVE_PROTOCOL_VERSION - : PROTOCOL_VERSION; + ? SERVE_PROTOCOL_VERSION + : PROTOCOL_VERSION; if (clientProto != storeProto) { std::ostringstream ss; @@ -160,9 +170,7 @@ struct CmdConfigCheck : StoreCommand void checkTrustedUser(ref store) { if (auto trustedMay = store->isTrustedClient()) { - std::string_view trusted = trustedMay.value() - ? "trusted" - : "not trusted"; + std::string_view trusted = trustedMay.value() ? "trusted" : "not trusted"; checkInfo(fmt("You are %s by store uri: %s", trusted, store->getUri())); } else { checkInfo(fmt("Store uri: %s doesn't have a notion of trusted user", store->getUri())); @@ -170,4 +178,4 @@ struct CmdConfigCheck : StoreCommand } }; -static auto rCmdConfigCheck = registerCommand2({ "config", "check" }); +static auto rCmdConfigCheck = registerCommand2({"config", "check"}); diff --git a/src/nix/config.cc b/src/nix/config.cc index cd82b08a6..c2a9fd8e2 100644 --- a/src/nix/config.cc +++ b/src/nix/config.cc @@ -10,22 +10,28 @@ using namespace nix; struct CmdConfig : NixMultiCommand { - CmdConfig() : NixMultiCommand("config", RegisterCommand::getCommandsFor({"config"})) - { } + CmdConfig() + : NixMultiCommand("config", RegisterCommand::getCommandsFor({"config"})) + { + } std::string description() override { return "manipulate the Nix configuration"; } - Category category() override { return catUtility; } + Category category() override + { + return catUtility; + } }; struct CmdConfigShow : Command, MixJSON { std::optional name; - CmdConfigShow() { + CmdConfigShow() + { expectArgs({ .label = {"name"}, .optional = true, @@ -38,7 +44,10 @@ struct CmdConfigShow : Command, MixJSON return "show the Nix configuration or the value of a specific setting"; } - Category category() override { return catUtility; } + Category category() override + { + return catUtility; + } void run() override { diff --git a/src/nix/copy.cc b/src/nix/copy.cc index 013f2a7e3..62e8b64f5 100644 --- a/src/nix/copy.cc +++ b/src/nix/copy.cc @@ -18,7 +18,8 @@ struct CmdCopy : virtual CopyCommand, virtual BuiltPathsCommand, MixProfile addFlag({ .longName = "out-link", .shortName = 'o', - .description = "Create symlinks prefixed with *path* to the top-level store paths fetched from the source store.", + .description = + "Create symlinks prefixed with *path* to the top-level store paths fetched from the source store.", .labels = {"path"}, .handler = {&outLink}, .completer = completePath, @@ -48,11 +49,14 @@ struct CmdCopy : virtual CopyCommand, virtual BuiltPathsCommand, MixProfile std::string doc() override { return - #include "copy.md" - ; +#include "copy.md" + ; } - Category category() override { return catSecondary; } + Category category() override + { + return catSecondary; + } void run(ref srcStore, BuiltPaths && allPaths, BuiltPaths && rootPaths) override { @@ -65,8 +69,7 @@ struct CmdCopy : virtual CopyCommand, virtual BuiltPathsCommand, MixProfile stuffToCopy.insert(theseRealisations.begin(), theseRealisations.end()); } - copyPaths( - *srcStore, *dstStore, stuffToCopy, NoRepair, checkSigs, substitute); + copyPaths(*srcStore, *dstStore, stuffToCopy, NoRepair, checkSigs, substitute); 
updateProfile(rootPaths); diff --git a/src/nix/crash-handler.cc b/src/nix/crash-handler.cc index d65773fa0..17c948dab 100644 --- a/src/nix/crash-handler.cc +++ b/src/nix/crash-handler.cc @@ -55,7 +55,7 @@ void onTerminate() std::abort(); } -} +} // namespace void registerCrashHandler() { @@ -65,4 +65,4 @@ void registerCrashHandler() // If you want signals, set up a minidump system and do it out-of-process. std::set_terminate(onTerminate); } -} +} // namespace nix diff --git a/src/nix/crash-handler.hh b/src/nix/crash-handler.hh index 018e86747..06404a4b3 100644 --- a/src/nix/crash-handler.hh +++ b/src/nix/crash-handler.hh @@ -1,4 +1,5 @@ #pragma once + /// @file Crash handler for Nix that prints back traces (hopefully in instances where it is not just going to crash the /// process itself). @@ -8,4 +9,4 @@ namespace nix { * detectStackOverflow(). */ void registerCrashHandler(); -} +} // namespace nix diff --git a/src/nix/derivation-add.cc b/src/nix/derivation-add.cc index e99c44deb..0f797bb20 100644 --- a/src/nix/derivation-add.cc +++ b/src/nix/derivation-add.cc @@ -20,11 +20,14 @@ struct CmdAddDerivation : MixDryRun, StoreCommand std::string doc() override { return - #include "derivation-add.md" - ; +#include "derivation-add.md" + ; } - Category category() override { return catUtility; } + Category category() override + { + return catUtility; + } void run(ref store) override { diff --git a/src/nix/derivation-show.cc b/src/nix/derivation-show.cc index 26108b8b8..1a61ccd5c 100644 --- a/src/nix/derivation-show.cc +++ b/src/nix/derivation-show.cc @@ -33,11 +33,14 @@ struct CmdShowDerivation : InstallablesCommand, MixPrintJSON std::string doc() override { return - #include "derivation-show.md" - ; +#include "derivation-show.md" + ; } - Category category() override { return catUtility; } + Category category() override + { + return catUtility; + } void run(ref store, Installables && installables) override { @@ -52,10 +55,10 @@ struct CmdShowDerivation : InstallablesCommand, MixPrintJSON json jsonRoot = json::object(); for (auto & drvPath : drvPaths) { - if (!drvPath.isDerivation()) continue; + if (!drvPath.isDerivation()) + continue; - jsonRoot[store->printStorePath(drvPath)] = - store->readDerivation(drvPath).toJSON(*store); + jsonRoot[store->printStorePath(drvPath)] = store->readDerivation(drvPath).toJSON(*store); } printJSON(jsonRoot); } diff --git a/src/nix/derivation.cc b/src/nix/derivation.cc index ee62ab4dc..2634048ac 100644 --- a/src/nix/derivation.cc +++ b/src/nix/derivation.cc @@ -4,15 +4,20 @@ using namespace nix; struct CmdDerivation : NixMultiCommand { - CmdDerivation() : NixMultiCommand("derivation", RegisterCommand::getCommandsFor({"derivation"})) - { } + CmdDerivation() + : NixMultiCommand("derivation", RegisterCommand::getCommandsFor({"derivation"})) + { + } std::string description() override { return "Work with derivations, Nix's notion of a build plan."; } - Category category() override { return catUtility; } + Category category() override + { + return catUtility; + } }; static auto rCmdDerivation = registerCommand("derivation"); diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 37bce6ca0..d3381a988 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -9,7 +9,7 @@ #include "nix/store/derivations.hh" #ifndef _WIN32 // TODO re-enable on Windows -# include "run.hh" +# include "run.hh" #endif #include @@ -20,20 +20,21 @@ #include "nix/util/strings.hh" -namespace nix::fs { using namespace std::filesystem; } +namespace nix::fs { +using namespace std::filesystem; +} 
using namespace nix; struct DevelopSettings : Config { - Setting bashPrompt{this, "", "bash-prompt", - "The bash prompt (`PS1`) in `nix develop` shells."}; + Setting bashPrompt{this, "", "bash-prompt", "The bash prompt (`PS1`) in `nix develop` shells."}; - Setting bashPromptPrefix{this, "", "bash-prompt-prefix", - "Prefix prepended to the `PS1` environment variable in `nix develop` shells."}; + Setting bashPromptPrefix{ + this, "", "bash-prompt-prefix", "Prefix prepended to the `PS1` environment variable in `nix develop` shells."}; - Setting bashPromptSuffix{this, "", "bash-prompt-suffix", - "Suffix appended to the `PS1` environment variable in `nix develop` shells."}; + Setting bashPromptSuffix{ + this, "", "bash-prompt-suffix", "Suffix appended to the `PS1` environment variable in `nix develop` shells."}; }; static DevelopSettings developSettings; @@ -47,7 +48,7 @@ struct BuildEnvironment bool exported; std::string value; - bool operator == (const String & other) const + bool operator==(const String & other) const { return exported == other.exported && value == other.value; } @@ -72,7 +73,8 @@ struct BuildEnvironment for (auto & [name, info] : json["variables"].items()) { std::string type = info["type"]; if (type == "var" || type == "exported") - res.vars.insert({name, BuildEnvironment::String { .exported = type == "exported", .value = info["value"] }}); + res.vars.insert( + {name, BuildEnvironment::String{.exported = type == "exported", .value = info["value"]}}); else if (type == "array") res.vars.insert({name, (Array) info["value"]}); else if (type == "associative") @@ -107,12 +109,10 @@ struct BuildEnvironment if (auto str = std::get_if(&value)) { info["type"] = str->exported ? "exported" : "var"; info["value"] = str->value; - } - else if (auto arr = std::get_if(&value)) { + } else if (auto arr = std::get_if(&value)) { info["type"] = "array"; info["value"] = *arr; - } - else if (auto arr = std::get_if(&value)) { + } else if (auto arr = std::get_if(&value)) { info["type"] = "associative"; info["value"] = *arr; } @@ -159,14 +159,12 @@ struct BuildEnvironment out << fmt("%s=%s\n", name, escapeShellArgAlways(str->value)); if (str->exported) out << fmt("export %s\n", name); - } - else if (auto arr = std::get_if(&value)) { + } else if (auto arr = std::get_if(&value)) { out << "declare -a " << name << "=("; for (auto & s : *arr) out << escapeShellArgAlways(s) << " "; out << ")\n"; - } - else if (auto arr = std::get_if(&value)) { + } else if (auto arr = std::get_if(&value)) { out << "declare -A " << name << "=("; for (auto & [n, v] : *arr) out << "[" << escapeShellArgAlways(n) << "]=" << escapeShellArgAlways(v) << " "; @@ -206,12 +204,11 @@ struct BuildEnvironment Array assocKeys; std::for_each(assoc->begin(), assoc->end(), [&](auto & n) { assocKeys.push_back(n.first); }); return assocKeys; - } - else + } else throw Error("bash variable is not a string or array"); } - bool operator == (const BuildEnvironment & other) const + bool operator==(const BuildEnvironment & other) const { return vars == other.vars && bashFunctions == other.bashFunctions; } @@ -226,7 +223,7 @@ struct BuildEnvironment }; const static std::string getEnvSh = - #include "get-env.sh.gen.hh" +#include "get-env.sh.gen.hh" ; /* Given an existing derivation, return the shell environment as @@ -243,9 +240,14 @@ static StorePath getDerivationEnvironment(ref store, ref evalStore throw Error("'nix develop' only works on derivations that use 'bash' as their builder"); auto getEnvShPath = ({ - StringSource source { getEnvSh }; + 
StringSource source{getEnvSh}; evalStore->addToStoreFromDump( - source, "get-env.sh", FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256, {}); + source, + "get-env.sh", + FileSerialisationMethod::Flat, + ContentAddressMethod::Raw::Text, + HashAlgorithm::SHA256, + {}); }); drv.args = {store->printStorePath(getEnvShPath)}; @@ -264,12 +266,11 @@ static StorePath getDerivationEnvironment(ref store, ref evalStore drv.inputSrcs.insert(std::move(getEnvShPath)); if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { for (auto & output : drv.outputs) { - output.second = DerivationOutput::Deferred {}, - drv.env[output.first] = hashPlaceholder(output.first); + output.second = DerivationOutput::Deferred{}, drv.env[output.first] = hashPlaceholder(output.first); } } else { for (auto & output : drv.outputs) { - output.second = DerivationOutput::Deferred { }; + output.second = DerivationOutput::Deferred{}; drv.env[output.first] = ""; } auto hashesModulo = hashDerivationModulo(*evalStore, drv, true); @@ -277,7 +278,7 @@ static StorePath getDerivationEnvironment(ref store, ref evalStore for (auto & output : drv.outputs) { Hash h = hashesModulo.hashes.at(output.first); auto outPath = store->makeOutputPath(output.first, h, drv.name); - output.second = DerivationOutput::InputAddressed { + output.second = DerivationOutput::InputAddressed{ .path = outPath, }; drv.env[output.first] = store->printStorePath(outPath); @@ -288,11 +289,12 @@ static StorePath getDerivationEnvironment(ref store, ref evalStore /* Build the derivation. */ store->buildPaths( - { DerivedPath::Built { + {DerivedPath::Built{ .drvPath = makeConstantStorePathRef(shellDrvPath), - .outputs = OutputsSpec::All { }, + .outputs = OutputsSpec::All{}, }}, - bmNormal, evalStore); + bmNormal, + evalStore); for (auto & [_0, optPath] : evalStore->queryPartialDerivationOutputMap(shellDrvPath)) { assert(optPath); @@ -345,7 +347,7 @@ struct Common : InstallableCommand, MixProfile ref store, const BuildEnvironment & buildEnvironment, const std::filesystem::path & tmpDir, - const std::filesystem::path & outputsDir = std::filesystem::path { std::filesystem::current_path() } / "outputs") + const std::filesystem::path & outputsDir = std::filesystem::path{std::filesystem::current_path()} / "outputs") { // A list of colon-separated environment variables that should be // prepended to, rather than overwritten, in order to keep the shell usable. 
@@ -384,10 +386,7 @@ struct Common : InstallableCommand, MixProfile StringMap rewrites; if (buildEnvironment.providesStructuredAttrs()) { for (auto & [outputName, from] : BuildEnvironment::getAssociative(outputs->second)) { - rewrites.insert({ - from, - (outputsDir / outputName).string() - }); + rewrites.insert({from, (outputsDir / outputName).string()}); } } else { for (auto & outputName : BuildEnvironment::getStrings(outputs->second)) { @@ -404,9 +403,9 @@ struct Common : InstallableCommand, MixProfile for (auto & [installable_, dir_] : redirects) { auto dir = absPath(dir_); auto installable = parseInstallable(store, installable_); - auto builtPaths = Installable::toStorePathSet( - getEvalStore(), store, Realise::Nothing, OperateOn::Output, {installable}); - for (auto & path: builtPaths) { + auto builtPaths = + Installable::toStorePathSet(getEvalStore(), store, Realise::Nothing, OperateOn::Output, {installable}); + for (auto & path : builtPaths) { auto from = store->printStorePath(path); if (script.find(from) == std::string::npos) warn("'%s' (path '%s') is not used by this build environment", installable->what(), from); @@ -419,21 +418,14 @@ struct Common : InstallableCommand, MixProfile if (buildEnvironment.providesStructuredAttrs()) { fixupStructuredAttrs( - OS_STR("sh"), - "NIX_ATTRS_SH_FILE", - buildEnvironment.getAttrsSH(), - rewrites, - buildEnvironment, - tmpDir - ); + OS_STR("sh"), "NIX_ATTRS_SH_FILE", buildEnvironment.getAttrsSH(), rewrites, buildEnvironment, tmpDir); fixupStructuredAttrs( OS_STR("json"), "NIX_ATTRS_JSON_FILE", buildEnvironment.getAttrsJSON(), rewrites, buildEnvironment, - tmpDir - ); + tmpDir); } return rewriteStrings(script, rewrites); @@ -488,8 +480,10 @@ struct Common : InstallableCommand, MixProfile auto drvs = Installable::toDerivations(store, {installable}); if (drvs.size() != 1) - throw Error("'%s' needs to evaluate to a single derivation, but it evaluated to %d derivations", - installable->what(), drvs.size()); + throw Error( + "'%s' needs to evaluate to a single derivation, but it evaluated to %d derivations", + installable->what(), + drvs.size()); auto & drvPath = *drvs.begin(); @@ -497,8 +491,7 @@ struct Common : InstallableCommand, MixProfile } } - std::pair - getBuildEnvironment(ref store, ref installable) + std::pair getBuildEnvironment(ref store, ref installable) { auto shellOutPath = getShellOutPath(store, installable); @@ -525,7 +518,8 @@ struct CmdDevelop : Common, MixEnvironment .description = "Instead of starting an interactive shell, start the specified command and arguments.", .labels = {"command", "args"}, .handler = {[&](std::vector ss) { - if (ss.empty()) throw UsageError("--command requires at least one argument"); + if (ss.empty()) + throw UsageError("--command requires at least one argument"); command = ss; }}, }); @@ -582,8 +576,8 @@ struct CmdDevelop : Common, MixEnvironment std::string doc() override { return - #include "develop.md" - ; +#include "develop.md" + ; } void run(ref store, ref installable) override @@ -619,16 +613,17 @@ struct CmdDevelop : Common, MixEnvironment } else { - script = "[ -n \"$PS1\" ] && [ -e ~/.bashrc ] && source ~/.bashrc;\nshopt -u expand_aliases\n" + script + "\nshopt -s expand_aliases\n"; + script = "[ -n \"$PS1\" ] && [ -e ~/.bashrc ] && source ~/.bashrc;\nshopt -u expand_aliases\n" + script + + "\nshopt -s expand_aliases\n"; if (developSettings.bashPrompt != "") - script += fmt("[ -n \"$PS1\" ] && PS1=%s;\n", - escapeShellArgAlways(developSettings.bashPrompt.get())); + script += fmt("[ -n \"$PS1\" 
] && PS1=%s;\n", escapeShellArgAlways(developSettings.bashPrompt.get())); if (developSettings.bashPromptPrefix != "") - script += fmt("[ -n \"$PS1\" ] && PS1=%s\"$PS1\";\n", - escapeShellArgAlways(developSettings.bashPromptPrefix.get())); + script += + fmt("[ -n \"$PS1\" ] && PS1=%s\"$PS1\";\n", + escapeShellArgAlways(developSettings.bashPromptPrefix.get())); if (developSettings.bashPromptSuffix != "") - script += fmt("[ -n \"$PS1\" ] && PS1+=%s;\n", - escapeShellArgAlways(developSettings.bashPromptSuffix.get())); + script += + fmt("[ -n \"$PS1\" ] && PS1+=%s;\n", escapeShellArgAlways(developSettings.bashPromptSuffix.get())); } writeFull(rcFileFd.get(), script); @@ -662,7 +657,8 @@ struct CmdDevelop : Common, MixEnvironment bool found = false; - for (auto & path : Installable::toStorePathSet(getEvalStore(), store, Realise::Outputs, OperateOn::Output, {bashInstallable})) { + for (auto & path : Installable::toStorePathSet( + getEvalStore(), store, Realise::Outputs, OperateOn::Output, {bashInstallable})) { auto s = store->printStorePath(path) + "/bin/bash"; if (pathExists(s)) { shell = s; @@ -688,7 +684,7 @@ struct CmdDevelop : Common, MixEnvironment // If running a phase or single command, don't want an interactive shell running after // Ctrl-C, so don't pass --rcfile auto args = phase || !command.empty() ? Strings{std::string(baseNameOf(shell)), rcFilePath} - : Strings{std::string(baseNameOf(shell)), "--rcfile", rcFilePath}; + : Strings{std::string(baseNameOf(shell)), "--rcfile", rcFilePath}; // Need to chdir since phases assume in flake directory if (phase) { @@ -723,11 +719,14 @@ struct CmdPrintDevEnv : Common, MixJSON std::string doc() override { return - #include "print-dev-env.md" - ; +#include "print-dev-env.md" + ; } - Category category() override { return catUtility; } + Category category() override + { + return catUtility; + } void run(ref store, ref installable) override { diff --git a/src/nix/diff-closures.cc b/src/nix/diff-closures.cc index ff9f9db40..020c3e13b 100644 --- a/src/nix/diff-closures.cc +++ b/src/nix/diff-closures.cc @@ -41,7 +41,7 @@ GroupedPaths getClosureInfo(ref store, const StorePath & toplevel) } DrvName drvName(name); - groupedPaths[drvName.name][drvName.version].emplace(path, Info { .outputName = outputName }); + groupedPaths[drvName.name][drvName.version].emplace(path, Info{.outputName = outputName}); } return groupedPaths; @@ -49,7 +49,8 @@ GroupedPaths getClosureInfo(ref store, const StorePath & toplevel) std::string showVersions(const StringSet & versions) { - if (versions.empty()) return "∅"; + if (versions.empty()) + return "∅"; StringSet versions2; for (auto & version : versions) versions2.insert(version.empty() ? 
"ε" : version); @@ -57,24 +58,22 @@ std::string showVersions(const StringSet & versions) } void printClosureDiff( - ref store, - const StorePath & beforePath, - const StorePath & afterPath, - std::string_view indent) + ref store, const StorePath & beforePath, const StorePath & afterPath, std::string_view indent) { auto beforeClosure = getClosureInfo(store, beforePath); auto afterClosure = getClosureInfo(store, afterPath); StringSet allNames; - for (auto & [name, _] : beforeClosure) allNames.insert(name); - for (auto & [name, _] : afterClosure) allNames.insert(name); + for (auto & [name, _] : beforeClosure) + allNames.insert(name); + for (auto & [name, _] : afterClosure) + allNames.insert(name); for (auto & name : allNames) { auto & beforeVersions = beforeClosure[name]; auto & afterVersions = afterClosure[name]; - auto totalSize = [&](const std::map> & versions) - { + auto totalSize = [&](const std::map> & versions) { uint64_t sum = 0; for (auto & [_, paths] : versions) for (auto & [path, _] : paths) @@ -89,24 +88,29 @@ void printClosureDiff( StringSet removed, unchanged; for (auto & [version, _] : beforeVersions) - if (!afterVersions.count(version)) removed.insert(version); else unchanged.insert(version); + if (!afterVersions.count(version)) + removed.insert(version); + else + unchanged.insert(version); StringSet added; for (auto & [version, _] : afterVersions) - if (!beforeVersions.count(version)) added.insert(version); + if (!beforeVersions.count(version)) + added.insert(version); if (showDelta || !removed.empty() || !added.empty()) { std::vector items; if (!removed.empty() || !added.empty()) items.push_back(fmt("%s → %s", showVersions(removed), showVersions(added))); if (showDelta) - items.push_back(fmt("%s%+.1f KiB" ANSI_NORMAL, sizeDelta > 0 ? ANSI_RED : ANSI_GREEN, sizeDelta / 1024.0)); + items.push_back( + fmt("%s%+.1f KiB" ANSI_NORMAL, sizeDelta > 0 ? 
ANSI_RED : ANSI_GREEN, sizeDelta / 1024.0)); logger->cout("%s%s: %s", indent, name, concatStringsSep(", ", items)); } } } -} +} // namespace nix using namespace nix; @@ -128,8 +132,8 @@ struct CmdDiffClosures : SourceExprCommand, MixOperateOnOptions std::string doc() override { return - #include "diff-closures.md" - ; +#include "diff-closures.md" + ; } void run(ref store) override diff --git a/src/nix/dump-path.cc b/src/nix/dump-path.cc index c883630b1..8475655e9 100644 --- a/src/nix/dump-path.cc +++ b/src/nix/dump-path.cc @@ -14,8 +14,8 @@ struct CmdDumpPath : StorePathCommand std::string doc() override { return - #include "store-dump-path.md" - ; +#include "store-dump-path.md" + ; } void run(ref store, const StorePath & storePath) override @@ -34,11 +34,7 @@ struct CmdDumpPath2 : Command CmdDumpPath2() { - expectArgs({ - .label = "path", - .handler = {&path}, - .completer = completePath - }); + expectArgs({.label = "path", .handler = {&path}, .completer = completePath}); } std::string description() override @@ -49,8 +45,8 @@ struct CmdDumpPath2 : Command std::string doc() override { return - #include "nar-dump-path.md" - ; +#include "nar-dump-path.md" + ; } void run() override @@ -61,8 +57,10 @@ struct CmdDumpPath2 : Command } }; -struct CmdNarDumpPath : CmdDumpPath2 { - void run() override { +struct CmdNarDumpPath : CmdDumpPath2 +{ + void run() override + { warn("'nix nar dump-path' is a deprecated alias for 'nix nar pack'"); CmdDumpPath2::run(); } diff --git a/src/nix/edit.cc b/src/nix/edit.cc index cfb9eb74a..0657301f3 100644 --- a/src/nix/edit.cc +++ b/src/nix/edit.cc @@ -19,11 +19,14 @@ struct CmdEdit : InstallableValueCommand std::string doc() override { return - #include "edit.md" - ; +#include "edit.md" + ; } - Category category() override { return catSecondary; } + Category category() override + { + return catSecondary; + } void run(ref store, ref installable) override { @@ -48,7 +51,8 @@ struct CmdEdit : InstallableValueCommand execvp(args.front().c_str(), stringsToCharPtrs(args).data()); std::string command; - for (const auto &arg : args) command += " '" + arg + "'"; + for (const auto & arg : args) + command += " '" + arg + "'"; throw SysError("cannot run command%s", command); } }; diff --git a/src/nix/eval.cc b/src/nix/eval.cc index be064e552..10d0a1841 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -10,7 +10,9 @@ using namespace nix; -namespace nix::fs { using namespace std::filesystem; } +namespace nix::fs { +using namespace std::filesystem; +} struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption { @@ -18,7 +20,8 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption std::optional apply; std::optional writeTo; - CmdEval() : InstallableValueCommand() + CmdEval() + : InstallableValueCommand() { addFlag({ .longName = "raw", @@ -49,11 +52,14 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption std::string doc() override { return - #include "eval.md" - ; +#include "eval.md" + ; } - Category category() override { return catSecondary; } + Category category() override + { + return catSecondary; + } void run(ref store, ref installable) override { @@ -81,8 +87,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption std::function recurse; - recurse = [&](Value & v, const PosIdx pos, const std::filesystem::path & path) - { + recurse = [&](Value & v, const PosIdx pos, const std::filesystem::path & path) { state->forceValue(v, pos); if (v.type() == nString) // FIXME: disallow strings with contexts? 
@@ -99,14 +104,13 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption recurse(*attr.value, attr.pos, path / name); } catch (Error & e) { e.addTrace( - state->positions[attr.pos], - HintFmt("while evaluating the attribute '%s'", name)); + state->positions[attr.pos], HintFmt("while evaluating the attribute '%s'", name)); throw; } } - } - else - state->error("value at '%s' is not a string or an attribute set", state->positions[pos]).debugThrow(); + } else + state->error("value at '%s' is not a string or an attribute set", state->positions[pos]) + .debugThrow(); }; recurse(*v, pos, *writeTo); @@ -114,7 +118,9 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption else if (raw) { logger->stop(); - writeFull(getStandardOutput(), *state->coerceToString(noPos, *v, context, "while generating the eval command output")); + writeFull( + getStandardOutput(), + *state->coerceToString(noPos, *v, context, "while generating the eval command output")); } else if (json) { @@ -122,17 +128,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption } else { - logger->cout( - "%s", - ValuePrinter( - *state, - *v, - PrintOptions { - .force = true, - .derivationPaths = true - } - ) - ); + logger->cout("%s", ValuePrinter(*state, *v, PrintOptions{.force = true, .derivationPaths = true})); } } }; diff --git a/src/nix/flake-command.hh b/src/nix/flake-command.hh index 36dfe44c6..3636bd525 100644 --- a/src/nix/flake-command.hh +++ b/src/nix/flake-command.hh @@ -24,4 +24,4 @@ public: std::vector getFlakeRefsForCompletion() override; }; -} +} // namespace nix diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 1d20add02..a7b72c7e1 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -24,7 +24,9 @@ #include "nix/util/strings-inline.hh" -namespace nix::fs { using namespace std::filesystem; } +namespace nix::fs { +using namespace std::filesystem; +} using namespace nix; using namespace nix::flake; @@ -34,19 +36,18 @@ struct CmdFlakeUpdate; FlakeCommand::FlakeCommand() { - expectArgs({ - .label = "flake-url", - .optional = true, - .handler = {&flakeUrl}, - .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { - completeFlakeRef(completions, getStore(), prefix); - }} - }); + expectArgs( + {.label = "flake-url", + .optional = true, + .handler = {&flakeUrl}, + .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { + completeFlakeRef(completions, getStore(), prefix); + }}}); } FlakeRef FlakeCommand::getFlakeRef() { - return parseFlakeRef(fetchSettings, flakeUrl, std::filesystem::current_path().string()); //FIXME + return parseFlakeRef(fetchSettings, flakeUrl, std::filesystem::current_path().string()); // FIXME } LockedFlake FlakeCommand::lockFlake() @@ -56,10 +57,8 @@ LockedFlake FlakeCommand::lockFlake() std::vector FlakeCommand::getFlakeRefsForCompletion() { - return { - // Like getFlakeRef but with expandTilde called first - parseFlakeRef(fetchSettings, expandTilde(flakeUrl), std::filesystem::current_path().string()) - }; + return {// Like getFlakeRef but with expandTilde called first + parseFlakeRef(fetchSettings, expandTilde(flakeUrl), std::filesystem::current_path().string())}; } struct CmdFlakeUpdate : FlakeCommand @@ -75,28 +74,33 @@ public: { expectedArgs.clear(); addFlag({ - .longName="flake", - .description="The flake to operate on. Default is the current directory.", - .labels={"flake-url"}, - .handler={&flakeUrl}, + .longName = "flake", + .description = "The flake to operate on. 
Default is the current directory.", + .labels = {"flake-url"}, + .handler = {&flakeUrl}, .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { completeFlakeRef(completions, getStore(), prefix); }}, }); expectArgs({ - .label="inputs", - .optional=true, - .handler={[&](std::vector inputsToUpdate){ + .label = "inputs", + .optional = true, + .handler = {[&](std::vector inputsToUpdate) { for (const auto & inputToUpdate : inputsToUpdate) { InputAttrPath inputAttrPath; try { inputAttrPath = flake::parseInputAttrPath(inputToUpdate); } catch (Error & e) { - warn("Invalid flake input '%s'. To update a specific flake, use 'nix flake update --flake %s' instead.", inputToUpdate, inputToUpdate); + warn( + "Invalid flake input '%s'. To update a specific flake, use 'nix flake update --flake %s' instead.", + inputToUpdate, + inputToUpdate); throw e; } if (lockFlags.inputUpdates.contains(inputAttrPath)) - warn("Input '%s' was specified multiple times. You may have done this by accident.", printInputAttrPath(inputAttrPath)); + warn( + "Input '%s' was specified multiple times. You may have done this by accident.", + printInputAttrPath(inputAttrPath)); lockFlags.inputUpdates.insert(inputAttrPath); } }}, @@ -113,8 +117,8 @@ public: std::string doc() override { return - #include "flake-update.md" - ; +#include "flake-update.md" + ; } void run(nix::ref store) override @@ -146,8 +150,8 @@ struct CmdFlakeLock : FlakeCommand std::string doc() override { return - #include "flake-lock.md" - ; +#include "flake-lock.md" + ; } void run(nix::ref store) override @@ -162,7 +166,9 @@ struct CmdFlakeLock : FlakeCommand } }; -static void enumerateOutputs(EvalState & state, Value & vFlake, +static void enumerateOutputs( + EvalState & state, + Value & vFlake, std::function callback) { auto pos = vFlake.determinePos(noPos); @@ -197,8 +203,8 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON std::string doc() override { return - #include "flake-metadata.md" - ; +#include "flake-metadata.md" + ; } void run(nix::ref store) override @@ -235,40 +241,25 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON j["fingerprint"] = fingerprint->to_string(HashFormat::Base16, false); printJSON(j); } else { - logger->cout( - ANSI_BOLD "Resolved URL:" ANSI_NORMAL " %s", - flake.resolvedRef.to_string()); + logger->cout(ANSI_BOLD "Resolved URL:" ANSI_NORMAL " %s", flake.resolvedRef.to_string()); if (flake.lockedRef.input.isLocked()) - logger->cout( - ANSI_BOLD "Locked URL:" ANSI_NORMAL " %s", - flake.lockedRef.to_string()); + logger->cout(ANSI_BOLD "Locked URL:" ANSI_NORMAL " %s", flake.lockedRef.to_string()); if (flake.description) - logger->cout( - ANSI_BOLD "Description:" ANSI_NORMAL " %s", - *flake.description); - logger->cout( - ANSI_BOLD "Path:" ANSI_NORMAL " %s", - storePath); + logger->cout(ANSI_BOLD "Description:" ANSI_NORMAL " %s", *flake.description); + logger->cout(ANSI_BOLD "Path:" ANSI_NORMAL " %s", storePath); if (auto rev = flake.lockedRef.input.getRev()) - logger->cout( - ANSI_BOLD "Revision:" ANSI_NORMAL " %s", - rev->to_string(HashFormat::Base16, false)); + logger->cout(ANSI_BOLD "Revision:" ANSI_NORMAL " %s", rev->to_string(HashFormat::Base16, false)); if (auto dirtyRev = fetchers::maybeGetStrAttr(flake.lockedRef.toAttrs(), "dirtyRev")) - logger->cout( - ANSI_BOLD "Revision:" ANSI_NORMAL " %s", - *dirtyRev); + logger->cout(ANSI_BOLD "Revision:" ANSI_NORMAL " %s", *dirtyRev); if (auto revCount = flake.lockedRef.input.getRevCount()) - logger->cout( - ANSI_BOLD "Revisions:" ANSI_NORMAL " %s", - *revCount); 
+ logger->cout(ANSI_BOLD "Revisions:" ANSI_NORMAL " %s", *revCount); if (auto lastModified = flake.lockedRef.input.getLastModified()) logger->cout( ANSI_BOLD "Last modified:" ANSI_NORMAL " %s", std::put_time(std::localtime(&*lastModified), "%F %T")); if (auto fingerprint = lockedFlake.getFingerprint(store, fetchSettings)) logger->cout( - ANSI_BOLD "Fingerprint:" ANSI_NORMAL " %s", - fingerprint->to_string(HashFormat::Base16, false)); + ANSI_BOLD "Fingerprint:" ANSI_NORMAL " %s", fingerprint->to_string(HashFormat::Base16, false)); if (!lockedFlake.lockFile.root->inputs.empty()) logger->cout(ANSI_BOLD "Inputs:" ANSI_NORMAL); @@ -277,8 +268,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON std::function recurse; - recurse = [&](const Node & node, const std::string & prefix) - { + recurse = [&](const Node & node, const std::string & prefix) { for (const auto & [i, input] : enumerate(node.inputs)) { bool last = i + 1 == node.inputs.size(); @@ -286,17 +276,22 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON std::string lastModifiedStr = ""; if (auto lastModified = (*lockedNode)->lockedRef.input.getLastModified()) lastModifiedStr = fmt(" (%s)", std::put_time(std::gmtime(&*lastModified), "%F %T")); - logger->cout("%s" ANSI_BOLD "%s" ANSI_NORMAL ": %s%s", - prefix + (last ? treeLast : treeConn), input.first, + logger->cout( + "%s" ANSI_BOLD "%s" ANSI_NORMAL ": %s%s", + prefix + (last ? treeLast : treeConn), + input.first, (*lockedNode)->lockedRef, lastModifiedStr); bool firstVisit = visited.insert(*lockedNode).second; - if (firstVisit) recurse(**lockedNode, prefix + (last ? treeNull : treeLine)); + if (firstVisit) + recurse(**lockedNode, prefix + (last ? treeNull : treeLine)); } else if (auto follows = std::get_if<1>(&input.second)) { - logger->cout("%s" ANSI_BOLD "%s" ANSI_NORMAL " follows input '%s'", - prefix + (last ? treeLast : treeConn), input.first, + logger->cout( + "%s" ANSI_BOLD "%s" ANSI_NORMAL " follows input '%s'", + prefix + (last ? treeLast : treeConn), + input.first, printInputAttrPath(*follows)); } } @@ -344,8 +339,8 @@ struct CmdFlakeCheck : FlakeCommand std::string doc() override { return - #include "flake-check.md" - ; +#include "flake-check.md" + ; } void run(nix::ref store) override @@ -371,8 +366,7 @@ struct CmdFlakeCheck : FlakeCommand if (settings.keepGoing) { ignoreExceptionExceptInterrupt(); hasErrors = true; - } - else + } else throw; } }; @@ -381,16 +375,11 @@ struct CmdFlakeCheck : FlakeCommand // FIXME: rewrite to use EvalCache. 
- auto resolve = [&] (PosIdx p) { - return state->positions[p]; - }; + auto resolve = [&](PosIdx p) { return state->positions[p]; }; - auto argHasName = [&] (Symbol arg, std::string_view expected) { + auto argHasName = [&](Symbol arg, std::string_view expected) { std::string_view name = state->symbols[arg]; - return - name == expected - || name == "_" - || (hasPrefix(name, "_") && name.substr(1) == expected); + return name == expected || name == "_" || (hasPrefix(name, "_") && name.substr(1) == expected); }; auto checkSystemName = [&](std::string_view system, const PosIdx pos) { @@ -408,10 +397,10 @@ struct CmdFlakeCheck : FlakeCommand } }; - auto checkDerivation = [&](const std::string & attrPath, Value & v, const PosIdx pos) -> std::optional { + auto checkDerivation = + [&](const std::string & attrPath, Value & v, const PosIdx pos) -> std::optional { try { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking derivation %s", attrPath)); + Activity act(*logger, lvlInfo, actUnknown, fmt("checking derivation %s", attrPath)); auto packageInfo = getDerivation(*state, v, false); if (!packageInfo) throw Error("flake attribute '%s' is not a derivation", attrPath); @@ -419,9 +408,8 @@ struct CmdFlakeCheck : FlakeCommand // FIXME: check meta attributes auto storePath = packageInfo->queryDrvPath(); if (storePath) { - logger->log(lvlInfo, - fmt("derivation evaluated to %s", - store->printStorePath(storePath.value()))); + logger->log( + lvlInfo, fmt("derivation evaluated to %s", store->printStorePath(storePath.value()))); } return storePath; } @@ -477,14 +465,12 @@ struct CmdFlakeCheck : FlakeCommand auto checkOverlay = [&](std::string_view attrPath, Value & v, const PosIdx pos) { try { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking overlay '%s'", attrPath)); + Activity act(*logger, lvlInfo, actUnknown, fmt("checking overlay '%s'", attrPath)); state->forceValue(v, pos); if (!v.isLambda()) { throw Error("overlay is not a function, but %s instead", showType(v)); } - if (v.lambda().fun->hasFormals() - || !argHasName(v.lambda().fun->arg, "final")) + if (v.lambda().fun->hasFormals() || !argHasName(v.lambda().fun->arg, "final")) throw Error("overlay does not take an argument named 'final'"); // FIXME: if we have a 'nixpkgs' input, use it to // evaluate the overlay. 
@@ -496,8 +482,7 @@ struct CmdFlakeCheck : FlakeCommand auto checkModule = [&](std::string_view attrPath, Value & v, const PosIdx pos) { try { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking NixOS module '%s'", attrPath)); + Activity act(*logger, lvlInfo, actUnknown, fmt("checking NixOS module '%s'", attrPath)); state->forceValue(v, pos); } catch (Error & e) { e.addTrace(resolve(pos), HintFmt("while checking the NixOS module '%s'", attrPath)); @@ -509,8 +494,7 @@ struct CmdFlakeCheck : FlakeCommand checkHydraJobs = [&](std::string_view attrPath, Value & v, const PosIdx pos) { try { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking Hydra job '%s'", attrPath)); + Activity act(*logger, lvlInfo, actUnknown, fmt("checking Hydra job '%s'", attrPath)); state->forceAttrs(v, pos, ""); if (state->isDerivation(v)) @@ -520,8 +504,7 @@ struct CmdFlakeCheck : FlakeCommand state->forceAttrs(*attr.value, attr.pos, ""); auto attrPath2 = concatStrings(attrPath, ".", state->symbols[attr.name]); if (state->isDerivation(*attr.value)) { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking Hydra job '%s'", attrPath2)); + Activity act(*logger, lvlInfo, actUnknown, fmt("checking Hydra job '%s'", attrPath2)); checkDerivation(attrPath2, *attr.value, attr.pos); } else checkHydraJobs(attrPath2, *attr.value, attr.pos); @@ -535,8 +518,7 @@ struct CmdFlakeCheck : FlakeCommand auto checkNixOSConfiguration = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking NixOS configuration '%s'", attrPath)); + Activity act(*logger, lvlInfo, actUnknown, fmt("checking NixOS configuration '%s'", attrPath)); Bindings & bindings(*state->allocBindings(0)); auto vToplevel = findAlongAttrPath(*state, "config.system.build.toplevel", bindings, v).first; state->forceValue(*vToplevel, pos); @@ -550,8 +532,7 @@ struct CmdFlakeCheck : FlakeCommand auto checkTemplate = [&](std::string_view attrPath, Value & v, const PosIdx pos) { try { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking template '%s'", attrPath)); + Activity act(*logger, lvlInfo, actUnknown, fmt("checking template '%s'", attrPath)); state->forceAttrs(v, pos, ""); @@ -584,8 +565,7 @@ struct CmdFlakeCheck : FlakeCommand auto checkBundler = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking bundler '%s'", attrPath)); + Activity act(*logger, lvlInfo, actUnknown, fmt("checking bundler '%s'", attrPath)); state->forceValue(v, pos); if (!v.isLambda()) throw Error("bundler must be a function"); @@ -602,227 +582,208 @@ struct CmdFlakeCheck : FlakeCommand auto vFlake = state->allocValue(); flake::callFlake(*state, flake, *vFlake); - enumerateOutputs(*state, - *vFlake, - [&](std::string_view name, Value & vOutput, const PosIdx pos) { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking flake output '%s'", name)); + enumerateOutputs(*state, *vFlake, [&](std::string_view name, Value & vOutput, const PosIdx pos) { + Activity act(*logger, lvlInfo, actUnknown, fmt("checking flake output '%s'", name)); - try { - evalSettings.enableImportFromDerivation.setDefault(name != "hydraJobs"); + try { + evalSettings.enableImportFromDerivation.setDefault(name != "hydraJobs"); - state->forceValue(vOutput, pos); + state->forceValue(vOutput, pos); - std::string_view replacement = - name == "defaultPackage" ? "packages..default" : - name == "defaultApp" ? "apps..default" : - name == "defaultTemplate" ? 
"templates.default" : - name == "defaultBundler" ? "bundlers..default" : - name == "overlay" ? "overlays.default" : - name == "devShell" ? "devShells..default" : - name == "nixosModule" ? "nixosModules.default" : - ""; - if (replacement != "") - warn("flake output attribute '%s' is deprecated; use '%s' instead", name, replacement); + std::string_view replacement = name == "defaultPackage" ? "packages..default" + : name == "defaultApp" ? "apps..default" + : name == "defaultTemplate" ? "templates.default" + : name == "defaultBundler" ? "bundlers..default" + : name == "overlay" ? "overlays.default" + : name == "devShell" ? "devShells..default" + : name == "nixosModule" ? "nixosModules.default" + : ""; + if (replacement != "") + warn("flake output attribute '%s' is deprecated; use '%s' instead", name, replacement); - if (name == "checks") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - std::string_view attr_name = state->symbols[attr.name]; - checkSystemName(attr_name, attr.pos); - if (checkSystemType(attr_name, attr.pos)) { - state->forceAttrs(*attr.value, attr.pos, ""); - for (auto & attr2 : *attr.value->attrs()) { - auto drvPath = checkDerivation( - fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), - *attr2.value, attr2.pos); - if (drvPath && attr_name == settings.thisSystem.get()) { - auto path = DerivedPath::Built { - .drvPath = makeConstantStorePathRef(*drvPath), - .outputs = OutputsSpec::All { }, - }; - drvPaths.push_back(std::move(path)); - } + if (name == "checks") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) { + std::string_view attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + state->forceAttrs(*attr.value, attr.pos, ""); + for (auto & attr2 : *attr.value->attrs()) { + auto drvPath = checkDerivation( + fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), + *attr2.value, + attr2.pos); + if (drvPath && attr_name == settings.thisSystem.get()) { + auto path = DerivedPath::Built{ + .drvPath = makeConstantStorePathRef(*drvPath), + .outputs = OutputsSpec::All{}, + }; + drvPaths.push_back(std::move(path)); } } } } - - else if (name == "formatter") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - const auto & attr_name = state->symbols[attr.name]; - checkSystemName(attr_name, attr.pos); - if (checkSystemType(attr_name, attr.pos)) { - checkDerivation( - fmt("%s.%s", name, attr_name), - *attr.value, attr.pos); - }; - } - } - - else if (name == "packages" || name == "devShells") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - const auto & attr_name = state->symbols[attr.name]; - checkSystemName(attr_name, attr.pos); - if (checkSystemType(attr_name, attr.pos)) { - state->forceAttrs(*attr.value, attr.pos, ""); - for (auto & attr2 : *attr.value->attrs()) - checkDerivation( - fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), - *attr2.value, attr2.pos); - }; - } - } - - else if (name == "apps") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - const auto & attr_name = state->symbols[attr.name]; - checkSystemName(attr_name, attr.pos); - if (checkSystemType(attr_name, attr.pos)) { - state->forceAttrs(*attr.value, attr.pos, ""); - for (auto & attr2 : *attr.value->attrs()) - checkApp( - fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), - *attr2.value, attr2.pos); - }; - } - } - - else if (name == 
"defaultPackage" || name == "devShell") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - const auto & attr_name = state->symbols[attr.name]; - checkSystemName(attr_name, attr.pos); - if (checkSystemType(attr_name, attr.pos)) { - checkDerivation( - fmt("%s.%s", name, attr_name), - *attr.value, attr.pos); - }; - } - } - - else if (name == "defaultApp") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - const auto & attr_name = state->symbols[attr.name]; - checkSystemName(attr_name, attr.pos); - if (checkSystemType(attr_name, attr.pos) ) { - checkApp( - fmt("%s.%s", name, attr_name), - *attr.value, attr.pos); - }; - } - } - - else if (name == "legacyPackages") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - checkSystemName(state->symbols[attr.name], attr.pos); - checkSystemType(state->symbols[attr.name], attr.pos); - // FIXME: do getDerivations? - } - } - - else if (name == "overlay") - checkOverlay(name, vOutput, pos); - - else if (name == "overlays") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) - checkOverlay(fmt("%s.%s", name, state->symbols[attr.name]), - *attr.value, attr.pos); - } - - else if (name == "nixosModule") - checkModule(name, vOutput, pos); - - else if (name == "nixosModules") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) - checkModule(fmt("%s.%s", name, state->symbols[attr.name]), - *attr.value, attr.pos); - } - - else if (name == "nixosConfigurations") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) - checkNixOSConfiguration(fmt("%s.%s", name, state->symbols[attr.name]), - *attr.value, attr.pos); - } - - else if (name == "hydraJobs") - checkHydraJobs(name, vOutput, pos); - - else if (name == "defaultTemplate") - checkTemplate(name, vOutput, pos); - - else if (name == "templates") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) - checkTemplate(fmt("%s.%s", name, state->symbols[attr.name]), - *attr.value, attr.pos); - } - - else if (name == "defaultBundler") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - const auto & attr_name = state->symbols[attr.name]; - checkSystemName(attr_name, attr.pos); - if (checkSystemType(attr_name, attr.pos)) { - checkBundler( - fmt("%s.%s", name, attr_name), - *attr.value, attr.pos); - }; - } - } - - else if (name == "bundlers") { - state->forceAttrs(vOutput, pos, ""); - for (auto & attr : *vOutput.attrs()) { - const auto & attr_name = state->symbols[attr.name]; - checkSystemName(attr_name, attr.pos); - if (checkSystemType(attr_name, attr.pos)) { - state->forceAttrs(*attr.value, attr.pos, ""); - for (auto & attr2 : *attr.value->attrs()) { - checkBundler( - fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), - *attr2.value, attr2.pos); - } - }; - } - } - - else if ( - name == "lib" - || name == "darwinConfigurations" - || name == "darwinModules" - || name == "flakeModule" - || name == "flakeModules" - || name == "herculesCI" - || name == "homeConfigurations" - || name == "homeModule" - || name == "homeModules" - || name == "nixopsConfigurations" - ) - // Known but unchecked community attribute - ; - - else - warn("unknown flake output '%s'", name); - - } catch (Error & e) { - e.addTrace(resolve(pos), HintFmt("while checking flake output '%s'", name)); - reportError(e); } - }); + + else if (name == "formatter") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr 
: *vOutput.attrs()) { + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + checkDerivation(fmt("%s.%s", name, attr_name), *attr.value, attr.pos); + }; + } + } + + else if (name == "packages" || name == "devShells") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) { + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + state->forceAttrs(*attr.value, attr.pos, ""); + for (auto & attr2 : *attr.value->attrs()) + checkDerivation( + fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), + *attr2.value, + attr2.pos); + }; + } + } + + else if (name == "apps") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) { + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + state->forceAttrs(*attr.value, attr.pos, ""); + for (auto & attr2 : *attr.value->attrs()) + checkApp( + fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), + *attr2.value, + attr2.pos); + }; + } + } + + else if (name == "defaultPackage" || name == "devShell") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) { + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + checkDerivation(fmt("%s.%s", name, attr_name), *attr.value, attr.pos); + }; + } + } + + else if (name == "defaultApp") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) { + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + checkApp(fmt("%s.%s", name, attr_name), *attr.value, attr.pos); + }; + } + } + + else if (name == "legacyPackages") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) { + checkSystemName(state->symbols[attr.name], attr.pos); + checkSystemType(state->symbols[attr.name], attr.pos); + // FIXME: do getDerivations? 
+ } + } + + else if (name == "overlay") + checkOverlay(name, vOutput, pos); + + else if (name == "overlays") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) + checkOverlay(fmt("%s.%s", name, state->symbols[attr.name]), *attr.value, attr.pos); + } + + else if (name == "nixosModule") + checkModule(name, vOutput, pos); + + else if (name == "nixosModules") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) + checkModule(fmt("%s.%s", name, state->symbols[attr.name]), *attr.value, attr.pos); + } + + else if (name == "nixosConfigurations") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) + checkNixOSConfiguration( + fmt("%s.%s", name, state->symbols[attr.name]), *attr.value, attr.pos); + } + + else if (name == "hydraJobs") + checkHydraJobs(name, vOutput, pos); + + else if (name == "defaultTemplate") + checkTemplate(name, vOutput, pos); + + else if (name == "templates") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) + checkTemplate(fmt("%s.%s", name, state->symbols[attr.name]), *attr.value, attr.pos); + } + + else if (name == "defaultBundler") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) { + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + checkBundler(fmt("%s.%s", name, attr_name), *attr.value, attr.pos); + }; + } + } + + else if (name == "bundlers") { + state->forceAttrs(vOutput, pos, ""); + for (auto & attr : *vOutput.attrs()) { + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + state->forceAttrs(*attr.value, attr.pos, ""); + for (auto & attr2 : *attr.value->attrs()) { + checkBundler( + fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), + *attr2.value, + attr2.pos); + } + }; + } + } + + else if ( + name == "lib" || name == "darwinConfigurations" || name == "darwinModules" + || name == "flakeModule" || name == "flakeModules" || name == "herculesCI" + || name == "homeConfigurations" || name == "homeModule" || name == "homeModules" + || name == "nixopsConfigurations") + // Known but unchecked community attribute + ; + + else + warn("unknown flake output '%s'", name); + + } catch (Error & e) { + e.addTrace(resolve(pos), HintFmt("while checking flake output '%s'", name)); + reportError(e); + } + }); } if (build && !drvPaths.empty()) { - Activity act(*logger, lvlInfo, actUnknown, - fmt("running %d flake checks", drvPaths.size())); + Activity act(*logger, lvlInfo, actUnknown, fmt("running %d flake checks", drvPaths.size())); store->buildPaths(drvPaths); } if (hasErrors) @@ -833,8 +794,7 @@ struct CmdFlakeCheck : FlakeCommand warn( "The check omitted these incompatible systems: %s\n" "Use '--all-systems' to check all.", - concatStringsSep(", ", omittedSystems) - ); + concatStringsSep(", ", omittedSystems)); }; }; }; @@ -847,7 +807,7 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand std::string templateUrl = "templates"; Path destDir; - const LockFlags lockFlags{ .writeLockFile = false }; + const LockFlags lockFlags{.writeLockFile = false}; CmdFlakeInitCommon() { @@ -875,11 +835,15 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand auto evalState = getEvalState(); - auto [templateFlakeRef, templateName] = parseFlakeRefWithFragment( - fetchSettings, templateUrl, std::filesystem::current_path().string()); + auto [templateFlakeRef, templateName] = 
+ parseFlakeRefWithFragment(fetchSettings, templateUrl, std::filesystem::current_path().string()); - auto installable = InstallableFlake(nullptr, - evalState, std::move(templateFlakeRef), templateName, ExtendedOutputsSpec::Default(), + auto installable = InstallableFlake( + nullptr, + evalState, + std::move(templateFlakeRef), + templateName, + ExtendedOutputsSpec::Default(), defaultTemplateAttrPaths, defaultTemplateAttrPathsPrefixes, lockFlags); @@ -894,8 +858,7 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand std::vector conflictedFiles; std::function copyDir; - copyDir = [&](const SourcePath & from, const std::filesystem::path & to) - { + copyDir = [&](const SourcePath & from, const std::filesystem::path & to) { createDirs(to); for (auto & [name, entry] : from.readDirectory()) { @@ -911,7 +874,10 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand if (std::filesystem::exists(to_st)) { auto contents2 = readFile(to2.string()); if (contents != contents2) { - printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2.string(), from2); + printError( + "refusing to overwrite existing file '%s'\n please merge it manually with '%s'", + to2.string(), + from2); conflictedFiles.push_back(to2); } else { notice("skipping identical file: %s", from2); @@ -919,22 +885,26 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand continue; } else writeFile(to2, contents); - } - else if (st.type == SourceAccessor::tSymlink) { + } else if (st.type == SourceAccessor::tSymlink) { auto target = from2.readLink(); if (std::filesystem::exists(to_st)) { if (std::filesystem::read_symlink(to2) != target) { - printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2.string(), from2); + printError( + "refusing to overwrite existing file '%s'\n please merge it manually with '%s'", + to2.string(), + from2); conflictedFiles.push_back(to2); } else { notice("skipping identical file: %s", from2); } continue; } else - createSymlink(target, os_string_to_string(PathViewNG { to2 })); - } - else - throw Error("path '%s' needs to be a symlink, file, or directory but instead is a %s", from2, st.typeString()); + createSymlink(target, os_string_to_string(PathViewNG{to2})); + } else + throw Error( + "path '%s' needs to be a symlink, file, or directory but instead is a %s", + from2, + st.typeString()); changedFiles.push_back(to2); notice("wrote: %s", to2); } @@ -943,8 +913,9 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand copyDir(templateDir, flakeDir); if (!changedFiles.empty() && std::filesystem::exists(std::filesystem::path{flakeDir} / ".git")) { - Strings args = { "-C", flakeDir, "add", "--intent-to-add", "--force", "--" }; - for (auto & s : changedFiles) args.emplace_back(s.string()); + Strings args = {"-C", flakeDir, "add", "--intent-to-add", "--force", "--"}; + for (auto & s : changedFiles) + args.emplace_back(s.string()); runProgram("git", true, args); } @@ -968,8 +939,8 @@ struct CmdFlakeInit : CmdFlakeInitCommon std::string doc() override { return - #include "flake-init.md" - ; +#include "flake-init.md" + ; } CmdFlakeInit() @@ -988,17 +959,13 @@ struct CmdFlakeNew : CmdFlakeInitCommon std::string doc() override { return - #include "flake-new.md" - ; +#include "flake-new.md" + ; } CmdFlakeNew() { - expectArgs({ - .label = "dest-dir", - .handler = {&destDir}, - .completer = completePath - }); + expectArgs({.label = "dest-dir", .handler = {&destDir}, .completer = completePath}); } }; @@ -1014,8 +981,8 @@ struct CmdFlakeClone : 
FlakeCommand std::string doc() override { return - #include "flake-clone.md" - ; +#include "flake-clone.md" + ; } CmdFlakeClone() @@ -1069,8 +1036,8 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun std::string doc() override { return - #include "flake-archive.md" - ; +#include "flake-archive.md" + ; } void run(nix::ref store) override @@ -1085,17 +1052,14 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun // FIXME: use graph output, handle cycles. std::function traverse; - traverse = [&](const Node & node) - { + traverse = [&](const Node & node) { nlohmann::json jsonObj2 = json ? json::object() : nlohmann::json(nullptr); for (auto & [inputName, input] : node.inputs) { if (auto inputNode = std::get_if<0>(&input)) { std::optional storePath; if (!(*inputNode)->lockedRef.input.isRelative()) { - storePath = - dryRun - ? (*inputNode)->lockedRef.input.computeStorePath(*store) - : (*inputNode)->lockedRef.input.fetchToStore(store).first; + storePath = dryRun ? (*inputNode)->lockedRef.input.computeStorePath(*store) + : (*inputNode)->lockedRef.input.fetchToStore(store).first; sources.insert(*storePath); } if (json) { @@ -1155,8 +1119,8 @@ struct CmdFlakeShow : FlakeCommand, MixJSON std::string doc() override { return - #include "flake-show.md" - ; +#include "flake-show.md" + ; } void run(nix::ref store) override @@ -1167,10 +1131,8 @@ struct CmdFlakeShow : FlakeCommand, MixJSON auto flake = std::make_shared(lockFlake()); auto localSystem = std::string(settings.thisSystem.get()); - std::function &attrPath, - const Symbol &attr)> hasContent; + std::function & attrPath, const Symbol & attr)> + hasContent; // For frameworks it's important that structures are as lazy as possible // to prevent infinite recursions, performance issues and errors that @@ -1178,11 +1140,8 @@ struct CmdFlakeShow : FlakeCommand, MixJSON // to emit more attributes than strictly (sic) necessary. // However, these attributes with empty values are not useful to the user // so we omit them. 
- hasContent = [&]( - eval_cache::AttrCursor & visitor, - const std::vector &attrPath, - const Symbol &attr) -> bool - { + hasContent = + [&](eval_cache::AttrCursor & visitor, const std::vector & attrPath, const Symbol & attr) -> bool { auto attrPath2(attrPath); attrPath2.push_back(attr); auto attrPathS = state->symbols.resolve(attrPath2); @@ -1191,13 +1150,10 @@ struct CmdFlakeShow : FlakeCommand, MixJSON auto visitor2 = visitor.getAttr(attrName); try { - if ((attrPathS[0] == "apps" - || attrPathS[0] == "checks" - || attrPathS[0] == "devShells" - || attrPathS[0] == "legacyPackages" - || attrPathS[0] == "packages") + if ((attrPathS[0] == "apps" || attrPathS[0] == "checks" || attrPathS[0] == "devShells" + || attrPathS[0] == "legacyPackages" || attrPathS[0] == "packages") && (attrPathS.size() == 1 || attrPathS.size() == 2)) { - for (const auto &subAttr : visitor2->getAttrs()) { + for (const auto & subAttr : visitor2->getAttrs()) { if (hasContent(*visitor2, attrPath2, subAttr)) { return true; } @@ -1206,12 +1162,9 @@ struct CmdFlakeShow : FlakeCommand, MixJSON } if ((attrPathS.size() == 1) - && (attrPathS[0] == "formatter" - || attrPathS[0] == "nixosConfigurations" - || attrPathS[0] == "nixosModules" - || attrPathS[0] == "overlays" - )) { - for (const auto &subAttr : visitor2->getAttrs()) { + && (attrPathS[0] == "formatter" || attrPathS[0] == "nixosConfigurations" + || attrPathS[0] == "nixosModules" || attrPathS[0] == "overlays")) { + for (const auto & subAttr : visitor2->getAttrs()) { if (hasContent(*visitor2, attrPath2, subAttr)) { return true; } @@ -1233,29 +1186,25 @@ struct CmdFlakeShow : FlakeCommand, MixJSON eval_cache::AttrCursor & visitor, const std::vector & attrPath, const std::string & headerPrefix, - const std::string & nextPrefix)> visit; + const std::string & nextPrefix)> + visit; - visit = [&]( - eval_cache::AttrCursor & visitor, - const std::vector & attrPath, - const std::string & headerPrefix, - const std::string & nextPrefix) - -> nlohmann::json - { + visit = [&](eval_cache::AttrCursor & visitor, + const std::vector & attrPath, + const std::string & headerPrefix, + const std::string & nextPrefix) -> nlohmann::json { auto j = nlohmann::json::object(); auto attrPathS = state->symbols.resolve(attrPath); - Activity act(*logger, lvlInfo, actUnknown, - fmt("evaluating '%s'", concatStringsSep(".", attrPathS))); + Activity act(*logger, lvlInfo, actUnknown, fmt("evaluating '%s'", concatStringsSep(".", attrPathS))); try { - auto recurse = [&]() - { + auto recurse = [&]() { if (!json) logger->cout("%s", headerPrefix); std::vector attrs; - for (const auto &attr : visitor.getAttrs()) { + for (const auto & attr : visitor.getAttrs()) { if (hasContent(visitor, attrPath, attr)) attrs.push_back(attr); } @@ -1266,15 +1215,20 @@ struct CmdFlakeShow : FlakeCommand, MixJSON auto visitor2 = visitor.getAttr(attrName); auto attrPath2(attrPath); attrPath2.push_back(attr); - auto j2 = visit(*visitor2, attrPath2, - fmt(ANSI_GREEN "%s%s" ANSI_NORMAL ANSI_BOLD "%s" ANSI_NORMAL, nextPrefix, last ? treeLast : treeConn, attrName), + auto j2 = visit( + *visitor2, + attrPath2, + fmt(ANSI_GREEN "%s%s" ANSI_NORMAL ANSI_BOLD "%s" ANSI_NORMAL, + nextPrefix, + last ? treeLast : treeConn, + attrName), nextPrefix + (last ? 
treeNull : treeLine)); - if (json) j.emplace(attrName, std::move(j2)); + if (json) + j.emplace(attrName, std::move(j2)); } }; - auto showDerivation = [&]() - { + auto showDerivation = [&]() { auto name = visitor.getAttr(state->sName)->getString(); if (json) { @@ -1287,47 +1241,43 @@ struct CmdFlakeShow : FlakeCommand, MixJSON j.emplace("name", name); j.emplace("description", description ? *description : ""); } else { - logger->cout("%s: %s '%s'", + logger->cout( + "%s: %s '%s'", headerPrefix, - attrPath.size() == 2 && attrPathS[0] == "devShell" ? "development environment" : - attrPath.size() >= 2 && attrPathS[0] == "devShells" ? "development environment" : - attrPath.size() == 3 && attrPathS[0] == "checks" ? "derivation" : - attrPath.size() >= 1 && attrPathS[0] == "hydraJobs" ? "derivation" : - "package", + attrPath.size() == 2 && attrPathS[0] == "devShell" ? "development environment" + : attrPath.size() >= 2 && attrPathS[0] == "devShells" ? "development environment" + : attrPath.size() == 3 && attrPathS[0] == "checks" ? "derivation" + : attrPath.size() >= 1 && attrPathS[0] == "hydraJobs" ? "derivation" + : "package", name); } }; if (attrPath.size() == 0 - || (attrPath.size() == 1 && ( - attrPathS[0] == "defaultPackage" - || attrPathS[0] == "devShell" - || attrPathS[0] == "formatter" - || attrPathS[0] == "nixosConfigurations" - || attrPathS[0] == "nixosModules" - || attrPathS[0] == "defaultApp" - || attrPathS[0] == "templates" - || attrPathS[0] == "overlays")) + || (attrPath.size() == 1 + && (attrPathS[0] == "defaultPackage" || attrPathS[0] == "devShell" + || attrPathS[0] == "formatter" || attrPathS[0] == "nixosConfigurations" + || attrPathS[0] == "nixosModules" || attrPathS[0] == "defaultApp" + || attrPathS[0] == "templates" || attrPathS[0] == "overlays")) || ((attrPath.size() == 1 || attrPath.size() == 2) - && (attrPathS[0] == "checks" - || attrPathS[0] == "packages" - || attrPathS[0] == "devShells" - || attrPathS[0] == "apps")) - ) - { + && (attrPathS[0] == "checks" || attrPathS[0] == "packages" || attrPathS[0] == "devShells" + || attrPathS[0] == "apps"))) { recurse(); } else if ( - (attrPath.size() == 2 && (attrPathS[0] == "defaultPackage" || attrPathS[0] == "devShell" || attrPathS[0] == "formatter")) - || (attrPath.size() == 3 && (attrPathS[0] == "checks" || attrPathS[0] == "packages" || attrPathS[0] == "devShells")) - ) - { + (attrPath.size() == 2 + && (attrPathS[0] == "defaultPackage" || attrPathS[0] == "devShell" || attrPathS[0] == "formatter")) + || (attrPath.size() == 3 + && (attrPathS[0] == "checks" || attrPathS[0] == "packages" || attrPathS[0] == "devShells"))) { if (!showAllSystems && std::string(attrPathS[1]) != localSystem) { if (!json) - logger->cout(fmt("%s " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--all-systems' to show)", headerPrefix)); + logger->cout( + fmt("%s " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--all-systems' to show)", + headerPrefix)); else { - logger->warn(fmt("%s omitted (use '--all-systems' to show)", concatStringsSep(".", attrPathS))); + logger->warn( + fmt("%s omitted (use '--all-systems' to show)", concatStringsSep(".", attrPathS))); } } else { try { @@ -1337,9 +1287,13 @@ struct CmdFlakeShow : FlakeCommand, MixJSON throw Error("expected a derivation"); } catch (IFDError & e) { if (!json) { - logger->cout(fmt("%s " ANSI_WARNING "omitted due to use of import from derivation" ANSI_NORMAL, headerPrefix)); + logger->cout( + fmt("%s " ANSI_WARNING "omitted due to use of import from derivation" ANSI_NORMAL, + headerPrefix)); } else { - 
logger->warn(fmt("%s omitted due to use of import from derivation", concatStringsSep(".", attrPathS))); + logger->warn( + fmt("%s omitted due to use of import from derivation", + concatStringsSep(".", attrPathS))); } } } @@ -1353,9 +1307,12 @@ struct CmdFlakeShow : FlakeCommand, MixJSON recurse(); } catch (IFDError & e) { if (!json) { - logger->cout(fmt("%s " ANSI_WARNING "omitted due to use of import from derivation" ANSI_NORMAL, headerPrefix)); + logger->cout( + fmt("%s " ANSI_WARNING "omitted due to use of import from derivation" ANSI_NORMAL, + headerPrefix)); } else { - logger->warn(fmt("%s omitted due to use of import from derivation", concatStringsSep(".", attrPathS))); + logger->warn(fmt( + "%s omitted due to use of import from derivation", concatStringsSep(".", attrPathS))); } } } @@ -1363,17 +1320,21 @@ struct CmdFlakeShow : FlakeCommand, MixJSON else if (attrPath.size() > 0 && attrPathS[0] == "legacyPackages") { if (attrPath.size() == 1) recurse(); - else if (!showLegacy){ + else if (!showLegacy) { if (!json) - logger->cout(fmt("%s " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--legacy' to show)", headerPrefix)); + logger->cout(fmt( + "%s " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--legacy' to show)", headerPrefix)); else { logger->warn(fmt("%s omitted (use '--legacy' to show)", concatStringsSep(".", attrPathS))); } } else if (!showAllSystems && std::string(attrPathS[1]) != localSystem) { if (!json) - logger->cout(fmt("%s " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--all-systems' to show)", headerPrefix)); + logger->cout( + fmt("%s " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--all-systems' to show)", + headerPrefix)); else { - logger->warn(fmt("%s omitted (use '--all-systems' to show)", concatStringsSep(".", attrPathS))); + logger->warn( + fmt("%s omitted (use '--all-systems' to show)", concatStringsSep(".", attrPathS))); } } else { try { @@ -1384,18 +1345,21 @@ struct CmdFlakeShow : FlakeCommand, MixJSON recurse(); } catch (IFDError & e) { if (!json) { - logger->cout(fmt("%s " ANSI_WARNING "omitted due to use of import from derivation" ANSI_NORMAL, headerPrefix)); + logger->cout( + fmt("%s " ANSI_WARNING "omitted due to use of import from derivation" ANSI_NORMAL, + headerPrefix)); } else { - logger->warn(fmt("%s omitted due to use of import from derivation", concatStringsSep(".", attrPathS))); + logger->warn( + fmt("%s omitted due to use of import from derivation", + concatStringsSep(".", attrPathS))); } } } } else if ( - (attrPath.size() == 2 && attrPathS[0] == "defaultApp") || - (attrPath.size() == 3 && attrPathS[0] == "apps")) - { + (attrPath.size() == 2 && attrPathS[0] == "defaultApp") + || (attrPath.size() == 3 && attrPathS[0] == "apps")) { auto aType = visitor.maybeGetAttr("type"); std::optional description; if (auto aMeta = visitor.maybeGetAttr(state->sMeta)) { @@ -1409,14 +1373,16 @@ struct CmdFlakeShow : FlakeCommand, MixJSON if (description) j.emplace("description", *description); } else { - logger->cout("%s: app: " ANSI_BOLD "%s" ANSI_NORMAL, headerPrefix, description ? *description : "no description"); + logger->cout( + "%s: app: " ANSI_BOLD "%s" ANSI_NORMAL, + headerPrefix, + description ? 
*description : "no description"); } } else if ( - (attrPath.size() == 1 && attrPathS[0] == "defaultTemplate") || - (attrPath.size() == 2 && attrPathS[0] == "templates")) - { + (attrPath.size() == 1 && attrPathS[0] == "defaultTemplate") + || (attrPath.size() == 2 && attrPathS[0] == "templates")) { auto description = visitor.getAttr("description")->getString(); if (json) { j.emplace("type", "template"); @@ -1427,13 +1393,15 @@ struct CmdFlakeShow : FlakeCommand, MixJSON } else { - auto [type, description] = - (attrPath.size() == 1 && attrPathS[0] == "overlay") - || (attrPath.size() == 2 && attrPathS[0] == "overlays") ? std::make_pair("nixpkgs-overlay", "Nixpkgs overlay") : - attrPath.size() == 2 && attrPathS[0] == "nixosConfigurations" ? std::make_pair("nixos-configuration", "NixOS configuration") : - (attrPath.size() == 1 && attrPathS[0] == "nixosModule") - || (attrPath.size() == 2 && attrPathS[0] == "nixosModules") ? std::make_pair("nixos-module", "NixOS module") : - std::make_pair("unknown", "unknown"); + auto [type, description] = (attrPath.size() == 1 && attrPathS[0] == "overlay") + || (attrPath.size() == 2 && attrPathS[0] == "overlays") + ? std::make_pair("nixpkgs-overlay", "Nixpkgs overlay") + : attrPath.size() == 2 && attrPathS[0] == "nixosConfigurations" + ? std::make_pair("nixos-configuration", "NixOS configuration") + : (attrPath.size() == 1 && attrPathS[0] == "nixosModule") + || (attrPath.size() == 2 && attrPathS[0] == "nixosModules") + ? std::make_pair("nixos-module", "NixOS module") + : std::make_pair("unknown", "unknown"); if (json) { j.emplace("type", type); } else { @@ -1480,8 +1448,8 @@ struct CmdFlakePrefetch : FlakeCommand, MixJSON std::string doc() override { return - #include "flake-prefetch.md" - ; +#include "flake-prefetch.md" + ; } void run(ref store) override @@ -1489,7 +1457,8 @@ struct CmdFlakePrefetch : FlakeCommand, MixJSON auto originalRef = getFlakeRef(); auto resolvedRef = originalRef.resolve(store); auto [accessor, lockedRef] = resolvedRef.lazyFetch(store); - auto storePath = fetchToStore(getEvalState()->fetchSettings, *store, accessor, FetchMode::Copy, lockedRef.input.getName()); + auto storePath = + fetchToStore(getEvalState()->fetchSettings, *store, accessor, FetchMode::Copy, lockedRef.input.getName()); auto hash = store->queryPathInfo(storePath)->narHash; if (json) { @@ -1501,7 +1470,8 @@ struct CmdFlakePrefetch : FlakeCommand, MixJSON res["locked"].erase("__final"); // internal for now printJSON(res); } else { - notice("Downloaded '%s' to '%s' (hash '%s').", + notice( + "Downloaded '%s' to '%s' (hash '%s').", lockedRef.to_string(), store->printStorePath(storePath), hash.to_string(HashFormat::SRI, true)); @@ -1531,8 +1501,8 @@ struct CmdFlake : NixMultiCommand std::string doc() override { return - #include "flake.md" - ; +#include "flake.md" + ; } void run() override diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 510cfa592..cc62aeb86 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -26,13 +26,10 @@ struct CmdHashBase : Command std::vector paths; std::optional modulus; - explicit CmdHashBase(FileIngestionMethod mode) : mode(mode) + explicit CmdHashBase(FileIngestionMethod mode) + : mode(mode) { - expectArgs({ - .label = "paths", - .handler = {&paths}, - .completer = completePath - }); + expectArgs({.label = "paths", .handler = {&paths}, .completer = completePath}); // FIXME The following flags should be deprecated, but we don't // yet have a mechanism for that. 
@@ -92,10 +89,9 @@ struct CmdHashBase : Command return PosixSourceAccessor::createAtRoot(makeParentCanonical(path)); }; - Hash h { HashAlgorithm::SHA256 }; // throwaway def to appease C++ + Hash h{HashAlgorithm::SHA256}; // throwaway def to appease C++ switch (mode) { - case FileIngestionMethod::Flat: - { + case FileIngestionMethod::Flat: { // While usually we could use the some code as for NixArchive, // the Flat method needs to support FIFOs, such as those // produced by bash process substitution, e.g.: @@ -107,8 +103,7 @@ struct CmdHashBase : Command h = hashSink->finish().first; break; } - case FileIngestionMethod::NixArchive: - { + case FileIngestionMethod::NixArchive: { auto sourcePath = makeSourcePath(); auto hashSink = makeSink(); dumpPath(sourcePath, *hashSink, (FileSerialisationMethod) mode); @@ -132,7 +127,8 @@ struct CmdHashBase : Command } } - if (truncate && h.hashSize > 20) h = compressHash(h, 20); + if (truncate && h.hashSize > 20) + h = compressHash(h, 20); logger->cout(h.to_string(hashFormat, hashFormat == HashFormat::SRI)); } } @@ -149,14 +145,14 @@ struct CmdHashPath : CmdHashBase addFlag(flag::hashAlgo("algo", &hashAlgo)); addFlag(flag::fileIngestionMethod(&mode)); addFlag(flag::hashFormatWithDefault("format", &hashFormat)); - #if 0 +#if 0 addFlag({ .longName = "modulo", .description = "Compute the hash modulo the specified string.", .labels = {"modulus"}, .handler = {&modulus}, }); - #endif +#endif } }; @@ -193,11 +189,12 @@ struct CmdToBase : Command std::string description() override { - return fmt("convert a hash to %s representation (deprecated, use `nix hash convert` instead)", - hashFormat == HashFormat::Base16 ? "base-16" : - hashFormat == HashFormat::Nix32 ? "base-32" : - hashFormat == HashFormat::Base64 ? "base-64" : - "SRI"); + return fmt( + "convert a hash to %s representation (deprecated, use `nix hash convert` instead)", + hashFormat == HashFormat::Base16 ? "base-16" + : hashFormat == HashFormat::Nix32 ? "base-32" + : hashFormat == HashFormat::Base64 ? "base-64" + : "SRI"); } void run() override @@ -219,13 +216,15 @@ struct CmdHashConvert : Command std::optional algo; std::vector hashStrings; - CmdHashConvert(): to(HashFormat::SRI) { + CmdHashConvert() + : to(HashFormat::SRI) + { addFlag(flag::hashFormatOpt("from", &from)); addFlag(flag::hashFormatWithDefault("to", &to)); addFlag(flag::hashAlgoOpt(&algo)); expectArgs({ - .label = "hashes", - .handler = {&hashStrings}, + .label = "hashes", + .handler = {&hashStrings}, }); } @@ -237,23 +236,21 @@ struct CmdHashConvert : Command std::string doc() override { return - #include "hash-convert.md" - ; +#include "hash-convert.md" + ; } - Category category() override { return catUtility; } + Category category() override + { + return catUtility; + } - void run() override { + void run() override + { for (const auto & s : hashStrings) { - Hash h = - from == HashFormat::SRI - ? Hash::parseSRI(s) - : Hash::parseAny(s, algo); - if (from - && from != HashFormat::SRI - && h.to_string(*from, false) != - (from == HashFormat::Base16 ? toLower(s) : s)) - { + Hash h = from == HashFormat::SRI ? Hash::parseSRI(s) : Hash::parseAny(s, algo); + if (from && from != HashFormat::SRI + && h.to_string(*from, false) != (from == HashFormat::Base16 ? 
toLower(s) : s)) { auto from_as_string = printHashFormat(*from); throw BadHash("input hash '%s' does not have the expected format for '--from %s'", s, from_as_string); } @@ -266,30 +263,34 @@ struct CmdHash : NixMultiCommand { CmdHash() : NixMultiCommand( - "hash", - { - {"convert", []() { return make_ref();}}, - {"path", []() { return make_ref(); }}, - {"file", []() { return make_ref(); }}, - {"to-base16", []() { return make_ref(HashFormat::Base16); }}, - {"to-base32", []() { return make_ref(HashFormat::Nix32); }}, - {"to-base64", []() { return make_ref(HashFormat::Base64); }}, - {"to-sri", []() { return make_ref(HashFormat::SRI); }}, - }) - { } + "hash", + { + {"convert", []() { return make_ref(); }}, + {"path", []() { return make_ref(); }}, + {"file", []() { return make_ref(); }}, + {"to-base16", []() { return make_ref(HashFormat::Base16); }}, + {"to-base32", []() { return make_ref(HashFormat::Nix32); }}, + {"to-base64", []() { return make_ref(HashFormat::Base64); }}, + {"to-sri", []() { return make_ref(HashFormat::SRI); }}, + }) + { + } std::string description() override { return "compute and convert cryptographic hashes"; } - Category category() override { return catUtility; } + Category category() override + { + return catUtility; + } }; static auto rCmdHash = registerCommand("hash"); /* Legacy nix-hash command. */ -static int compatNixHash(int argc, char * * argv) +static int compatNixHash(int argc, char ** argv) { // Wait until `nix hash convert` is not hidden behind experimental flags anymore. // warn("`nix-hash` has been deprecated in favor of `nix hash convert`."); @@ -298,7 +299,9 @@ static int compatNixHash(int argc, char * * argv) bool flat = false; HashFormat hashFormat = HashFormat::Base16; bool truncate = false; + enum { opHash, opTo } op = opHash; + std::vector ss; parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) { @@ -306,33 +309,34 @@ static int compatNixHash(int argc, char * * argv) showManPage("nix-hash"); else if (*arg == "--version") printVersion("nix-hash"); - else if (*arg == "--flat") flat = true; - else if (*arg == "--base16") hashFormat = HashFormat::Base16; - else if (*arg == "--base32") hashFormat = HashFormat::Nix32; - else if (*arg == "--base64") hashFormat = HashFormat::Base64; - else if (*arg == "--sri") hashFormat = HashFormat::SRI; - else if (*arg == "--truncate") truncate = true; + else if (*arg == "--flat") + flat = true; + else if (*arg == "--base16") + hashFormat = HashFormat::Base16; + else if (*arg == "--base32") + hashFormat = HashFormat::Nix32; + else if (*arg == "--base64") + hashFormat = HashFormat::Base64; + else if (*arg == "--sri") + hashFormat = HashFormat::SRI; + else if (*arg == "--truncate") + truncate = true; else if (*arg == "--type") { std::string s = getArg(*arg, arg, end); hashAlgo = parseHashAlgo(s); - } - else if (*arg == "--to-base16") { + } else if (*arg == "--to-base16") { op = opTo; hashFormat = HashFormat::Base16; - } - else if (*arg == "--to-base32") { + } else if (*arg == "--to-base32") { op = opTo; hashFormat = HashFormat::Nix32; - } - else if (*arg == "--to-base64") { + } else if (*arg == "--to-base64") { op = opTo; hashFormat = HashFormat::Base64; - } - else if (*arg == "--to-sri") { + } else if (*arg == "--to-sri") { op = opTo; hashFormat = HashFormat::SRI; - } - else if (*arg != "" && arg->at(0) == '-') + } else if (*arg != "" && arg->at(0) == '-') return false; else ss.push_back(*arg); @@ -341,7 +345,8 @@ static int compatNixHash(int argc, char * * argv) if (op == opHash) { 
CmdHashBase cmd(flat ? FileIngestionMethod::Flat : FileIngestionMethod::NixArchive); - if (!hashAlgo.has_value()) hashAlgo = HashAlgorithm::MD5; + if (!hashAlgo.has_value()) + hashAlgo = HashAlgorithm::MD5; cmd.hashAlgo = hashAlgo.value(); cmd.hashFormat = hashFormat; cmd.truncate = truncate; @@ -352,7 +357,8 @@ static int compatNixHash(int argc, char * * argv) else { CmdToBase cmd(hashFormat, true); cmd.args = ss; - if (hashAlgo.has_value()) cmd.hashAlgo = hashAlgo; + if (hashAlgo.has_value()) + cmd.hashAlgo = hashAlgo; cmd.run(); } diff --git a/src/nix/log.cc b/src/nix/log.cc index 78f1dd570..56e44645b 100644 --- a/src/nix/log.cc +++ b/src/nix/log.cc @@ -16,11 +16,14 @@ struct CmdLog : InstallableCommand std::string doc() override { return - #include "log.md" - ; +#include "log.md" + ; } - Category category() override { return catSecondary; } + Category category() override + { + return catSecondary; + } void run(ref store, ref installable) override { @@ -33,14 +36,12 @@ struct CmdLog : InstallableCommand auto b = installable->toDerivedPath(); // For compat with CLI today, TODO revisit - auto oneUp = std::visit(overloaded { - [&](const DerivedPath::Opaque & bo) { - return make_ref(bo); + auto oneUp = std::visit( + overloaded{ + [&](const DerivedPath::Opaque & bo) { return make_ref(bo); }, + [&](const DerivedPath::Built & bfd) { return bfd.drvPath; }, }, - [&](const DerivedPath::Built & bfd) { - return bfd.drvPath; - }, - }, b.path.raw()); + b.path.raw()); auto path = resolveDerivedPath(*store, *oneUp); RunPager pager; @@ -53,7 +54,8 @@ struct CmdLog : InstallableCommand auto & logSub = *logSubP; auto log = logSub.getBuildLog(path); - if (!log) continue; + if (!log) + continue; logger->stop(); printInfo("got build log for '%s' from '%s'", installable->what(), logSub.getUri()); writeFull(getStandardOutput(), *log); diff --git a/src/nix/ls.cc b/src/nix/ls.cc index 4b282bc43..dcc46fa14 100644 --- a/src/nix/ls.cc +++ b/src/nix/ls.cc @@ -43,11 +43,10 @@ struct MixLs : virtual Args, MixJSON auto showFile = [&](const CanonPath & curPath, std::string_view relPath) { if (verbose) { auto st = accessor->lstat(curPath); - std::string tp = - st.type == SourceAccessor::Type::tRegular ? - (st.isExecutable ? "-r-xr-xr-x" : "-r--r--r--") : - st.type == SourceAccessor::Type::tSymlink ? "lrwxrwxrwx" : - "dr-xr-xr-x"; + std::string tp = st.type == SourceAccessor::Type::tRegular + ? (st.isExecutable ? "-r-xr-xr-x" : "-r--r--r--") + : st.type == SourceAccessor::Type::tSymlink ? "lrwxrwxrwx" + : "dr-xr-xr-x"; auto line = fmt("%s %20d %s", tp, st.fileSize.value_or(0), relPath); if (st.type == SourceAccessor::Type::tSymlink) line += " -> " + accessor->readLink(curPath); @@ -64,9 +63,10 @@ struct MixLs : virtual Args, MixJSON } }; - doPath = [&](const SourceAccessor::Stat & st, const CanonPath & curPath, - std::string_view relPath, bool showDirectory) - { + doPath = [&](const SourceAccessor::Stat & st, + const CanonPath & curPath, + std::string_view relPath, + bool showDirectory) { if (st.type == SourceAccessor::Type::tDirectory && !showDirectory) { auto names = accessor->readDirectory(curPath); for (auto & [name, type] : names) @@ -76,9 +76,8 @@ struct MixLs : virtual Args, MixJSON }; auto st = accessor->lstat(path); - doPath(st, path, - st.type == SourceAccessor::Type::tDirectory ? "." : path.baseName().value_or(""), - showDirectory); + doPath( + st, path, st.type == SourceAccessor::Type::tDirectory ? "." 
: path.baseName().value_or(""), showDirectory); } void list(ref accessor, CanonPath path) @@ -98,11 +97,7 @@ struct CmdLsStore : StoreCommand, MixLs CmdLsStore() { - expectArgs({ - .label = "path", - .handler = {&path}, - .completer = completePath - }); + expectArgs({.label = "path", .handler = {&path}, .completer = completePath}); } std::string description() override @@ -113,8 +108,8 @@ struct CmdLsStore : StoreCommand, MixLs std::string doc() override { return - #include "store-ls.md" - ; +#include "store-ls.md" + ; } void run(ref store) override @@ -132,19 +127,15 @@ struct CmdLsNar : Command, MixLs CmdLsNar() { - expectArgs({ - .label = "nar", - .handler = {&narPath}, - .completer = completePath - }); + expectArgs({.label = "nar", .handler = {&narPath}, .completer = completePath}); expectArg("path", &path); } std::string doc() override { return - #include "nar-ls.md" - ; +#include "nar-ls.md" + ; } std::string description() override diff --git a/src/nix/main.cc b/src/nix/main.cc index 502e04e60..a6077f5e9 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -31,20 +31,20 @@ #include #ifndef _WIN32 -# include -# include -# include -# include +# include +# include +# include +# include #endif #ifdef __linux__ -# include "nix/util/linux-namespaces.hh" +# include "nix/util/linux-namespaces.hh" #endif #ifndef _WIN32 extern std::string chrootHelperName; -void chrootHelper(int argc, char * * argv); +void chrootHelper(int argc, char ** argv); #endif #include "nix/util/strings.hh" @@ -63,19 +63,21 @@ static bool haveInternet() Finally free([&]() { freeifaddrs(addrs); }); for (auto i = addrs; i; i = i->ifa_next) { - if (!i->ifa_addr) continue; + if (!i->ifa_addr) + continue; if (i->ifa_addr->sa_family == AF_INET) { if (ntohl(((sockaddr_in *) i->ifa_addr)->sin_addr.s_addr) != INADDR_LOOPBACK) { return true; } } else if (i->ifa_addr->sa_family == AF_INET6) { - if (!IN6_IS_ADDR_LOOPBACK(&((sockaddr_in6 *) i->ifa_addr)->sin6_addr) && - !IN6_IS_ADDR_LINKLOCAL(&((sockaddr_in6 *) i->ifa_addr)->sin6_addr)) + if (!IN6_IS_ADDR_LOOPBACK(&((sockaddr_in6 *) i->ifa_addr)->sin6_addr) + && !IN6_IS_ADDR_LINKLOCAL(&((sockaddr_in6 *) i->ifa_addr)->sin6_addr)) return true; } } - if (haveNetworkProxyConnection()) return true; + if (haveNetworkProxyConnection()) + return true; return false; #else @@ -93,7 +95,9 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs, virtual RootArgs bool helpRequested = false; bool showVersion = false; - NixArgs() : MultiCommand("", RegisterCommand::getCommandsFor({})), MixCommonArgs("nix") + NixArgs() + : MultiCommand("", RegisterCommand::getCommandsFor({})) + , MixCommonArgs("nix") { categories.clear(); categories[catHelp] = "Help commands"; @@ -143,29 +147,29 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs, virtual RootArgs }); aliases = { - {"add-to-store", { AliasStatus::Deprecated, {"store", "add-path"}}}, - {"cat-nar", { AliasStatus::Deprecated, {"nar", "cat"}}}, - {"cat-store", { AliasStatus::Deprecated, {"store", "cat"}}}, - {"copy-sigs", { AliasStatus::Deprecated, {"store", "copy-sigs"}}}, - {"dev-shell", { AliasStatus::Deprecated, {"develop"}}}, - {"diff-closures", { AliasStatus::Deprecated, {"store", "diff-closures"}}}, - {"dump-path", { AliasStatus::Deprecated, {"store", "dump-path"}}}, - {"hash-file", { AliasStatus::Deprecated, {"hash", "file"}}}, - {"hash-path", { AliasStatus::Deprecated, {"hash", "path"}}}, - {"ls-nar", { AliasStatus::Deprecated, {"nar", "ls"}}}, - {"ls-store", { AliasStatus::Deprecated, {"store", "ls"}}}, - 
{"make-content-addressable", { AliasStatus::Deprecated, {"store", "make-content-addressed"}}}, - {"optimise-store", { AliasStatus::Deprecated, {"store", "optimise"}}}, - {"ping-store", { AliasStatus::Deprecated, {"store", "info"}}}, - {"sign-paths", { AliasStatus::Deprecated, {"store", "sign"}}}, - {"shell", { AliasStatus::AcceptedShorthand, {"env", "shell"}}}, - {"show-derivation", { AliasStatus::Deprecated, {"derivation", "show"}}}, - {"show-config", { AliasStatus::Deprecated, {"config", "show"}}}, - {"to-base16", { AliasStatus::Deprecated, {"hash", "to-base16"}}}, - {"to-base32", { AliasStatus::Deprecated, {"hash", "to-base32"}}}, - {"to-base64", { AliasStatus::Deprecated, {"hash", "to-base64"}}}, - {"verify", { AliasStatus::Deprecated, {"store", "verify"}}}, - {"doctor", { AliasStatus::Deprecated, {"config", "check"}}}, + {"add-to-store", {AliasStatus::Deprecated, {"store", "add-path"}}}, + {"cat-nar", {AliasStatus::Deprecated, {"nar", "cat"}}}, + {"cat-store", {AliasStatus::Deprecated, {"store", "cat"}}}, + {"copy-sigs", {AliasStatus::Deprecated, {"store", "copy-sigs"}}}, + {"dev-shell", {AliasStatus::Deprecated, {"develop"}}}, + {"diff-closures", {AliasStatus::Deprecated, {"store", "diff-closures"}}}, + {"dump-path", {AliasStatus::Deprecated, {"store", "dump-path"}}}, + {"hash-file", {AliasStatus::Deprecated, {"hash", "file"}}}, + {"hash-path", {AliasStatus::Deprecated, {"hash", "path"}}}, + {"ls-nar", {AliasStatus::Deprecated, {"nar", "ls"}}}, + {"ls-store", {AliasStatus::Deprecated, {"store", "ls"}}}, + {"make-content-addressable", {AliasStatus::Deprecated, {"store", "make-content-addressed"}}}, + {"optimise-store", {AliasStatus::Deprecated, {"store", "optimise"}}}, + {"ping-store", {AliasStatus::Deprecated, {"store", "info"}}}, + {"sign-paths", {AliasStatus::Deprecated, {"store", "sign"}}}, + {"shell", {AliasStatus::AcceptedShorthand, {"env", "shell"}}}, + {"show-derivation", {AliasStatus::Deprecated, {"derivation", "show"}}}, + {"show-config", {AliasStatus::Deprecated, {"config", "show"}}}, + {"to-base16", {AliasStatus::Deprecated, {"hash", "to-base16"}}}, + {"to-base32", {AliasStatus::Deprecated, {"hash", "to-base32"}}}, + {"to-base64", {AliasStatus::Deprecated, {"hash", "to-base64"}}}, + {"verify", {AliasStatus::Deprecated, {"store", "verify"}}}, + {"doctor", {AliasStatus::Deprecated, {"config", "check"}}}, }; }; @@ -177,8 +181,8 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs, virtual RootArgs std::string doc() override { return - #include "nix.md" - ; +#include "nix.md" + ; } // Plugins may add new subcommands. 
@@ -227,24 +231,26 @@ static void showHelp(std::vector subcommand, NixArgs & toplevel) EvalState state({}, openStore("dummy://"), fetchSettings, evalSettings); auto vGenerateManpage = state.allocValue(); - state.eval(state.parseExprFromString( - #include "generate-manpage.nix.gen.hh" - , state.rootPath(CanonPath::root)), *vGenerateManpage); + state.eval( + state.parseExprFromString( +#include "generate-manpage.nix.gen.hh" + , state.rootPath(CanonPath::root)), + *vGenerateManpage); state.corepkgsFS->addFile( CanonPath("utils.nix"), - #include "utils.nix.gen.hh" - ); +#include "utils.nix.gen.hh" + ); state.corepkgsFS->addFile( CanonPath("/generate-settings.nix"), - #include "generate-settings.nix.gen.hh" - ); +#include "generate-settings.nix.gen.hh" + ); state.corepkgsFS->addFile( CanonPath("/generate-store-info.nix"), - #include "generate-store-info.nix.gen.hh" - ); +#include "generate-store-info.nix.gen.hh" + ); auto vDump = state.allocValue(); vDump->mkString(toplevel.dumpCli()); @@ -288,17 +294,21 @@ struct CmdHelp : Command std::string doc() override { return - #include "help.md" - ; +#include "help.md" + ; } - Category category() override { return catHelp; } + Category category() override + { + return catHelp; + } void run() override { assert(parent); MultiCommand * toplevel = parent; - while (toplevel->parent) toplevel = toplevel->parent; + while (toplevel->parent) + toplevel = toplevel->parent; showHelp(subcommand, getNixArgs(*this)); } }; @@ -315,11 +325,14 @@ struct CmdHelpStores : Command std::string doc() override { return - #include "help-stores.md.gen.hh" - ; +#include "help-stores.md.gen.hh" + ; } - Category category() override { return catHelp; } + Category category() override + { + return catHelp; + } void run() override { @@ -329,7 +342,7 @@ struct CmdHelpStores : Command static auto rCmdHelpStores = registerCommand("help-stores"); -void mainWrapped(int argc, char * * argv) +void mainWrapped(int argc, char ** argv) { savedArgv = argv; @@ -354,20 +367,22 @@ void mainWrapped(int argc, char * * argv) self-aware. That is, it has to know where it is installed. We don't think it's sentient. 
*/ - settings.buildHook.setDefault(Strings { - getNixBin({}).string(), - "__build-remote", - }); + settings.buildHook.setDefault( + Strings{ + getNixBin({}).string(), + "__build-remote", + }); - #ifdef __linux__ +#ifdef __linux__ if (isRootUser()) { try { saveMountNamespace(); if (unshare(CLONE_NEWNS) == -1) throw SysError("setting up a private mount namespace"); - } catch (Error & e) { } + } catch (Error & e) { + } } - #endif +#endif programPath = argv[0]; auto programName = std::string(baseNameOf(programPath)); @@ -377,12 +392,14 @@ void mainWrapped(int argc, char * * argv) if (argc > 1 && std::string_view(argv[1]) == "__build-remote") { programName = "build-remote"; - argv++; argc--; + argv++; + argc--; } { auto legacy = RegisterLegacyCommand::commands()[programName]; - if (legacy) return legacy(argc, argv); + if (legacy) + return legacy(argc, argv); } evalSettings.pureEval = true; @@ -417,9 +434,11 @@ void mainWrapped(int argc, char * * argv) for (auto & builtinPtr : state.getBuiltins().attrs()->lexicographicOrder(state.symbols)) { auto & builtin = *builtinPtr; auto b = nlohmann::json::object(); - if (!builtin.value->isPrimOp()) continue; + if (!builtin.value->isPrimOp()) + continue; auto primOp = builtin.value->primOp(); - if (!primOp->doc) continue; + if (!primOp->doc) + continue; b["args"] = primOp->args; b["doc"] = trim(stripIndentation(primOp->doc)); if (primOp->experimentalFeature) @@ -428,7 +447,8 @@ void mainWrapped(int argc, char * * argv) } for (auto & [name, info] : state.constantInfos) { auto b = nlohmann::json::object(); - if (!info.doc) continue; + if (!info.doc) + continue; b["doc"] = trim(stripIndentation(info.doc)); b["type"] = showType(info.type, false); if (info.impureOnly) @@ -444,16 +464,18 @@ void mainWrapped(int argc, char * * argv) return; } - Finally printCompletions([&]() - { + Finally printCompletions([&]() { if (args.completions) { switch (args.completions->type) { case Completions::Type::Normal: - logger->cout("normal"); break; + logger->cout("normal"); + break; case Completions::Type::Filenames: - logger->cout("filenames"); break; + logger->cout("filenames"); + break; case Completions::Type::Attrs: - logger->cout("attrs"); break; + logger->cout("attrs"); + break; } for (auto & s : args.completions->completions) logger->cout(s.completion + "\t" + trim(s.description)); @@ -463,9 +485,10 @@ void mainWrapped(int argc, char * * argv) try { auto isNixCommand = std::regex_search(programName, std::regex("nix$")); auto allowShebang = isNixCommand && argc > 1; - args.parseCmdline(argvToStrings(argc, argv),allowShebang); + args.parseCmdline(argvToStrings(argc, argv), allowShebang); } catch (UsageError &) { - if (!args.helpRequested && !args.completions) throw; + if (!args.helpRequested && !args.completions) + throw; } applyJSONLogger(); @@ -484,7 +507,8 @@ void mainWrapped(int argc, char * * argv) return; } - if (args.completions) return; + if (args.completions) + return; if (args.showVersion) { printVersion(programName); @@ -494,8 +518,7 @@ void mainWrapped(int argc, char * * argv) if (!args.command) throw UsageError("no subcommand specified"); - experimentalFeatureSettings.require( - args.command->second->experimentalFeature()); + experimentalFeatureSettings.require(args.command->second->experimentalFeature()); if (args.useNet && !haveInternet()) { warn("you don't have Internet access; disabling some network-dependent features"); @@ -534,9 +557,9 @@ void mainWrapped(int argc, char * * argv) } } -} +} // namespace nix -int main(int argc, char * * argv) +int main(int 
argc, char ** argv) { // The CLI has a more detailed version than the libraries; see nixVersion. nix::nixVersion = NIX_CLI_VERSION; @@ -546,7 +569,5 @@ int main(int argc, char * * argv) nix::setStackSize(64 * 1024 * 1024); #endif - return nix::handleExceptions(argv[0], [&]() { - nix::mainWrapped(argc, argv); - }); + return nix::handleExceptions(argv[0], [&]() { nix::mainWrapped(argc, argv); }); } diff --git a/src/nix/make-content-addressed.cc b/src/nix/make-content-addressed.cc index 5523ae279..a54729c45 100644 --- a/src/nix/make-content-addressed.cc +++ b/src/nix/make-content-addressed.cc @@ -24,16 +24,16 @@ struct CmdMakeContentAddressed : virtual CopyCommand, virtual StorePathsCommand, std::string doc() override { return - #include "make-content-addressed.md" - ; +#include "make-content-addressed.md" + ; } void run(ref srcStore, StorePaths && storePaths) override { auto dstStore = dstUri.empty() ? openStore() : openStore(dstUri); - auto remappings = makeContentAddressed(*srcStore, *dstStore, - StorePathSet(storePaths.begin(), storePaths.end())); + auto remappings = + makeContentAddressed(*srcStore, *dstStore, StorePathSet(storePaths.begin(), storePaths.end())); if (json) { auto jsonRewrites = json::object(); @@ -49,9 +49,7 @@ struct CmdMakeContentAddressed : virtual CopyCommand, virtual StorePathsCommand, for (auto & path : storePaths) { auto i = remappings.find(path); assert(i != remappings.end()); - notice("rewrote '%s' to '%s'", - srcStore->printStorePath(path), - srcStore->printStorePath(i->second)); + notice("rewrote '%s' to '%s'", srcStore->printStorePath(path), srcStore->printStorePath(i->second)); } } } diff --git a/src/nix/man-pages.cc b/src/nix/man-pages.cc index 8585c164c..7ab8a0eeb 100644 --- a/src/nix/man-pages.cc +++ b/src/nix/man-pages.cc @@ -27,4 +27,4 @@ void showManPage(const std::string & name) throw SysError("command 'man %1%' failed", name.c_str()); } -} +} // namespace nix diff --git a/src/nix/man-pages.hh b/src/nix/man-pages.hh index 9ba035af8..7a71f98e8 100644 --- a/src/nix/man-pages.hh +++ b/src/nix/man-pages.hh @@ -25,4 +25,4 @@ std::filesystem::path getNixManDir(); */ void showManPage(const std::string & name); -} +} // namespace nix diff --git a/src/nix/nar.cc b/src/nix/nar.cc index debb6b95e..bae77b6cc 100644 --- a/src/nix/nar.cc +++ b/src/nix/nar.cc @@ -4,8 +4,10 @@ using namespace nix; struct CmdNar : NixMultiCommand { - CmdNar() : NixMultiCommand("nar", RegisterCommand::getCommandsFor({"nar"})) - { } + CmdNar() + : NixMultiCommand("nar", RegisterCommand::getCommandsFor({"nar"})) + { + } std::string description() override { @@ -15,11 +17,14 @@ struct CmdNar : NixMultiCommand std::string doc() override { return - #include "nar.md" - ; +#include "nar.md" + ; } - Category category() override { return catUtility; } + Category category() override + { + return catUtility; + } }; static auto rCmdNar = registerCommand("nar"); diff --git a/src/nix/optimise-store.cc b/src/nix/optimise-store.cc index e319f5c90..e000026fc 100644 --- a/src/nix/optimise-store.cc +++ b/src/nix/optimise-store.cc @@ -16,8 +16,8 @@ struct CmdOptimiseStore : StoreCommand std::string doc() override { return - #include "optimise-store.md" - ; +#include "optimise-store.md" + ; } void run(ref store) override diff --git a/src/nix/path-from-hash-part.cc b/src/nix/path-from-hash-part.cc index 814b723f9..7e6c6ec28 100644 --- a/src/nix/path-from-hash-part.cc +++ b/src/nix/path-from-hash-part.cc @@ -23,8 +23,8 @@ struct CmdPathFromHashPart : StoreCommand std::string doc() override { return - #include 
"path-from-hash-part.md" - ; +#include "path-from-hash-part.md" + ; } void run(ref store) override diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index 04af72646..fef3ae120 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -28,7 +28,6 @@ static uint64_t getStoreObjectsTotalSize(Store & store, const StorePathSet & clo return totalNarSize; } - /** * Write a JSON representation of store object metadata, such as the * hash and the references. @@ -36,10 +35,7 @@ static uint64_t getStoreObjectsTotalSize(Store & store, const StorePathSet & clo * @param showClosureSize If true, the closure size of each path is * included. */ -static json pathInfoToJSON( - Store & store, - const StorePathSet & storePaths, - bool showClosureSize) +static json pathInfoToJSON(Store & store, const StorePathSet & storePaths, bool showClosureSize) { json::object_t jsonAllObjects = json::object(); @@ -70,7 +66,8 @@ static json pathInfoToJSON( if (auto * depNarInfo = dynamic_cast(&*depInfo)) totalDownloadSize += depNarInfo->fileSize; else - throw Error("Missing .narinfo for dep %s of %s", + throw Error( + "Missing .narinfo for dep %s of %s", store.printStorePath(p), store.printStorePath(storePath)); } @@ -87,7 +84,6 @@ static json pathInfoToJSON( return jsonAllObjects; } - struct CmdPathInfo : StorePathsCommand, MixJSON { bool showSize = false; @@ -133,11 +129,14 @@ struct CmdPathInfo : StorePathsCommand, MixJSON std::string doc() override { return - #include "path-info.md" - ; +#include "path-info.md" + ; } - Category category() override { return catSecondary; } + Category category() override + { + return catSecondary; + } void printSize(std::ostream & str, uint64_t value) { @@ -186,15 +185,17 @@ struct CmdPathInfo : StorePathsCommand, MixJSON if (showSigs) { str << '\t'; Strings ss; - if (info->ultimate) ss.push_back("ultimate"); - if (info->ca) ss.push_back("ca:" + renderContentAddress(*info->ca)); - for (auto & sig : info->sigs) ss.push_back(sig); + if (info->ultimate) + ss.push_back("ultimate"); + if (info->ca) + ss.push_back("ca:" + renderContentAddress(*info->ca)); + for (auto & sig : info->sigs) + ss.push_back(sig); str << concatStringsSep(" ", ss); } logger->cout(str.str()); } - } } }; diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index 96dcdb4e8..1423ce517 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -23,18 +23,20 @@ using namespace nix; mirrors defined in Nixpkgs. 
*/ std::string resolveMirrorUrl(EvalState & state, const std::string & url) { - if (url.substr(0, 9) != "mirror://") return url; + if (url.substr(0, 9) != "mirror://") + return url; std::string s(url, 9); auto p = s.find('/'); - if (p == std::string::npos) throw Error("invalid mirror URL '%s'", url); + if (p == std::string::npos) + throw Error("invalid mirror URL '%s'", url); std::string mirrorName(s, 0, p); Value vMirrors; // FIXME: use nixpkgs flake - state.eval(state.parseExprFromString( - "import ", - state.rootPath(CanonPath::root)), + state.eval( + state.parseExprFromString( + "import ", state.rootPath(CanonPath::root)), vMirrors); state.forceAttrs(vMirrors, noPos, "while evaluating the set of all mirrors"); @@ -46,22 +48,22 @@ std::string resolveMirrorUrl(EvalState & state, const std::string & url) if (mirrorList->value->listSize() < 1) throw Error("mirror URL '%s' did not expand to anything", url); - std::string mirror(state.forceString(*mirrorList->value->listView()[0], noPos, "while evaluating the first available mirror")); + std::string mirror( + state.forceString(*mirrorList->value->listView()[0], noPos, "while evaluating the first available mirror")); return mirror + (hasSuffix(mirror, "/") ? "" : "/") + s.substr(p + 1); } std::tuple prefetchFile( - ref store, - std::string_view url, - std::optional name, - HashAlgorithm hashAlgo, - std::optional expectedHash, - bool unpack, - bool executable) + ref store, + std::string_view url, + std::optional name, + HashAlgorithm hashAlgo, + std::optional expectedHash, + bool unpack, + bool executable) { - ContentAddressMethod method = unpack || executable - ? ContentAddressMethod::Raw::NixArchive - : ContentAddressMethod::Raw::Flat; + ContentAddressMethod method = + unpack || executable ? ContentAddressMethod::Raw::NixArchive : ContentAddressMethod::Raw::Flat; /* Figure out a name in the Nix store. */ if (!name) { @@ -77,10 +79,8 @@ std::tuple prefetchFile( the store. */ if (expectedHash) { hashAlgo = expectedHash->algo; - storePath = store->makeFixedOutputPathFromCA(*name, ContentAddressWithReferences::fromParts( - method, - *expectedHash, - {})); + storePath = + store->makeFixedOutputPathFromCA(*name, ContentAddressWithReferences::fromParts(method, *expectedHash, {})); if (store->isValidPath(*storePath)) hash = expectedHash; else @@ -99,7 +99,8 @@ std::tuple prefetchFile( mode = 0700; AutoCloseFD fd = toDescriptor(open(tmpFile.string().c_str(), O_WRONLY | O_CREAT | O_EXCL, mode)); - if (!fd) throw SysError("creating temporary file '%s'", tmpFile); + if (!fd) + throw SysError("creating temporary file '%s'", tmpFile); FdSink sink(fd.get()); @@ -110,8 +111,7 @@ std::tuple prefetchFile( /* Optionally unpack the file. 
*/ if (unpack) { - Activity act(*logger, lvlChatty, actUnknown, - fmt("unpacking '%s'", url)); + Activity act(*logger, lvlChatty, actUnknown, fmt("unpacking '%s'", url)); auto unpacked = (tmpDir.path() / "unpacked").string(); createDirs(unpacked); unpackTarfile(tmpFile.string(), unpacked); @@ -127,12 +127,10 @@ std::tuple prefetchFile( } } - Activity act(*logger, lvlChatty, actUnknown, - fmt("adding '%s' to the store", url)); + Activity act(*logger, lvlChatty, actUnknown, fmt("adding '%s' to the store", url)); auto info = store->addToStoreSlow( - *name, PosixSourceAccessor::createAtRoot(tmpFile), - method, hashAlgo, {}, expectedHash); + *name, PosixSourceAccessor::createAtRoot(tmpFile), method, hashAlgo, {}, expectedHash); storePath = info.path; assert(info.ca); hash = info.ca->hash; @@ -141,7 +139,7 @@ std::tuple prefetchFile( return {storePath.value(), hash.value()}; } -static int main_nix_prefetch_url(int argc, char * * argv) +static int main_nix_prefetch_url(int argc, char ** argv) { { HashAlgorithm ha = HashAlgorithm::SHA256; @@ -166,14 +164,12 @@ static int main_nix_prefetch_url(int argc, char * * argv) else if (*arg == "--type") { auto s = getArg(*arg, arg, end); ha = parseHashAlgo(s); - } - else if (*arg == "--print-path") + } else if (*arg == "--print-path") printPath = true; else if (*arg == "--attr" || *arg == "-A") { fromExpr = true; attrPath = getArg(*arg, arg, end); - } - else if (*arg == "--unpack") + } else if (*arg == "--unpack") unpack = true; else if (*arg == "--executable") executable = true; @@ -207,10 +203,7 @@ static int main_nix_prefetch_url(int argc, char * * argv) url = args[0]; } else { Value vRoot; - state->evalFile( - resolveExprPath( - lookupFileArg(*state, args.empty() ? "." : args[0])), - vRoot); + state->evalFile(resolveExprPath(lookupFileArg(*state, args.empty() ? "." : args[0])), vRoot); Value & v(*findAlongAttrPath(*state, attrPath, autoArgs, vRoot).first); state->forceAttrs(v, noPos, "while evaluating the source attribute to prefetch"); @@ -221,20 +214,24 @@ static int main_nix_prefetch_url(int argc, char * * argv) state->forceList(*attr->value, noPos, "while evaluating the urls to prefetch"); if (attr->value->listSize() < 1) throw Error("'urls' list is empty"); - url = state->forceString(*attr->value->listView()[0], noPos, "while evaluating the first url from the urls list"); + url = state->forceString( + *attr->value->listView()[0], noPos, "while evaluating the first url from the urls list"); /* Extract the hash mode. */ auto attr2 = v.attrs()->get(state->symbols.create("outputHashMode")); if (!attr2) printInfo("warning: this does not look like a fetchurl call"); else - unpack = state->forceString(*attr2->value, noPos, "while evaluating the outputHashMode of the source to prefetch") == "recursive"; + unpack = state->forceString( + *attr2->value, noPos, "while evaluating the outputHashMode of the source to prefetch") + == "recursive"; /* Extract the name. 
*/ if (!name) { auto attr3 = v.attrs()->get(state->symbols.create("name")); if (!attr3) - name = state->forceString(*attr3->value, noPos, "while evaluating the name of the source to prefetch"); + name = + state->forceString(*attr3->value, noPos, "while evaluating the name of the source to prefetch"); } } @@ -242,8 +239,8 @@ static int main_nix_prefetch_url(int argc, char * * argv) if (args.size() == 2) expectedHash = Hash::parseAny(args[1], ha); - auto [storePath, hash] = prefetchFile( - store, resolveMirrorUrl(*state, url), name, ha, expectedHash, unpack, executable); + auto [storePath, hash] = + prefetchFile(store, resolveMirrorUrl(*state, url), name, ha, expectedHash, unpack, executable); logger->stop(); @@ -273,7 +270,8 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON { addFlag({ .longName = "name", - .description = "Override the name component of the resulting store path. It defaults to the base name of *url*.", + .description = + "Override the name component of the resulting store path. It defaults to the base name of *url*.", .labels = {"name"}, .handler = {&name}, }); @@ -282,26 +280,22 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON .longName = "expected-hash", .description = "The expected hash of the file.", .labels = {"hash"}, - .handler = {[&](std::string s) { - expectedHash = Hash::parseAny(s, hashAlgo); - }}, + .handler = {[&](std::string s) { expectedHash = Hash::parseAny(s, hashAlgo); }}, }); addFlag(flag::hashAlgo("hash-type", &hashAlgo)); addFlag({ .longName = "executable", - .description = - "Make the resulting file executable. Note that this causes the " - "resulting hash to be a NAR hash rather than a flat file hash.", + .description = "Make the resulting file executable. Note that this causes the " + "resulting hash to be a NAR hash rather than a flat file hash.", .handler = {&executable, true}, }); addFlag({ .longName = "unpack", - .description = - "Unpack the archive (which must be a tarball or zip file) and add " - "the result to the Nix store.", + .description = "Unpack the archive (which must be a tarball or zip file) and add " + "the result to the Nix store.", .handler = {&unpack, true}, }); @@ -316,9 +310,10 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON std::string doc() override { return - #include "store-prefetch-file.md" - ; +#include "store-prefetch-file.md" + ; } + void run(ref store) override { auto [storePath, hash] = prefetchFile(store, url, name, hashAlgo, expectedHash, unpack, executable); @@ -329,7 +324,8 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON res["hash"] = hash.to_string(HashFormat::SRI, true); printJSON(res); } else { - notice("Downloaded '%s' to '%s' (hash '%s').", + notice( + "Downloaded '%s' to '%s' (hash '%s').", url, store->printStorePath(storePath), hash.to_string(HashFormat::SRI, true)); diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 2c593729f..1f00d8fa4 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -30,12 +30,11 @@ struct ProfileElementSource ExtendedOutputsSpec outputs; // TODO libc++ 16 (used by darwin) missing `std::set::operator <=>`, can't do yet. 
- //auto operator <=> (const ProfileElementSource & other) const - auto operator < (const ProfileElementSource & other) const + // auto operator <=> (const ProfileElementSource & other) const + auto operator<(const ProfileElementSource & other) const { - return - std::tuple(originalRef.to_string(), attrPath, outputs) < - std::tuple(other.originalRef.to_string(), other.attrPath, other.outputs); + return std::tuple(originalRef.to_string(), attrPath, outputs) + < std::tuple(other.originalRef.to_string(), other.attrPath, other.outputs); } std::string to_string() const @@ -85,22 +84,19 @@ struct ProfileElement return showVersions(versions); } - void updateStorePaths( - ref evalStore, - ref store, - const BuiltPaths & builtPaths) + void updateStorePaths(ref evalStore, ref store, const BuiltPaths & builtPaths) { storePaths.clear(); for (auto & buildable : builtPaths) { - std::visit(overloaded { - [&](const BuiltPath::Opaque & bo) { - storePaths.insert(bo.path); + std::visit( + overloaded{ + [&](const BuiltPath::Opaque & bo) { storePaths.insert(bo.path); }, + [&](const BuiltPath::Built & bfd) { + for (auto & output : bfd.outputs) + storePaths.insert(output.second); + }, }, - [&](const BuiltPath::Built & bfd) { - for (auto & output : bfd.outputs) - storePaths.insert(output.second); - }, - }, buildable.raw()); + buildable.raw()); } } }; @@ -120,7 +116,7 @@ struct ProfileManifest std::map elements; - ProfileManifest() { } + ProfileManifest() {} ProfileManifest(EvalState & state, const std::filesystem::path & profile) { @@ -133,17 +129,17 @@ struct ProfileManifest std::string sUrl; std::string sOriginalUrl; switch (version) { - case 1: - sUrl = "uri"; - sOriginalUrl = "originalUri"; - break; - case 2: - case 3: - sUrl = "url"; - sOriginalUrl = "originalUrl"; - break; - default: - throw Error("profile manifest '%s' has unsupported version %d", manifestPath, version); + case 1: + sUrl = "uri"; + sOriginalUrl = "originalUri"; + break; + case 2: + case 3: + sUrl = "url"; + sOriginalUrl = "originalUrl"; + break; + default: + throw Error("profile manifest '%s' has unsupported version %d", manifestPath, version); } auto elems = json["elements"]; @@ -153,24 +149,22 @@ struct ProfileManifest for (auto & p : e["storePaths"]) element.storePaths.insert(state.store->parseStorePath((std::string) p)); element.active = e["active"]; - if(e.contains("priority")) { + if (e.contains("priority")) { element.priority = e["priority"]; } if (e.value(sUrl, "") != "") { - element.source = ProfileElementSource { + element.source = ProfileElementSource{ parseFlakeRef(fetchSettings, e[sOriginalUrl]), parseFlakeRef(fetchSettings, e[sUrl]), e["attrPath"], - e["outputs"].get() - }; + e["outputs"].get()}; } std::string name = - elems.is_object() - ? elem.key() + elems.is_object() ? elem.key() : element.source - ? getNameFromURL(parseURL(element.source->to_string())).value_or(element.identifier()) - : element.identifier(); + ? 
getNameFromURL(parseURL(element.source->to_string())).value_or(element.identifier()) + : element.identifier(); addElement(name, std::move(element)); } @@ -258,17 +252,18 @@ struct ProfileManifest auto narHash = hashString(HashAlgorithm::SHA256, sink.s); - ValidPathInfo info { + ValidPathInfo info{ *store, "profile", - FixedOutputInfo { + FixedOutputInfo{ .method = FileIngestionMethod::NixArchive, .hash = narHash, - .references = { - .others = std::move(references), - // profiles never refer to themselves - .self = false, - }, + .references = + { + .others = std::move(references), + // profiles never refer to themselves + .self = false, + }, }, narHash, }; @@ -292,13 +287,11 @@ struct ProfileManifest logger->cout("%s%s: ∅ -> %s", indent, j->second.identifier(), j->second.versions()); changes = true; ++j; - } - else if (i != prev.elements.end() && (j == cur.elements.end() || i->first < j->first)) { + } else if (i != prev.elements.end() && (j == cur.elements.end() || i->first < j->first)) { logger->cout("%s%s: %s -> ∅", indent, i->second.identifier(), i->second.versions()); changes = true; ++i; - } - else { + } else { auto v1 = i->second.versions(); auto v2 = j->second.versions(); if (v1 != v2) { @@ -316,18 +309,16 @@ struct ProfileManifest }; static std::map>> -builtPathsPerInstallable( - const std::vector, BuiltPathWithResult>> & builtPaths) +builtPathsPerInstallable(const std::vector, BuiltPathWithResult>> & builtPaths) { std::map>> res; for (auto & [installable, builtPath] : builtPaths) { - auto & r = res.insert({ - &*installable, - { - {}, - make_ref(), - } - }).first->second; + auto & r = res.insert({&*installable, + { + {}, + make_ref(), + }}) + .first->second; /* Note that there could be conflicting info (e.g. meta.priority fields) if the installable returned multiple derivations. So pick one arbitrarily. FIXME: @@ -342,7 +333,8 @@ struct CmdProfileAdd : InstallablesCommand, MixDefaultProfile { std::optional priority; - CmdProfileAdd() { + CmdProfileAdd() + { addFlag({ .longName = "priority", .description = "The priority of the package to add.", @@ -359,8 +351,8 @@ struct CmdProfileAdd : InstallablesCommand, MixDefaultProfile std::string doc() override { return - #include "profile-add.md" - ; +#include "profile-add.md" + ; } void run(ref store, Installables && installables) override @@ -368,18 +360,18 @@ struct CmdProfileAdd : InstallablesCommand, MixDefaultProfile ProfileManifest manifest(*getEvalState(), *profile); auto builtPaths = builtPathsPerInstallable( - Installable::build2( - getEvalStore(), store, Realise::Outputs, installables, bmNormal)); + Installable::build2(getEvalStore(), store, Realise::Outputs, installables, bmNormal)); for (auto & installable : installables) { ProfileElement element; auto iter = builtPaths.find(&*installable); - if (iter == builtPaths.end()) continue; + if (iter == builtPaths.end()) + continue; auto & [res, info] = iter->second; if (auto * info2 = dynamic_cast(&*info)) { - element.source = ProfileElementSource { + element.source = ProfileElementSource{ .originalRef = info2->flake.originalRef, .lockedRef = info2->flake.lockedRef, .attrPath = info2->value.attrPath, @@ -389,15 +381,10 @@ struct CmdProfileAdd : InstallablesCommand, MixDefaultProfile // If --priority was specified we want to override the // priority of the installable. - element.priority = - priority - ? *priority - : ({ - auto * info2 = dynamic_cast(&*info); - info2 - ? info2->value.priority.value_or(defaultPriority) - : defaultPriority; - }); + element.priority = priority ? 
*priority : ({ + auto * info2 = dynamic_cast(&*info); + info2 ? info2->value.priority.value_or(defaultPriority) : defaultPriority; + }); element.updateStorePaths(getEvalStore(), store, res); @@ -409,12 +396,9 @@ struct CmdProfileAdd : InstallablesCommand, MixDefaultProfile auto existingElement = existingPair->second; auto existingSource = existingElement.source; auto elementSource = element.source; - if (existingSource - && elementSource - && existingElement.priority == element.priority + if (existingSource && elementSource && existingElement.priority == element.priority && existingSource->originalRef == elementSource->originalRef - && existingSource->attrPath == elementSource->attrPath - ) { + && existingSource->attrPath == elementSource->attrPath) { warn("'%s' is already added", elementName); continue; } @@ -427,7 +411,8 @@ struct CmdProfileAdd : InstallablesCommand, MixDefaultProfile updateProfile(manifest.build(store)); } catch (BuildEnvFileConflictError & conflictError) { // FIXME use C++20 std::ranges once macOS has it - // See https://github.com/NixOS/nix/compare/3efa476c5439f8f6c1968a6ba20a31d1239c2f04..1fe5d172ece51a619e879c4b86f603d9495cc102 + // See + // https://github.com/NixOS/nix/compare/3efa476c5439f8f6c1968a6ba20a31d1239c2f04..1fe5d172ece51a619e879c4b86f603d9495cc102 auto findRefByFilePath = [&](Iterator begin, Iterator end) { for (auto it = begin; it != end; it++) { auto & [name, profileElement] = *it; @@ -445,9 +430,11 @@ struct CmdProfileAdd : InstallablesCommand, MixDefaultProfile // There are 2 conflicting files. We need to find out which one is from the already installed package and // which one is the package that is the new package that is being installed. // The first matching package is the one that was already installed (original). - auto [originalConflictingFilePath, originalEntryName, originalConflictingRefs] = findRefByFilePath(manifest.elements.begin(), manifest.elements.end()); + auto [originalConflictingFilePath, originalEntryName, originalConflictingRefs] = + findRefByFilePath(manifest.elements.begin(), manifest.elements.end()); // The last matching package is the one that was going to be installed (new). 
- auto [newConflictingFilePath, newEntryName, newConflictingRefs] = findRefByFilePath(manifest.elements.rbegin(), manifest.elements.rend()); + auto [newConflictingFilePath, newEntryName, newConflictingRefs] = + findRefByFilePath(manifest.elements.rbegin(), manifest.elements.rend()); throw Error( "An existing package already provides the following file:\n" @@ -477,15 +464,15 @@ struct CmdProfileAdd : InstallablesCommand, MixDefaultProfile concatStringsSep(" ", newConflictingRefs), conflictError.priority, conflictError.priority - 1, - conflictError.priority + 1 - ); + conflictError.priority + 1); } } }; struct Matcher { - virtual ~Matcher() { } + virtual ~Matcher() {} + virtual std::string getTitle() = 0; virtual bool matches(const std::string & name, const ProfileElement & element) = 0; }; @@ -495,8 +482,11 @@ struct RegexMatcher final : public Matcher std::regex regex; std::string pattern; - RegexMatcher(const std::string & pattern) : regex(pattern, std::regex::extended | std::regex::icase), pattern(pattern) - { } + RegexMatcher(const std::string & pattern) + : regex(pattern, std::regex::extended | std::regex::icase) + , pattern(pattern) + { + } std::string getTitle() override { @@ -513,8 +503,10 @@ struct StorePathMatcher final : public Matcher { nix::StorePath storePath; - StorePathMatcher(const nix::StorePath & storePath) : storePath(storePath) - { } + StorePathMatcher(const nix::StorePath & storePath) + : storePath(storePath) + { + } std::string getTitle() override { @@ -531,8 +523,10 @@ struct NameMatcher final : public Matcher { std::string name; - NameMatcher(const std::string & name) : name(name) - { } + NameMatcher(const std::string & name) + : name(name) + { + } std::string getTitle() override { @@ -572,40 +566,43 @@ public: .longName = "all", .description = "Match all packages in the profile.", .handler = {[this]() { - _matchers.push_back(ref(std::shared_ptr(&all, [](AllMatcher*) {}))); + _matchers.push_back(ref(std::shared_ptr(&all, [](AllMatcher *) {}))); }}, }); addFlag({ .longName = "regex", .description = "A regular expression to match one or more packages in the profile.", .labels = {"pattern"}, - .handler = {[this](std::string arg) { - _matchers.push_back(make_ref(arg)); - }}, - }); - expectArgs({ - .label = "elements", - .optional = true, - .handler = {[this](std::vector args) { - for (auto & arg : args) { - if (auto n = string2Int(arg)) { - throw Error("'nix profile' no longer supports indices ('%d')", *n); - } else if (getStore()->isStorePath(arg)) { - _matchers.push_back(make_ref(getStore()->parseStorePath(arg))); - } else { - _matchers.push_back(make_ref(arg)); - } - } - }} + .handler = {[this](std::string arg) { _matchers.push_back(make_ref(arg)); }}, }); + expectArgs( + {.label = "elements", + .optional = true, + .handler = {[this](std::vector args) { + for (auto & arg : args) { + if (auto n = string2Int(arg)) { + throw Error("'nix profile' no longer supports indices ('%d')", *n); + } else if (getStore()->isStorePath(arg)) { + _matchers.push_back(make_ref(getStore()->parseStorePath(arg))); + } else { + _matchers.push_back(make_ref(arg)); + } + } + }}}); } - StringSet getMatchingElementNames(ProfileManifest & manifest) { + StringSet getMatchingElementNames(ProfileManifest & manifest) + { if (_matchers.empty()) { throw UsageError("No packages specified."); } - if (std::find_if(_matchers.begin(), _matchers.end(), [](const ref & m) { return m.dynamic_pointer_cast(); }) != _matchers.end() && _matchers.size() > 1) { + if (std::find_if( + _matchers.begin(), + 
_matchers.end(), + [](const ref & m) { return m.dynamic_pointer_cast(); }) + != _matchers.end() + && _matchers.size() > 1) { throw UsageError("--all cannot be used with package names or regular expressions."); } @@ -641,8 +638,8 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem std::string doc() override { return - #include "profile-remove.md" - ; +#include "profile-remove.md" + ; } void run(ref store) override @@ -654,7 +651,7 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem auto matchingElementNames = getMatchingElementNames(oldManifest); if (matchingElementNames.empty()) { - warn ("No packages to remove. Use 'nix profile list' to see the current profile."); + warn("No packages to remove. Use 'nix profile list' to see the current profile."); return; } @@ -665,9 +662,7 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem } auto removedCount = oldManifest.elements.size() - newManifest.elements.size(); - printInfo("removed %d packages, kept %d packages", - removedCount, - newManifest.elements.size()); + printInfo("removed %d packages, kept %d packages", removedCount, newManifest.elements.size()); updateProfile(newManifest.build(store)); } @@ -683,8 +678,8 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf std::string doc() override { return - #include "profile-upgrade.md" - ; +#include "profile-upgrade.md" + ; } void run(ref store) override @@ -721,8 +716,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf upgradedCount++; - Activity act(*logger, lvlChatty, actUnknown, - fmt("checking '%s' for updates", element.source->attrPath)); + Activity act(*logger, lvlChatty, actUnknown, fmt("checking '%s' for updates", element.source->attrPath)); auto installable = make_ref( this, @@ -735,20 +729,23 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf lockFlags); auto derivedPaths = installable->toDerivedPaths(); - if (derivedPaths.empty()) continue; + if (derivedPaths.empty()) + continue; auto * infop = dynamic_cast(&*derivedPaths[0].info); // `InstallableFlake` should use `ExtraPathInfoFlake`. 
assert(infop); auto & info = *infop; - if (info.flake.lockedRef.input.isLocked() - && element.source->lockedRef == info.flake.lockedRef) + if (info.flake.lockedRef.input.isLocked() && element.source->lockedRef == info.flake.lockedRef) continue; - printInfo("upgrading '%s' from flake '%s' to '%s'", - element.source->attrPath, element.source->lockedRef, info.flake.lockedRef); + printInfo( + "upgrading '%s' from flake '%s' to '%s'", + element.source->attrPath, + element.source->lockedRef, + info.flake.lockedRef); - element.source = ProfileElementSource { + element.source = ProfileElementSource{ .originalRef = installable->flakeRef, .lockedRef = info.flake.lockedRef, .attrPath = info.value.attrPath, @@ -765,16 +762,12 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf } auto builtPaths = builtPathsPerInstallable( - Installable::build2( - getEvalStore(), store, Realise::Outputs, installables, bmNormal)); + Installable::build2(getEvalStore(), store, Realise::Outputs, installables, bmNormal)); for (size_t i = 0; i < installables.size(); ++i) { auto & installable = installables.at(i); auto & element = *elems.at(i); - element.updateStorePaths( - getEvalStore(), - store, - builtPaths.find(&*installable)->second.first); + element.updateStorePaths(getEvalStore(), store, builtPaths.find(&*installable)->second.first); } updateProfile(manifest.build(store)); @@ -791,8 +784,8 @@ struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultPro std::string doc() override { return - #include "profile-list.md" - ; +#include "profile-list.md" + ; } void run(ref store) override @@ -804,16 +797,20 @@ struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultPro } else { for (const auto & [i, e] : enumerate(manifest.elements)) { auto & [name, element] = e; - if (i) logger->cout(""); - logger->cout("Name: " ANSI_BOLD "%s" ANSI_NORMAL "%s", + if (i) + logger->cout(""); + logger->cout( + "Name: " ANSI_BOLD "%s" ANSI_NORMAL "%s", name, element.active ? 
"" : " " ANSI_RED "(inactive)" ANSI_NORMAL); if (element.source) { - logger->cout("Flake attribute: %s%s", element.source->attrPath, element.source->outputs.to_string()); + logger->cout( + "Flake attribute: %s%s", element.source->attrPath, element.source->outputs.to_string()); logger->cout("Original flake URL: %s", element.source->originalRef.to_string()); logger->cout("Locked flake URL: %s", element.source->lockedRef.to_string()); } - logger->cout("Store paths: %s", concatStringsSep(" ", store->printStorePathSet(element.storePaths))); + logger->cout( + "Store paths: %s", concatStringsSep(" ", store->printStorePathSet(element.storePaths))); } } } @@ -829,8 +826,8 @@ struct CmdProfileDiffClosures : virtual StoreCommand, MixDefaultProfile std::string doc() override { return - #include "profile-diff-closures.md" - ; +#include "profile-diff-closures.md" + ; } void run(ref store) override @@ -842,13 +839,12 @@ struct CmdProfileDiffClosures : virtual StoreCommand, MixDefaultProfile for (auto & gen : gens) { if (prevGen) { - if (!first) logger->cout(""); + if (!first) + logger->cout(""); first = false; logger->cout("Version %d -> %d:", prevGen->number, gen.number); - printClosureDiff(store, - store->followLinksToStorePath(prevGen->path), - store->followLinksToStorePath(gen.path), - " "); + printClosureDiff( + store, store->followLinksToStorePath(prevGen->path), store->followLinksToStorePath(gen.path), " "); } prevGen = gen; @@ -866,8 +862,8 @@ struct CmdProfileHistory : virtual StoreCommand, EvalCommand, MixDefaultProfile std::string doc() override { return - #include "profile-history.md" - ; +#include "profile-history.md" + ; } void run(ref store) override @@ -880,19 +876,18 @@ struct CmdProfileHistory : virtual StoreCommand, EvalCommand, MixDefaultProfile for (auto & gen : gens) { ProfileManifest manifest(*getEvalState(), gen.path); - if (!first) logger->cout(""); + if (!first) + logger->cout(""); first = false; - logger->cout("Version %s%d" ANSI_NORMAL " (%s)%s:", + logger->cout( + "Version %s%d" ANSI_NORMAL " (%s)%s:", gen.number == curGen ? ANSI_GREEN : ANSI_BOLD, gen.number, std::put_time(std::gmtime(&gen.creationTime), "%Y-%m-%d"), prevGen ? fmt(" <- %d", prevGen->first.number) : ""); - ProfileManifest::printDiff( - prevGen ? prevGen->second : ProfileManifest(), - manifest, - " "); + ProfileManifest::printDiff(prevGen ? prevGen->second : ProfileManifest(), manifest, " "); prevGen = {gen, std::move(manifest)}; } @@ -921,8 +916,8 @@ struct CmdProfileRollback : virtual StoreCommand, MixDefaultProfile, MixDryRun std::string doc() override { return - #include "profile-rollback.md" - ; +#include "profile-rollback.md" + ; } void run(ref store) override @@ -939,10 +934,9 @@ struct CmdProfileWipeHistory : virtual StoreCommand, MixDefaultProfile, MixDryRu { addFlag({ .longName = "older-than", - .description = - "Delete versions older than the specified age. *age* " - "must be in the format *N*`d`, where *N* denotes a number " - "of days.", + .description = "Delete versions older than the specified age. 
*age* " + "must be in the format *N*`d`, where *N* denotes a number " + "of days.", .labels = {"age"}, .handler = {&minAge}, }); @@ -956,8 +950,8 @@ struct CmdProfileWipeHistory : virtual StoreCommand, MixDefaultProfile, MixDryRu std::string doc() override { return - #include "profile-wipe-history.md" - ; +#include "profile-wipe-history.md" + ; } void run(ref store) override @@ -974,20 +968,20 @@ struct CmdProfile : NixMultiCommand { CmdProfile() : NixMultiCommand( - "profile", - { - {"add", []() { return make_ref(); }}, - {"remove", []() { return make_ref(); }}, - {"upgrade", []() { return make_ref(); }}, - {"list", []() { return make_ref(); }}, - {"diff-closures", []() { return make_ref(); }}, - {"history", []() { return make_ref(); }}, - {"rollback", []() { return make_ref(); }}, - {"wipe-history", []() { return make_ref(); }}, - }) + "profile", + { + {"add", []() { return make_ref(); }}, + {"remove", []() { return make_ref(); }}, + {"upgrade", []() { return make_ref(); }}, + {"list", []() { return make_ref(); }}, + {"diff-closures", []() { return make_ref(); }}, + {"history", []() { return make_ref(); }}, + {"rollback", []() { return make_ref(); }}, + {"wipe-history", []() { return make_ref(); }}, + }) { aliases = { - {"install", { AliasStatus::Deprecated, {"add"}}}, + {"install", {AliasStatus::Deprecated, {"add"}}}, }; } @@ -999,8 +993,8 @@ struct CmdProfile : NixMultiCommand std::string doc() override { return - #include "profile.md" - ; +#include "profile.md" + ; } }; diff --git a/src/nix/realisation.cc b/src/nix/realisation.cc index f21567639..a0e400f54 100644 --- a/src/nix/realisation.cc +++ b/src/nix/realisation.cc @@ -7,15 +7,20 @@ using namespace nix; struct CmdRealisation : NixMultiCommand { - CmdRealisation() : NixMultiCommand("realisation", RegisterCommand::getCommandsFor({"realisation"})) - { } + CmdRealisation() + : NixMultiCommand("realisation", RegisterCommand::getCommandsFor({"realisation"})) + { + } std::string description() override { return "manipulate a Nix realisation"; } - Category category() override { return catUtility; } + Category category() override + { + return catUtility; + } }; static auto rCmdRealisation = registerCommand("realisation"); @@ -30,11 +35,14 @@ struct CmdRealisationInfo : BuiltPathsCommand, MixJSON std::string doc() override { return - #include "realisation/info.md" +#include "realisation/info.md" ; } - Category category() override { return catSecondary; } + Category category() override + { + return catSecondary; + } void run(ref store, BuiltPaths && paths, BuiltPaths && rootPaths) override { @@ -58,13 +66,10 @@ struct CmdRealisationInfo : BuiltPathsCommand, MixJSON res.push_back(currentPath); } printJSON(res); - } - else { + } else { for (auto & path : realisations) { if (auto realisation = std::get_if(&path.raw)) { - logger->cout("%s %s", - realisation->id.to_string(), - store->printStorePath(realisation->outPath)); + logger->cout("%s %s", realisation->id.to_string(), store->printStorePath(realisation->outPath)); } else logger->cout("%s", store->printStorePath(path.path())); } diff --git a/src/nix/registry.cc b/src/nix/registry.cc index 340d10ec4..d9fcf09fc 100644 --- a/src/nix/registry.cc +++ b/src/nix/registry.cc @@ -10,7 +10,6 @@ using namespace nix; using namespace nix::flake; - class RegistryCommand : virtual Args { std::string registry_path; @@ -31,7 +30,8 @@ public: std::shared_ptr getRegistry() { - if (registry) return registry; + if (registry) + return registry; if (registry_path.empty()) { registry = 
fetchers::getUserRegistry(fetchSettings); } else { @@ -60,8 +60,8 @@ struct CmdRegistryList : StoreCommand std::string doc() override { return - #include "registry-list.md" - ; +#include "registry-list.md" + ; } void run(nix::ref store) override @@ -73,11 +73,12 @@ struct CmdRegistryList : StoreCommand for (auto & registry : registries) { for (auto & entry : registry->entries) { // FIXME: format nicely - logger->cout("%s %s %s", - registry->type == Registry::Flag ? "flags " : - registry->type == Registry::User ? "user " : - registry->type == Registry::System ? "system" : - "global", + logger->cout( + "%s %s %s", + registry->type == Registry::Flag ? "flags " + : registry->type == Registry::User ? "user " + : registry->type == Registry::System ? "system" + : "global", entry.from.toURLString(), entry.to.toURLString(attrsToQuery(entry.extraAttrs))); } @@ -97,8 +98,8 @@ struct CmdRegistryAdd : MixEvalArgs, Command, RegistryCommand std::string doc() override { return - #include "registry-add.md" - ; +#include "registry-add.md" + ; } CmdRegistryAdd() @@ -113,7 +114,8 @@ struct CmdRegistryAdd : MixEvalArgs, Command, RegistryCommand auto toRef = parseFlakeRef(fetchSettings, toUrl); auto registry = getRegistry(); fetchers::Attrs extraAttrs; - if (toRef.subdir != "") extraAttrs["dir"] = toRef.subdir; + if (toRef.subdir != "") + extraAttrs["dir"] = toRef.subdir; registry->remove(fromRef.input); registry->add(fromRef.input, toRef.input, extraAttrs); registry->write(getRegistryPath()); @@ -132,8 +134,8 @@ struct CmdRegistryRemove : RegistryCommand, Command std::string doc() override { return - #include "registry-remove.md" - ; +#include "registry-remove.md" + ; } CmdRegistryRemove() @@ -163,27 +165,27 @@ struct CmdRegistryPin : RegistryCommand, EvalCommand std::string doc() override { return - #include "registry-pin.md" - ; +#include "registry-pin.md" + ; } CmdRegistryPin() { expectArg("url", &url); - expectArgs({ - .label = "locked", - .optional = true, - .handler = {&locked}, - .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { - completeFlakeRef(completions, getStore(), prefix); - }} - }); + expectArgs( + {.label = "locked", + .optional = true, + .handler = {&locked}, + .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { + completeFlakeRef(completions, getStore(), prefix); + }}}); } void run(nix::ref store) override { - if (locked.empty()) locked = url; + if (locked.empty()) + locked = url; auto registry = getRegistry(); auto ref = parseFlakeRef(fetchSettings, url); auto lockedRef = parseFlakeRef(fetchSettings, locked); @@ -192,7 +194,8 @@ struct CmdRegistryPin : RegistryCommand, EvalCommand if (!resolved.isLocked()) warn("flake '%s' is not locked", resolved.to_string()); fetchers::Attrs extraAttrs; - if (ref.subdir != "") extraAttrs["dir"] = ref.subdir; + if (ref.subdir != "") + extraAttrs["dir"] = ref.subdir; registry->add(ref.input, resolved, extraAttrs); registry->write(getRegistryPath()); } @@ -202,13 +205,13 @@ struct CmdRegistry : NixMultiCommand { CmdRegistry() : NixMultiCommand( - "registry", - { - {"list", []() { return make_ref(); }}, - {"add", []() { return make_ref(); }}, - {"remove", []() { return make_ref(); }}, - {"pin", []() { return make_ref(); }}, - }) + "registry", + { + {"list", []() { return make_ref(); }}, + {"add", []() { return make_ref(); }}, + {"remove", []() { return make_ref(); }}, + {"pin", []() { return make_ref(); }}, + }) { } @@ -220,11 +223,14 @@ struct CmdRegistry : NixMultiCommand std::string doc() 
override { return - #include "registry.md" - ; +#include "registry.md" + ; } - Category category() override { return catSecondary; } + Category category() override + { + return catSecondary; + } }; static auto rCmdRegistry = registerCommand("registry"); diff --git a/src/nix/repl.cc b/src/nix/repl.cc index ca470e99b..5dd53e932 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -11,26 +11,27 @@ namespace nix { -void runNix(Path program, const Strings & args, - const std::optional & input = {}) +void runNix(Path program, const Strings & args, const std::optional & input = {}) { auto subprocessEnv = getEnv(); subprocessEnv["NIX_CONFIG"] = globalConfig.toKeyValue(); - //isInteractive avoid grabling interactive commands - runProgram2(RunOptions { - .program = getNixBin(program).string(), - .args = args, - .environment = subprocessEnv, - .input = input, - .isInteractive = true, - }); + // isInteractive avoid grabling interactive commands + runProgram2( + RunOptions{ + .program = getNixBin(program).string(), + .args = args, + .environment = subprocessEnv, + .input = input, + .isInteractive = true, + }); return; } struct CmdRepl : RawInstallablesCommand { - CmdRepl() { + CmdRepl() + { evalSettings.pureEval = false; } @@ -62,8 +63,8 @@ struct CmdRepl : RawInstallablesCommand std::string doc() override { return - #include "repl.md" - ; +#include "repl.md" + ; } void applyDefaultInstallables(std::vector & rawInstallables) override @@ -76,13 +77,13 @@ struct CmdRepl : RawInstallablesCommand void run(ref store, std::vector && rawInstallables) override { auto state = getEvalState(); - auto getValues = [&]()->AbstractNixRepl::AnnotatedValues{ + auto getValues = [&]() -> AbstractNixRepl::AnnotatedValues { auto installables = parseInstallables(store, rawInstallables); AbstractNixRepl::AnnotatedValues values; - for (auto & installable_: installables){ + for (auto & installable_ : installables) { auto & installable = InstallableValue::require(*installable_); auto what = installable.what(); - if (file){ + if (file) { auto [val, pos] = installable.toValue(*state); auto what = installable.what(); state->forceValue(*val, pos); @@ -90,21 +91,15 @@ struct CmdRepl : RawInstallablesCommand auto valPost = state->allocValue(); state->autoCallFunction(*autoArgs, *val, *valPost); state->forceValue(*valPost, pos); - values.push_back( {valPost, what }); + values.push_back({valPost, what}); } else { auto [val, pos] = installable.toValue(*state); - values.push_back( {val, what} ); + values.push_back({val, what}); } } return values; }; - auto repl = AbstractNixRepl::create( - lookupPath, - openStore(), - state, - getValues, - runNix - ); + auto repl = AbstractNixRepl::create(lookupPath, openStore(), state, getValues, runNix); repl->autoArgs = getAutoArgs(*repl->state); repl->initEnv(); repl->mainLoop(); @@ -113,4 +108,4 @@ struct CmdRepl : RawInstallablesCommand static auto rCmdRepl = registerCommand("repl"); -} +} // namespace nix diff --git a/src/nix/run.cc b/src/nix/run.cc index 3dae8ebc9..bde2cacd8 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -14,15 +14,17 @@ #include #ifdef __linux__ -# include -# include "nix/store/personality.hh" +# include +# include "nix/store/personality.hh" #endif #include extern char ** environ __attribute__((weak)); -namespace nix::fs { using namespace std::filesystem; } +namespace nix::fs { +using namespace std::filesystem; +} using namespace nix; @@ -41,7 +43,8 @@ Strings toEnvp(StringMap env) return envStrs; } -void execProgramInStore(ref store, +void execProgramInStore( + ref store, 
UseLookupPath useLookupPath, const std::string & program, const Strings & args, @@ -50,7 +53,7 @@ void execProgramInStore(ref store, { logger->stop(); - char **envp; + char ** envp; Strings envStrs; std::vector envCharPtrs; if (env.has_value()) { @@ -77,8 +80,10 @@ void execProgramInStore(ref store, throw Error("store '%s' is not a local store so it does not support command execution", store->getUri()); if (store->storeDir != store2->getRealStoreDir()) { - Strings helperArgs = { chrootHelperName, store->storeDir, store2->getRealStoreDir(), std::string(system.value_or("")), program }; - for (auto & arg : args) helperArgs.push_back(arg); + Strings helperArgs = { + chrootHelperName, store->storeDir, store2->getRealStoreDir(), std::string(system.value_or("")), program}; + for (auto & arg : args) + helperArgs.push_back(arg); execve(getSelfExe().value_or("nix").c_str(), stringsToCharPtrs(helperArgs).data(), envp); @@ -100,7 +105,7 @@ void execProgramInStore(ref store, throw SysError("unable to execute '%s'", program); } -} +} // namespace nix struct CmdRun : InstallableValueCommand, MixEnvironment { @@ -110,11 +115,7 @@ struct CmdRun : InstallableValueCommand, MixEnvironment CmdRun() { - expectArgs({ - .label = "args", - .handler = {&args}, - .completer = completePath - }); + expectArgs({.label = "args", .handler = {&args}, .completer = completePath}); } std::string description() override @@ -125,8 +126,8 @@ struct CmdRun : InstallableValueCommand, MixEnvironment std::string doc() override { return - #include "run.md" - ; +#include "run.md" + ; } Strings getDefaultFlakeAttrPaths() override @@ -156,7 +157,8 @@ struct CmdRun : InstallableValueCommand, MixEnvironment auto app = installable->toApp(*state).resolve(getEvalStore(), store); Strings allArgs{app.program}; - for (auto & i : args) allArgs.push_back(i); + for (auto & i : args) + allArgs.push_back(i); // Release our references to eval caches to ensure they are persisted to disk, because // we are about to exec out of this process without running C++ destructors. 
@@ -170,7 +172,7 @@ struct CmdRun : InstallableValueCommand, MixEnvironment static auto rCmdRun = registerCommand("run"); -void chrootHelper(int argc, char * * argv) +void chrootHelper(int argc, char ** argv) { int p = 1; std::string storeDir = argv[p++]; @@ -211,7 +213,8 @@ void chrootHelper(int argc, char * * argv) checkInterrupt(); const auto & src = entry.path(); std::filesystem::path dst = tmpDir / entry.path().filename(); - if (pathExists(dst)) continue; + if (pathExists(dst)) + continue; auto st = entry.symlink_status(); if (std::filesystem::is_directory(st)) { if (mkdir(dst.c_str(), 0700) == -1) @@ -223,7 +226,8 @@ void chrootHelper(int argc, char * * argv) } char * cwd = getcwd(0, 0); - if (!cwd) throw SysError("getting current directory"); + if (!cwd) + throw SysError("getting current directory"); Finally freeCwd([&]() { free(cwd); }); if (chroot(tmpDir.c_str()) == -1) @@ -231,19 +235,20 @@ void chrootHelper(int argc, char * * argv) if (chdir(cwd) == -1) throw SysError("chdir to '%s' in chroot", cwd); - } else - if (mount("overlay", storeDir.c_str(), "overlay", MS_MGC_VAL, fmt("lowerdir=%s:%s", storeDir, realStoreDir).c_str()) == -1) - if (mount(realStoreDir.c_str(), storeDir.c_str(), "", MS_BIND, 0) == -1) - throw SysError("mounting '%s' on '%s'", realStoreDir, storeDir); + } else if ( + mount("overlay", storeDir.c_str(), "overlay", MS_MGC_VAL, fmt("lowerdir=%s:%s", storeDir, realStoreDir).c_str()) + == -1) + if (mount(realStoreDir.c_str(), storeDir.c_str(), "", MS_BIND, 0) == -1) + throw SysError("mounting '%s' on '%s'", realStoreDir, storeDir); writeFile(std::filesystem::path{"/proc/self/setgroups"}, "deny"); writeFile(std::filesystem::path{"/proc/self/uid_map"}, fmt("%d %d %d", uid, uid, 1)); writeFile(std::filesystem::path{"/proc/self/gid_map"}, fmt("%d %d %d", gid, gid, 1)); -#ifdef __linux__ +# ifdef __linux__ if (system != "") linux::setPersonality(system); -#endif +# endif execvp(cmd.c_str(), stringsToCharPtrs(args).data()); diff --git a/src/nix/run.hh b/src/nix/run.hh index 5367c515c..cfee02a66 100644 --- a/src/nix/run.hh +++ b/src/nix/run.hh @@ -5,16 +5,14 @@ namespace nix { -enum struct UseLookupPath { - Use, - DontUse -}; +enum struct UseLookupPath { Use, DontUse }; -void execProgramInStore(ref store, +void execProgramInStore( + ref store, UseLookupPath useLookupPath, const std::string & program, const Strings & args, std::optional system = std::nullopt, std::optional env = std::nullopt); -} +} // namespace nix diff --git a/src/nix/search.cc b/src/nix/search.cc index 306a80594..562af3151 100644 --- a/src/nix/search.cc +++ b/src/nix/search.cc @@ -34,15 +34,14 @@ struct CmdSearch : InstallableValueCommand, MixJSON CmdSearch() { expectArgs("regex", &res); - addFlag(Flag { - .longName = "exclude", - .shortName = 'e', - .description = "Hide packages whose attribute path, name or description contain *regex*.", - .labels = {"regex"}, - .handler = {[this](std::string s) { - excludeRes.push_back(s); - }}, - }); + addFlag( + Flag{ + .longName = "exclude", + .shortName = 'e', + .description = "Hide packages whose attribute path, name or description contain *regex*.", + .labels = {"regex"}, + .handler = {[this](std::string s) { excludeRes.push_back(s); }}, + }); } std::string description() override @@ -53,16 +52,13 @@ struct CmdSearch : InstallableValueCommand, MixJSON std::string doc() override { return - #include "search.md" - ; +#include "search.md" + ; } Strings getDefaultFlakeAttrPaths() override { - return { - "packages." + settings.thisSystem.get(), - "legacyPackages." 
+ settings.thisSystem.get() - }; + return {"packages." + settings.thisSystem.get(), "legacyPackages." + settings.thisSystem.get()}; } void run(ref store, ref installable) override @@ -72,7 +68,8 @@ struct CmdSearch : InstallableValueCommand, MixJSON // Recommend "^" here instead of ".*" due to differences in resulting highlighting if (res.empty()) - throw UsageError("Must provide at least one regex! To match all packages, use '%s'.", "nix search ^"); + throw UsageError( + "Must provide at least one regex! To match all packages, use '%s'.", "nix search ^"); std::vector regexes; std::vector excludeRegexes; @@ -88,21 +85,20 @@ struct CmdSearch : InstallableValueCommand, MixJSON auto state = getEvalState(); std::optional jsonOut; - if (json) jsonOut = json::object(); + if (json) + jsonOut = json::object(); uint64_t results = 0; - std::function & attrPath, bool initialRecurse)> visit; + std::function & attrPath, bool initialRecurse)> + visit; - visit = [&](eval_cache::AttrCursor & cursor, const std::vector & attrPath, bool initialRecurse) - { + visit = [&](eval_cache::AttrCursor & cursor, const std::vector & attrPath, bool initialRecurse) { auto attrPathS = state->symbols.resolve(attrPath); - Activity act(*logger, lvlInfo, actUnknown, - fmt("evaluating '%s'", concatStringsSep(".", attrPathS))); + Activity act(*logger, lvlInfo, actUnknown, fmt("evaluating '%s'", concatStringsSep(".", attrPathS))); try { - auto recurse = [&]() - { + auto recurse = [&]() { for (const auto & attr : cursor.getAttrs()) { auto cursor2 = cursor.getAttr(state->symbols[attr]); auto attrPath2(attrPath); @@ -126,9 +122,7 @@ struct CmdSearch : InstallableValueCommand, MixJSON bool found = false; for (auto & regex : excludeRegexes) { - if ( - std::regex_search(attrPath2, regex) - || std::regex_search(name.name, regex) + if (std::regex_search(attrPath2, regex) || std::regex_search(name.name, regex) || std::regex_search(description, regex)) return; } @@ -151,8 +145,7 @@ struct CmdSearch : InstallableValueCommand, MixJSON break; } - if (found) - { + if (found) { results++; if (json) { (*jsonOut)[attrPath2] = { @@ -161,7 +154,8 @@ struct CmdSearch : InstallableValueCommand, MixJSON {"description", description}, }; } else { - if (results > 1) logger->cout(""); + if (results > 1) + logger->cout(""); logger->cout( "* %s%s", wrap("\e[0;1m", hiliteMatches(attrPath2, attrPathMatches, ANSI_GREEN, "\e[0;1m")), @@ -174,8 +168,7 @@ struct CmdSearch : InstallableValueCommand, MixJSON } else if ( - attrPath.size() == 0 - || (attrPathS[0] == "legacyPackages" && attrPath.size() <= 2) + attrPath.size() == 0 || (attrPathS[0] == "legacyPackages" && attrPath.size() <= 2) || (attrPathS[0] == "packages" && attrPath.size() <= 2)) recurse(); diff --git a/src/nix/self-exe.cc b/src/nix/self-exe.cc index b5eb1190d..36f6e17ec 100644 --- a/src/nix/self-exe.cc +++ b/src/nix/self-exe.cc @@ -36,4 +36,4 @@ std::filesystem::path getNixBin(std::optional binaryNameOpt) return getBinaryName(); } -} +} // namespace nix diff --git a/src/nix/self-exe.hh b/src/nix/self-exe.hh index 91e260f0b..b02aff5af 100644 --- a/src/nix/self-exe.hh +++ b/src/nix/self-exe.hh @@ -30,4 +30,4 @@ namespace nix { */ std::filesystem::path getNixBin(std::optional binary_name = {}); -} +} // namespace nix diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc index fb868baa1..92bb00500 100644 --- a/src/nix/sigs.cc +++ b/src/nix/sigs.cc @@ -42,10 +42,10 @@ struct CmdCopySigs : StorePathsCommand std::atomic added{0}; - //logger->setExpected(doneLabel, storePaths.size()); + // 
logger->setExpected(doneLabel, storePaths.size()); auto doPath = [&](const Path & storePathS) { - //Activity act(*logger, lvlInfo, "getting signatures for '%s'", storePath); + // Activity act(*logger, lvlInfo, "getting signatures for '%s'", storePath); checkInterrupt(); @@ -61,9 +61,8 @@ struct CmdCopySigs : StorePathsCommand /* Don't import signatures that don't match this binary. */ - if (info->narHash != info2->narHash || - info->narSize != info2->narSize || - info->references != info2->references) + if (info->narHash != info2->narHash || info->narSize != info2->narSize + || info->references != info2->references) continue; for (auto & sig : info2->sigs) @@ -78,7 +77,7 @@ struct CmdCopySigs : StorePathsCommand added += newSigs.size(); } - //logger->incProgress(doneLabel); + // logger->incProgress(doneLabel); }; for (auto & storePath : storePaths) @@ -165,8 +164,8 @@ struct CmdKeyGenerateSecret : Command std::string doc() override { return - #include "key-generate-secret.md" - ; +#include "key-generate-secret.md" + ; } void run() override @@ -189,8 +188,8 @@ struct CmdKeyConvertSecretToPublic : Command std::string doc() override { return - #include "key-convert-secret-to-public.md" - ; +#include "key-convert-secret-to-public.md" + ; } void run() override @@ -205,11 +204,11 @@ struct CmdKey : NixMultiCommand { CmdKey() : NixMultiCommand( - "key", - { - {"generate-secret", []() { return make_ref(); }}, - {"convert-secret-to-public", []() { return make_ref(); }}, - }) + "key", + { + {"generate-secret", []() { return make_ref(); }}, + {"convert-secret-to-public", []() { return make_ref(); }}, + }) { } @@ -218,7 +217,10 @@ struct CmdKey : NixMultiCommand return "generate and convert Nix signing keys"; } - Category category() override { return catUtility; } + Category category() override + { + return catUtility; + } }; static auto rCmdKey = registerCommand("key"); diff --git a/src/nix/store-copy-log.cc b/src/nix/store-copy-log.cc index 599b40edc..6e442f371 100644 --- a/src/nix/store-copy-log.cc +++ b/src/nix/store-copy-log.cc @@ -20,8 +20,8 @@ struct CmdCopyLog : virtual CopyCommand, virtual InstallablesCommand std::string doc() override { return - #include "store-copy-log.md" - ; +#include "store-copy-log.md" + ; } void run(ref srcStore, Installables && installables) override diff --git a/src/nix/store-delete.cc b/src/nix/store-delete.cc index fae960c90..42517c882 100644 --- a/src/nix/store-delete.cc +++ b/src/nix/store-delete.cc @@ -9,7 +9,7 @@ using namespace nix; struct CmdStoreDelete : StorePathsCommand { - GCOptions options { .action = GCOptions::gcDeleteSpecific }; + GCOptions options{.action = GCOptions::gcDeleteSpecific}; CmdStoreDelete() { @@ -28,8 +28,8 @@ struct CmdStoreDelete : StorePathsCommand std::string doc() override { return - #include "store-delete.md" - ; +#include "store-delete.md" + ; } void run(ref store, StorePaths && storePaths) override diff --git a/src/nix/store-gc.cc b/src/nix/store-gc.cc index c71e89233..b0a627837 100644 --- a/src/nix/store-gc.cc +++ b/src/nix/store-gc.cc @@ -29,8 +29,8 @@ struct CmdStoreGC : StoreCommand, MixDryRun std::string doc() override { return - #include "store-gc.md" - ; +#include "store-gc.md" + ; } void run(ref store) override diff --git a/src/nix/store-info.cc b/src/nix/store-info.cc index c4c63ae3a..2132dc465 100644 --- a/src/nix/store-info.cc +++ b/src/nix/store-info.cc @@ -17,8 +17,8 @@ struct CmdInfoStore : StoreCommand, MixJSON std::string doc() override { return - #include "store-info.md" - ; +#include "store-info.md" + ; } void 
run(ref store) override @@ -32,9 +32,7 @@ struct CmdInfoStore : StoreCommand, MixJSON notice("Trusted: %s", *trusted); } else { nlohmann::json res; - Finally printRes([&]() { - printJSON(res); - }); + Finally printRes([&]() { printJSON(res); }); res["url"] = store->getUri(); store->connect(); diff --git a/src/nix/store-repair.cc b/src/nix/store-repair.cc index edd699981..cd243691c 100644 --- a/src/nix/store-repair.cc +++ b/src/nix/store-repair.cc @@ -13,8 +13,8 @@ struct CmdStoreRepair : StorePathsCommand std::string doc() override { return - #include "store-repair.md" - ; +#include "store-repair.md" + ; } void run(ref store, StorePaths && storePaths) override diff --git a/src/nix/store.cc b/src/nix/store.cc index 80f9363ca..45e505d06 100644 --- a/src/nix/store.cc +++ b/src/nix/store.cc @@ -4,10 +4,11 @@ using namespace nix; struct CmdStore : NixMultiCommand { - CmdStore() : NixMultiCommand("store", RegisterCommand::getCommandsFor({"store"})) + CmdStore() + : NixMultiCommand("store", RegisterCommand::getCommandsFor({"store"})) { aliases = { - {"ping", { AliasStatus::Deprecated, {"info"}}}, + {"ping", {AliasStatus::Deprecated, {"info"}}}, }; } @@ -16,7 +17,10 @@ struct CmdStore : NixMultiCommand return "manipulate a Nix store"; } - Category category() override { return catUtility; } + Category category() override + { + return catUtility; + } }; static auto rCmdStore = registerCommand("store"); diff --git a/src/nix/unix/daemon.cc b/src/nix/unix/daemon.cc index a14632c2f..cb105a385 100644 --- a/src/nix/unix/daemon.cc +++ b/src/nix/unix/daemon.cc @@ -36,11 +36,11 @@ #include #ifdef __linux__ -#include "nix/util/cgroup.hh" +# include "nix/util/cgroup.hh" #endif #if defined(__APPLE__) || defined(__FreeBSD__) -#include +# include #endif using namespace nix; @@ -59,10 +59,13 @@ using namespace nix::daemon; * exposed in a header); all authentication and authorization happens in * `daemon.cc`. */ -struct AuthorizationSettings : Config { +struct AuthorizationSettings : Config +{ Setting trustedUsers{ - this, {"root"}, "trusted-users", + this, + {"root"}, + "trusted-users", R"( A list of user names, separated by whitespace. These users will have additional rights when connecting to the Nix daemon, such as the ability to specify additional [substituters](#conf-substituters), or to import unsigned realisations or unsigned input-addressed store objects. @@ -80,7 +83,9 @@ struct AuthorizationSettings : Config { * Who we trust to use the daemon in safe ways */ Setting allowedUsers{ - this, {"*"}, "allowed-users", + this, + {"*"}, + "allowed-users", R"( A list user names, separated by whitespace. These users are allowed to connect to the Nix daemon. @@ -100,8 +105,9 @@ AuthorizationSettings authorizationSettings; static GlobalConfig::Register rSettings(&authorizationSettings); #ifndef __linux__ -#define SPLICE_F_MOVE 0 -static ssize_t splice(int fd_in, void *off_in, int fd_out, void *off_out, size_t len, unsigned int flags) +# define SPLICE_F_MOVE 0 + +static ssize_t splice(int fd_in, void * off_in, int fd_out, void * off_out, size_t len, unsigned int flags) { // We ignore most parameters, we just have them for conformance with the linux syscall std::vector buf(8192); @@ -119,17 +125,16 @@ static ssize_t splice(int fd_in, void *off_in, int fd_out, void *off_out, size_t } #endif - static void sigChldHandler(int sigNo) { // Ensure we don't modify errno of whatever we've interrupted auto saved_errno = errno; // Reap all dead children. 
- while (waitpid(-1, 0, WNOHANG) > 0) ; + while (waitpid(-1, 0, WNOHANG) > 0) + ; errno = saved_errno; } - static void setSigChldAction(bool autoReap) { struct sigaction act, oact; @@ -149,12 +154,12 @@ static void setSigChldAction(bool autoReap) */ static bool matchUser(std::string_view user, const struct group & gr) { - for (char * * mem = gr.gr_mem; *mem; mem++) - if (user == std::string_view(*mem)) return true; + for (char ** mem = gr.gr_mem; *mem; mem++) + if (user == std::string_view(*mem)) + return true; return false; } - /** * Does the given user (specified by user name and primary group name) * match the given user/group whitelist? @@ -179,16 +184,18 @@ static bool matchUser(const std::string & user, const std::string & group, const for (auto & i : users) if (i.substr(0, 1) == "@") { - if (group == i.substr(1)) return true; + if (group == i.substr(1)) + return true; struct group * gr = getgrnam(i.c_str() + 1); - if (!gr) continue; - if (matchUser(user, *gr)) return true; + if (!gr) + continue; + if (matchUser(user, *gr)) + return true; } return false; } - struct PeerInfo { bool pidKnown; @@ -199,47 +206,44 @@ struct PeerInfo gid_t gid; }; - /** * Get the identity of the caller, if possible. */ static PeerInfo getPeerInfo(int remote) { - PeerInfo peer = { false, 0, false, 0, false, 0 }; + PeerInfo peer = {false, 0, false, 0, false, 0}; #if defined(SO_PEERCRED) -# if defined(__OpenBSD__) - struct sockpeercred cred; -# else - ucred cred; -# endif +# if defined(__OpenBSD__) + struct sockpeercred cred; +# else + ucred cred; +# endif socklen_t credLen = sizeof(cred); if (getsockopt(remote, SOL_SOCKET, SO_PEERCRED, &cred, &credLen) == -1) throw SysError("getting peer credentials"); - peer = { true, cred.pid, true, cred.uid, true, cred.gid }; + peer = {true, cred.pid, true, cred.uid, true, cred.gid}; #elif defined(LOCAL_PEERCRED) -# if !defined(SOL_LOCAL) -# define SOL_LOCAL 0 -# endif +# if !defined(SOL_LOCAL) +# define SOL_LOCAL 0 +# endif xucred cred; socklen_t credLen = sizeof(cred); if (getsockopt(remote, SOL_LOCAL, LOCAL_PEERCRED, &cred, &credLen) == -1) throw SysError("getting peer credentials"); - peer = { false, 0, true, cred.cr_uid, false, 0 }; + peer = {false, 0, true, cred.cr_uid, false, 0}; #endif return peer; } - #define SD_LISTEN_FDS_START 3 - /** * Open a store without a path info cache. */ @@ -281,10 +285,9 @@ static std::pair authPeer(const PeerInfo & peer) if ((!trusted && !matchUser(user, group, allowedUsers)) || group == settings.buildUsersGroup) throw Error("user '%1%' is not allowed to connect to the Nix daemon", user); - return { trusted, std::move(user) }; + return {trusted, std::move(user)}; } - /** * Run a server. The loop opens a socket and accepts new connections from that * socket. @@ -318,7 +321,7 @@ static void daemonLoop(std::optional forceTrustClientOpt) // Get rid of children automatically; don't let them become zombies. setSigChldAction(true); - #ifdef __linux__ +#ifdef __linux__ if (settings.useCgroups) { experimentalFeatureSettings.require(Xp::Cgroups); @@ -337,7 +340,7 @@ static void daemonLoop(std::optional forceTrustClientOpt) // Move daemon into the new cgroup. writeFile(daemonCgroupPath + "/cgroup.procs", fmt("%d", getpid())); } - #endif +#endif // Loop accepting connections. 
while (1) { @@ -347,17 +350,17 @@ static void daemonLoop(std::optional forceTrustClientOpt) struct sockaddr_un remoteAddr; socklen_t remoteAddrLen = sizeof(remoteAddr); - AutoCloseFD remote = accept(fdSocket.get(), - (struct sockaddr *) &remoteAddr, &remoteAddrLen); + AutoCloseFD remote = accept(fdSocket.get(), (struct sockaddr *) &remoteAddr, &remoteAddrLen); checkInterrupt(); if (!remote) { - if (errno == EINTR) continue; + if (errno == EINTR) + continue; throw SysError("accepting connection"); } unix::closeOnExec(remote.get()); - PeerInfo peer { .pidKnown = false }; + PeerInfo peer{.pidKnown = false}; TrustedFlag trusted; std::string user; @@ -370,7 +373,8 @@ static void daemonLoop(std::optional forceTrustClientOpt) user = _user; }; - printInfo((std::string) "accepted connection from pid %1%, user %2%" + (trusted ? " (trusted)" : ""), + printInfo( + (std::string) "accepted connection from pid %1%, user %2%" + (trusted ? " (trusted)" : ""), peer.pidKnown ? std::to_string(peer.pid) : "", peer.uidKnown ? user : ""); @@ -380,32 +384,30 @@ static void daemonLoop(std::optional forceTrustClientOpt) options.dieWithParent = false; options.runExitHandlers = true; options.allowVfork = false; - startProcess([&]() { - fdSocket = -1; + startProcess( + [&]() { + fdSocket = -1; - // Background the daemon. - if (setsid() == -1) - throw SysError("creating a new session"); + // Background the daemon. + if (setsid() == -1) + throw SysError("creating a new session"); - // Restore normal handling of SIGCHLD. - setSigChldAction(false); + // Restore normal handling of SIGCHLD. + setSigChldAction(false); - // For debugging, stuff the pid into argv[1]. - if (peer.pidKnown && savedArgv[1]) { - auto processName = std::to_string(peer.pid); - strncpy(savedArgv[1], processName.c_str(), strlen(savedArgv[1])); - } + // For debugging, stuff the pid into argv[1]. + if (peer.pidKnown && savedArgv[1]) { + auto processName = std::to_string(peer.pid); + strncpy(savedArgv[1], processName.c_str(), strlen(savedArgv[1])); + } - // Handle the connection. - processConnection( - openUncachedStore(), - FdSource(remote.get()), - FdSink(remote.get()), - trusted, - NotRecursive); + // Handle the connection. + processConnection( + openUncachedStore(), FdSource(remote.get()), FdSink(remote.get()), trusted, NotRecursive); - exit(0); - }, options); + exit(0); + }, + options); } catch (Interrupted & e) { return; @@ -426,7 +428,8 @@ static void daemonLoop(std::optional forceTrustClientOpt) * * Loops until standard input disconnects, or an error is encountered. 
*/ -static void forwardStdioConnection(RemoteStore & store) { +static void forwardStdioConnection(RemoteStore & store) +{ auto conn = store.openConnectionWrapper(); int from = conn->from.fd; int to = conn->to.fd; @@ -467,11 +470,7 @@ static void forwardStdioConnection(RemoteStore & store) { */ static void processStdioConnection(ref store, TrustedFlag trustClient) { - processConnection( - store, - FdSource(STDIN_FILENO), - FdSink(STDOUT_FILENO), - trustClient, NotRecursive); + processConnection(store, FdSource(STDIN_FILENO), FdSink(STDOUT_FILENO), trustClient, NotRecursive); } /** @@ -507,7 +506,7 @@ static void runDaemon(bool stdio, std::optional forceTrustClientOpt daemonLoop(forceTrustClientOpt); } -static int main_nix_daemon(int argc, char * * argv) +static int main_nix_daemon(int argc, char ** argv) { { auto stdio = false; @@ -535,7 +534,8 @@ static int main_nix_daemon(int argc, char * * argv) } else if (*arg == "--process-ops") { experimentalFeatureSettings.require(Xp::MountedSSHStore); processOps = true; - } else return false; + } else + return false; return true; }); @@ -564,27 +564,22 @@ struct CmdDaemon : Command addFlag({ .longName = "force-trusted", .description = "Force the daemon to trust connecting clients.", - .handler = {[&]() { - isTrustedOpt = Trusted; - }}, + .handler = {[&]() { isTrustedOpt = Trusted; }}, .experimentalFeature = Xp::DaemonTrustOverride, }); addFlag({ .longName = "force-untrusted", - .description = "Force the daemon to not trust connecting clients. The connection is processed by the receiving daemon before forwarding commands.", - .handler = {[&]() { - isTrustedOpt = NotTrusted; - }}, + .description = + "Force the daemon to not trust connecting clients. The connection is processed by the receiving daemon before forwarding commands.", + .handler = {[&]() { isTrustedOpt = NotTrusted; }}, .experimentalFeature = Xp::DaemonTrustOverride, }); addFlag({ .longName = "default-trust", .description = "Use Nix's default trust.", - .handler = {[&]() { - isTrustedOpt = std::nullopt; - }}, + .handler = {[&]() { isTrustedOpt = std::nullopt; }}, .experimentalFeature = Xp::DaemonTrustOverride, }); @@ -595,9 +590,7 @@ struct CmdDaemon : Command This is useful for the `mounted-ssh://` store where some actions need to be performed on the remote end but as connected user, and not as the user of the underlying daemon on the remote end. 
)", - .handler = {[&]() { - processOps = true; - }}, + .handler = {[&]() { processOps = true; }}, .experimentalFeature = Xp::MountedSSHStore, }); } @@ -607,13 +600,16 @@ struct CmdDaemon : Command return "daemon to perform store operations on behalf of non-root clients"; } - Category category() override { return catUtility; } + Category category() override + { + return catUtility; + } std::string doc() override { return - #include "daemon.md" - ; +#include "daemon.md" + ; } void run() override diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc index 648241104..3037d1986 100644 --- a/src/nix/upgrade-nix.cc +++ b/src/nix/upgrade-nix.cc @@ -30,7 +30,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand .longName = "nix-store-paths-url", .description = "The URL of the file that contains the store paths of the latest Nix release.", .labels = {"url"}, - .handler = {&(std::string&) settings.upgradeNixStorePathUrl}, + .handler = {&(std::string &) settings.upgradeNixStorePathUrl}, }); } @@ -50,11 +50,14 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand std::string doc() override { return - #include "upgrade-nix.md" - ; +#include "upgrade-nix.md" + ; } - Category category() override { return catNixInstallation; } + Category category() override + { + return catNixInstallation; + } void run(ref store) override { @@ -81,7 +84,8 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand } { - Activity act(*logger, lvlInfo, actUnknown, fmt("verifying that '%s' works...", store->printStorePath(storePath))); + Activity act( + *logger, lvlInfo, actUnknown, fmt("verifying that '%s' works...", store->printStorePath(storePath))); auto program = store->printStorePath(storePath) + "/bin/nix-env"; auto s = runProgram(program, false, {"--version"}); if (s.find("Nix") == std::string::npos) @@ -91,11 +95,16 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand logger->stop(); { - Activity act(*logger, lvlInfo, actUnknown, + Activity act( + *logger, + lvlInfo, + actUnknown, fmt("installing '%s' into profile %s...", store->printStorePath(storePath), profileDir)); // FIXME: don't call an external process. - runProgram(getNixBin("nix-env").string(), false, + runProgram( + getNixBin("nix-env").string(), + false, {"--profile", profileDir.string(), "-i", store->printStorePath(storePath), "--no-sandbox"}); } @@ -118,7 +127,8 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand auto profileDir = where.parent_path(); // Resolve profile to /nix/var/nix/profiles/ link. - while (canonPath(profileDir.string()).find("/profiles/") == std::string::npos && std::filesystem::is_symlink(profileDir)) + while (canonPath(profileDir.string()).find("/profiles/") == std::string::npos + && std::filesystem::is_symlink(profileDir)) profileDir = readLink(profileDir.string()); printInfo("found profile %s", profileDir); @@ -126,7 +136,9 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand Path userEnv = canonPath(profileDir.string(), true); if (std::filesystem::exists(profileDir / "manifest.json")) - throw Error("directory %s is managed by 'nix profile' and currently cannot be upgraded by 'nix upgrade-nix'", profileDir); + throw Error( + "directory %s is managed by 'nix profile' and currently cannot be upgraded by 'nix upgrade-nix'", + profileDir); if (!std::filesystem::exists(profileDir / "manifest.nix")) throw Error("directory %s does not appear to be part of a Nix profile", profileDir); @@ -143,7 +155,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand Activity act(*logger, lvlInfo, actUnknown, "querying latest Nix version"); // FIXME: use nixos.org? 
- auto req = FileTransferRequest((std::string&) settings.upgradeNixStorePathUrl); + auto req = FileTransferRequest((std::string &) settings.upgradeNixStorePathUrl); auto res = getFileTransfer()->download(req); auto state = std::make_unique(LookupPath{}, store, fetchSettings, evalSettings); @@ -152,7 +164,8 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand Bindings & bindings(*state->allocBindings(0)); auto v2 = findAlongAttrPath(*state, settings.thisSystem, bindings, *v).first; - return store->parseStorePath(state->forceString(*v2, noPos, "while evaluating the path tho latest nix version")); + return store->parseStorePath( + state->forceString(*v2, noPos, "while evaluating the path tho latest nix version")); } }; diff --git a/src/nix/verify.cc b/src/nix/verify.cc index eb2cde93c..d5e9ab0d3 100644 --- a/src/nix/verify.cc +++ b/src/nix/verify.cc @@ -57,8 +57,8 @@ struct CmdVerify : StorePathsCommand std::string doc() override { return - #include "verify.md" - ; +#include "verify.md" + ; } void run(ref store, StorePaths && storePaths) override @@ -77,9 +77,7 @@ struct CmdVerify : StorePathsCommand std::atomic failed{0}; std::atomic active{0}; - auto update = [&]() { - act.progress(done, storePaths.size(), active, failed); - }; + auto update = [&]() { act.progress(done, storePaths.size(), active, failed); }; ThreadPool pool; @@ -108,7 +106,8 @@ struct CmdVerify : StorePathsCommand if (hash.first != info->narHash) { corrupted++; act2.result(resCorruptedPath, store->printStorePath(info->path)); - printError("path '%s' was modified! expected hash '%s', got '%s'", + printError( + "path '%s' was modified! expected hash '%s', got '%s'", store->printStorePath(info->path), info->narHash.to_string(HashFormat::Nix32, true), hash.first.to_string(HashFormat::Nix32, true)); @@ -130,21 +129,25 @@ struct CmdVerify : StorePathsCommand auto doSigs = [&](StringSet sigs) { for (const auto & sig : sigs) { - if (!sigsSeen.insert(sig).second) continue; + if (!sigsSeen.insert(sig).second) + continue; if (validSigs < ValidPathInfo::maxSigs && info->checkSignature(*store, publicKeys, sig)) validSigs++; } }; - if (info->isContentAddressed(*store)) validSigs = ValidPathInfo::maxSigs; + if (info->isContentAddressed(*store)) + validSigs = ValidPathInfo::maxSigs; doSigs(info->sigs); for (auto & store2 : substituters) { - if (validSigs >= actualSigsNeeded) break; + if (validSigs >= actualSigsNeeded) + break; try { auto info2 = store2->queryPathInfo(info->path); - if (info2->isContentAddressed(*store)) validSigs = ValidPathInfo::maxSigs; + if (info2->isContentAddressed(*store)) + validSigs = ValidPathInfo::maxSigs; doSigs(info2->sigs); } catch (InvalidPath &) { } catch (Error & e) { @@ -161,7 +164,6 @@ struct CmdVerify : StorePathsCommand act2.result(resUntrustedPath, store->printStorePath(info->path)); printError("path '%s' is untrusted", store->printStorePath(info->path)); } - } done++; @@ -179,10 +181,7 @@ struct CmdVerify : StorePathsCommand pool.process(); - throw Exit( - (corrupted ? 1 : 0) | - (untrusted ? 2 : 0) | - (failed ? 4 : 0)); + throw Exit((corrupted ? 1 : 0) | (untrusted ? 2 : 0) | (failed ? 
4 : 0)); } }; diff --git a/src/nix/why-depends.cc b/src/nix/why-depends.cc index 3aac45d34..7869e33a7 100644 --- a/src/nix/why-depends.cc +++ b/src/nix/why-depends.cc @@ -7,15 +7,9 @@ using namespace nix; -static std::string hilite(const std::string & s, size_t pos, size_t len, - const std::string & colour = ANSI_RED) +static std::string hilite(const std::string & s, size_t pos, size_t len, const std::string & colour = ANSI_RED) { - return - std::string(s, 0, pos) - + colour - + std::string(s, pos, len) - + ANSI_NORMAL - + std::string(s, pos + len); + return std::string(s, 0, pos) + colour + std::string(s, pos, len) + ANSI_NORMAL + std::string(s, pos + len); } static std::string filterPrintable(const std::string & s) @@ -49,13 +43,15 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions addFlag({ .longName = "all", .shortName = 'a', - .description = "Show all edges in the dependency graph leading from *package* to *dependency*, rather than just a shortest path.", + .description = + "Show all edges in the dependency graph leading from *package* to *dependency*, rather than just a shortest path.", .handler = {&all, true}, }); addFlag({ .longName = "precise", - .description = "For each edge in the dependency graph, show the files in the parent that cause the dependency.", + .description = + "For each edge in the dependency graph, show the files in the parent that cause the dependency.", .handler = {&precise, true}, }); } @@ -68,11 +64,14 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions std::string doc() override { return - #include "why-depends.md" - ; +#include "why-depends.md" + ; } - Category category() override { return catSecondary; } + Category category() override + { + return catSecondary; + } void run(ref store) override { @@ -127,11 +126,12 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions std::map graph; for (auto & path : closure) - graph.emplace(path, Node { - .path = path, - .refs = store->queryPathInfo(path)->references, - .dist = path == dependencyPath ? 0 : inf - }); + graph.emplace( + path, + Node{ + .path = path, + .refs = store->queryPathInfo(path)->references, + .dist = path == dependencyPath ? 0 : inf}); // Transpose the graph. for (auto & node : graph) @@ -159,7 +159,6 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions queue.push(&node2); } } - } } @@ -169,26 +168,29 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions and `dependency`. */ std::function printNode; - struct BailOut { }; + struct BailOut + {}; printNode = [&](Node & node, const std::string & firstPad, const std::string & tailPad) { CanonPath pathS(node.path.to_string()); assert(node.dist != inf); if (precise) { - logger->cout("%s%s%s%s" ANSI_NORMAL, + logger->cout( + "%s%s%s%s" ANSI_NORMAL, firstPad, node.visited ? "\e[38;5;244m" : "", firstPad != "" ? "→ " : "", pathS.abs()); } - if (node.path == dependencyPath && !all - && packagePath != dependencyPath) + if (node.path == dependencyPath && !all && packagePath != dependencyPath) throw BailOut(); - if (node.visited) return; - if (precise) node.visited = true; + if (node.visited) + return; + if (precise) + node.visited = true; /* Sort the references by distance to `dependency` to ensure that the shortest path is printed first. 
*/ @@ -196,9 +198,11 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions StringSet hashes; for (auto & ref : node.refs) { - if (ref == node.path && packagePath != dependencyPath) continue; + if (ref == node.path && packagePath != dependencyPath) + continue; auto & node2 = graph.at(ref); - if (node2.dist == inf) continue; + if (node2.dist == inf) + continue; refs.emplace(node2.dist, &node2); hashes.insert(std::string(node2.path.hashPart())); } @@ -233,11 +237,13 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions if (pos != std::string::npos) { size_t margin = 32; auto pos2 = pos >= margin ? pos - margin : 0; - hits[hash].emplace_back(fmt("%s: …%s…", + hits[hash].emplace_back( + fmt("%s: …%s…", p2, - hilite(filterPrintable( - std::string(contents, pos2, pos - pos2 + hash.size() + margin)), - pos - pos2, StorePath::HashLen, + hilite( + filterPrintable(std::string(contents, pos2, pos - pos2 + hash.size() + margin)), + pos - pos2, + StorePath::HashLen, getColour(hash)))); } } @@ -249,15 +255,16 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions for (auto & hash : hashes) { auto pos = target.find(hash); if (pos != std::string::npos) - hits[hash].emplace_back(fmt("%s -> %s", p2, - hilite(target, pos, StorePath::HashLen, getColour(hash)))); + hits[hash].emplace_back( + fmt("%s -> %s", p2, hilite(target, pos, StorePath::HashLen, getColour(hash)))); } } }; // FIXME: should use scanForReferences(). - if (precise) visitPath(pathS); + if (precise) + visitPath(pathS); for (auto & ref : refs) { std::string hash(ref.second->path.hashPart()); @@ -266,15 +273,16 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions for (auto & hit : hits[hash]) { bool first = hit == *hits[hash].begin(); - logger->cout("%s%s%s", tailPad, - (first ? (last ? treeLast : treeConn) : (last ? treeNull : treeLine)), - hit); - if (!all) break; + logger->cout( + "%s%s%s", tailPad, (first ? (last ? treeLast : treeConn) : (last ? treeNull : treeLine)), hit); + if (!all) + break; } if (!precise) { auto pathS = store->printStorePath(ref.second->path); - logger->cout("%s%s%s%s" ANSI_NORMAL, + logger->cout( + "%s%s%s%s" ANSI_NORMAL, firstPad, ref.second->visited ? "\e[38;5;244m" : "", last ? treeLast : treeConn, @@ -282,9 +290,7 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions node.visited = true; } - printNode(*ref.second, - tailPad + (last ? treeNull : treeLine), - tailPad + (last ? treeNull : treeLine)); + printNode(*ref.second, tailPad + (last ? treeNull : treeLine), tailPad + (last ? 
treeNull : treeLine)); } }; @@ -294,7 +300,8 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions logger->cout("%s", store->printStorePath(graph.at(packagePath).path)); } printNode(graph.at(packagePath), "", ""); - } catch (BailOut & ) { } + } catch (BailOut &) { + } } }; diff --git a/tests/functional/plugins/plugintest.cc b/tests/functional/plugins/plugintest.cc index 0b1a01a6e..e8f80a4aa 100644 --- a/tests/functional/plugins/plugintest.cc +++ b/tests/functional/plugins/plugintest.cc @@ -5,15 +5,14 @@ using namespace nix; struct MySettings : Config { - Setting settingSet{this, false, "setting-set", - "Whether the plugin-defined setting was set"}; + Setting settingSet{this, false, "setting-set", "Whether the plugin-defined setting was set"}; }; MySettings mySettings; static GlobalConfig::Register rs(&mySettings); -static void prim_anotherNull (EvalState & state, const PosIdx pos, Value ** args, Value & v) +static void prim_anotherNull(EvalState & state, const PosIdx pos, Value ** args, Value & v) { if (mySettings.settingSet) v.mkNull(); diff --git a/tests/functional/test-libstoreconsumer/main.cc b/tests/functional/test-libstoreconsumer/main.cc index 0dc5a5a46..a372886ea 100644 --- a/tests/functional/test-libstoreconsumer/main.cc +++ b/tests/functional/test-libstoreconsumer/main.cc @@ -5,7 +5,7 @@ using namespace nix; -int main (int argc, char **argv) +int main(int argc, char ** argv) { try { if (argc != 2) { @@ -21,12 +21,8 @@ int main (int argc, char **argv) // build the derivation - std::vector paths { - DerivedPath::Built { - .drvPath = makeConstantStorePathRef(store->parseStorePath(drvPath)), - .outputs = OutputsSpec::Names{"out"} - } - }; + std::vector paths{DerivedPath::Built{ + .drvPath = makeConstantStorePathRef(store->parseStorePath(drvPath)), .outputs = OutputsSpec::Names{"out"}}}; const auto results = store->buildPathsWithResults(paths, bmNormal, store); diff --git a/tests/nixos/ca-fd-leak/sender.c b/tests/nixos/ca-fd-leak/sender.c index 2ec79947a..639b88900 100644 --- a/tests/nixos/ca-fd-leak/sender.c +++ b/tests/nixos/ca-fd-leak/sender.c @@ -9,7 +9,8 @@ #include #include -int main(int argc, char **argv) { +int main(int argc, char ** argv) +{ assert(argc == 2); @@ -25,12 +26,12 @@ int main(int argc, char **argv) { // executed in, just busyloop here. int res = -1; while (res < 0) { - res = connect(sock, (const struct sockaddr *)&data, - offsetof(struct sockaddr_un, sun_path) - + strlen(argv[1]) - + 1); - if (res < 0 && errno != ECONNREFUSED) perror("connect"); - if (errno != ECONNREFUSED) break; + res = connect( + sock, (const struct sockaddr *) &data, offsetof(struct sockaddr_un, sun_path) + strlen(argv[1]) + 1); + if (res < 0 && errno != ECONNREFUSED) + perror("connect"); + if (errno != ECONNREFUSED) + break; } // Write our message header. @@ -39,27 +40,28 @@ int main(int argc, char **argv) { msg.msg_controllen = 128; // Write an SCM_RIGHTS message containing the output path. - struct cmsghdr *hdr = CMSG_FIRSTHDR(&msg); + struct cmsghdr * hdr = CMSG_FIRSTHDR(&msg); hdr->cmsg_len = CMSG_LEN(sizeof(int)); hdr->cmsg_level = SOL_SOCKET; hdr->cmsg_type = SCM_RIGHTS; int fd = open(getenv("out"), O_RDWR | O_CREAT, 0640); - memcpy(CMSG_DATA(hdr), (void *)&fd, sizeof(int)); + memcpy(CMSG_DATA(hdr), (void *) &fd, sizeof(int)); msg.msg_controllen = CMSG_SPACE(sizeof(int)); // Write a single null byte too. 
- msg.msg_iov = (struct iovec*) malloc(sizeof(struct iovec)); - msg.msg_iov[0].iov_base = (void*) ""; + msg.msg_iov = (struct iovec *) malloc(sizeof(struct iovec)); + msg.msg_iov[0].iov_base = (void *) ""; msg.msg_iov[0].iov_len = 1; msg.msg_iovlen = 1; // Send it to the othher side of this connection. res = sendmsg(sock, &msg, 0); - if (res < 0) perror("sendmsg"); + if (res < 0) + perror("sendmsg"); int buf; // Wait for the server to close the socket, implying that it has // received the commmand. - recv(sock, (void *)&buf, sizeof(int), 0); + recv(sock, (void *) &buf, sizeof(int), 0); } diff --git a/tests/nixos/ca-fd-leak/smuggler.c b/tests/nixos/ca-fd-leak/smuggler.c index 7279c48bf..655b8f8f1 100644 --- a/tests/nixos/ca-fd-leak/smuggler.c +++ b/tests/nixos/ca-fd-leak/smuggler.c @@ -7,7 +7,8 @@ #include #include -int main(int argc, char **argv) { +int main(int argc, char ** argv) +{ assert(argc == 2); @@ -18,21 +19,21 @@ int main(int argc, char **argv) { data.sun_family = AF_UNIX; data.sun_path[0] = 0; strncpy(data.sun_path + 1, argv[1], sizeof(data.sun_path) - 1); - int res = bind(sock, (const struct sockaddr *)&data, - offsetof(struct sockaddr_un, sun_path) - + strlen(argv[1]) - + 1); - if (res < 0) perror("bind"); + int res = bind(sock, (const struct sockaddr *) &data, offsetof(struct sockaddr_un, sun_path) + strlen(argv[1]) + 1); + if (res < 0) + perror("bind"); res = listen(sock, 1); - if (res < 0) perror("listen"); + if (res < 0) + perror("listen"); int smuggling_fd = -1; // Accept the connection a first time to receive the file descriptor. fprintf(stderr, "%s\n", "Waiting for the first connection"); int a = accept(sock, 0, 0); - if (a < 0) perror("accept"); + if (a < 0) + perror("accept"); struct msghdr msg = {0}; msg.msg_control = malloc(128); @@ -41,13 +42,12 @@ int main(int argc, char **argv) { // Receive the file descriptor as sent by the smuggler. recvmsg(a, &msg, 0); - struct cmsghdr *hdr = CMSG_FIRSTHDR(&msg); + struct cmsghdr * hdr = CMSG_FIRSTHDR(&msg); while (hdr) { - if (hdr->cmsg_level == SOL_SOCKET - && hdr->cmsg_type == SCM_RIGHTS) { + if (hdr->cmsg_level == SOL_SOCKET && hdr->cmsg_type == SCM_RIGHTS) { // Grab the copy of the file descriptor. 
- memcpy((void *)&smuggling_fd, CMSG_DATA(hdr), sizeof(int)); + memcpy((void *) &smuggling_fd, CMSG_DATA(hdr), sizeof(int)); } hdr = CMSG_NXTHDR(&msg, hdr); @@ -58,11 +58,14 @@ int main(int argc, char **argv) { // Wait for a second connection, which will tell us that the build is // done a = accept(sock, 0, 0); - if (a < 0) perror("accept"); + if (a < 0) + perror("accept"); fprintf(stderr, "%s\n", "Got a second connection, rewriting the file"); // Write a new content to the file - if (ftruncate(smuggling_fd, 0)) perror("ftruncate"); + if (ftruncate(smuggling_fd, 0)) + perror("ftruncate"); const char * new_content = "Pwned\n"; int written_bytes = write(smuggling_fd, new_content, strlen(new_content)); - if (written_bytes != strlen(new_content)) perror("write"); + if (written_bytes != strlen(new_content)) + perror("write"); } diff --git a/tests/nixos/user-sandboxing/attacker.c b/tests/nixos/user-sandboxing/attacker.c index 3bd729c04..3377a5fd0 100644 --- a/tests/nixos/user-sandboxing/attacker.c +++ b/tests/nixos/user-sandboxing/attacker.c @@ -9,74 +9,74 @@ #define SYS_fchmodat2 452 -int fchmodat2(int dirfd, const char *pathname, mode_t mode, int flags) { - return syscall(SYS_fchmodat2, dirfd, pathname, mode, flags); +int fchmodat2(int dirfd, const char * pathname, mode_t mode, int flags) +{ + return syscall(SYS_fchmodat2, dirfd, pathname, mode, flags); } -int main(int argc, char **argv) { - if (argc <= 1) { - // stage 1: place the setuid-builder executable +int main(int argc, char ** argv) +{ + if (argc <= 1) { + // stage 1: place the setuid-builder executable - // make the build directory world-accessible first - chmod(".", 0755); + // make the build directory world-accessible first + chmod(".", 0755); - if (fchmodat2(AT_FDCWD, "attacker", 06755, AT_SYMLINK_NOFOLLOW) < 0) { - perror("Setting the suid bit on attacker"); - exit(-1); - } - - } else { - // stage 2: corrupt the victim derivation while it's building - - // prevent the kill - if (setresuid(-1, -1, getuid())) { - perror("setresuid"); - exit(-1); - } - - if (fork() == 0) { - - // wait for the victim to build - int fd = inotify_init(); - inotify_add_watch(fd, argv[1], IN_CREATE); - int dirfd = open(argv[1], O_DIRECTORY); - if (dirfd < 0) { - perror("opening the global build directory"); - exit(-1); - } - char buf[4096]; - fprintf(stderr, "Entering the inotify loop\n"); - for (;;) { - ssize_t len = read(fd, buf, sizeof(buf)); - struct inotify_event *ev; - for (char *pe = buf; pe < buf + len; - pe += sizeof(struct inotify_event) + ev->len) { - ev = (struct inotify_event *)pe; - fprintf(stderr, "folder %s created\n", ev->name); - // wait a bit to prevent racing against the creation - sleep(1); - int builddir = openat(dirfd, ev->name, O_DIRECTORY); - if (builddir < 0) { - perror("opening the build directory"); - continue; - } - int resultfile = openat(builddir, "build/result", O_WRONLY | O_TRUNC); - if (resultfile < 0) { - perror("opening the hijacked file"); - continue; - } - int writeres = write(resultfile, "bad\n", 4); - if (writeres < 0) { - perror("writing to the hijacked file"); - continue; - } - fprintf(stderr, "Hijacked the build for %s\n", ev->name); - return 0; + if (fchmodat2(AT_FDCWD, "attacker", 06755, AT_SYMLINK_NOFOLLOW) < 0) { + perror("Setting the suid bit on attacker"); + exit(-1); } - } + + } else { + // stage 2: corrupt the victim derivation while it's building + + // prevent the kill + if (setresuid(-1, -1, getuid())) { + perror("setresuid"); + exit(-1); + } + + if (fork() == 0) { + + // wait for the victim to build + 
int fd = inotify_init(); + inotify_add_watch(fd, argv[1], IN_CREATE); + int dirfd = open(argv[1], O_DIRECTORY); + if (dirfd < 0) { + perror("opening the global build directory"); + exit(-1); + } + char buf[4096]; + fprintf(stderr, "Entering the inotify loop\n"); + for (;;) { + ssize_t len = read(fd, buf, sizeof(buf)); + struct inotify_event * ev; + for (char * pe = buf; pe < buf + len; pe += sizeof(struct inotify_event) + ev->len) { + ev = (struct inotify_event *) pe; + fprintf(stderr, "folder %s created\n", ev->name); + // wait a bit to prevent racing against the creation + sleep(1); + int builddir = openat(dirfd, ev->name, O_DIRECTORY); + if (builddir < 0) { + perror("opening the build directory"); + continue; + } + int resultfile = openat(builddir, "build/result", O_WRONLY | O_TRUNC); + if (resultfile < 0) { + perror("opening the hijacked file"); + continue; + } + int writeres = write(resultfile, "bad\n", 4); + if (writeres < 0) { + perror("writing to the hijacked file"); + continue; + } + fprintf(stderr, "Hijacked the build for %s\n", ev->name); + return 0; + } + } + } + + exit(0); } - - exit(0); - } } - From fb493ad7ca2826662d3a69b94b0ddae7dbb1e209 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 18 Jul 2025 12:47:27 -0400 Subject: [PATCH 051/382] Update .git-blame-ignore-revs to ignore the mass reformatting --- .git-blame-ignore-revs | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .git-blame-ignore-revs diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 000000000..bda571a5e --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,2 @@ +# bulk initial re-formatting with clang-format +e4f62e46088919428a68bd8014201dc8e379fed7 # !autorebase ./maintainers/format.sh --until-stable From ffc9bfb66d0b8d5da4e94ab1ac4580657b6352eb Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 18 Jul 2025 21:20:13 +0300 Subject: [PATCH 052/382] lib{store,flake}-tests: Add test for spaces in URIs These cases do not seem to be covered by the test suite at all. 
--- src/libflake-tests/flakeref.cc | 7 +++++++ .../data/store-reference/local_3.txt | 1 + src/libstore-tests/store-reference.cc | 14 ++++++++++++++ 3 files changed, 22 insertions(+) create mode 100644 src/libstore-tests/data/store-reference/local_3.txt diff --git a/src/libflake-tests/flakeref.cc b/src/libflake-tests/flakeref.cc index eafe74a2d..b8f1ef7c9 100644 --- a/src/libflake-tests/flakeref.cc +++ b/src/libflake-tests/flakeref.cc @@ -48,6 +48,13 @@ TEST(parseFlakeRef, path) ASSERT_EQ(flakeref.to_string(), "path:/foo/bar?revCount=123"); ASSERT_EQ(fragment, "bla"); } + + { + auto s = "/foo bar/baz?dir=bla space"; + auto flakeref = parseFlakeRef(fetchSettings, s); + ASSERT_EQ(flakeref.to_string(), "path:/foo%20bar/baz?dir=bla%20space"); + ASSERT_EQ(flakeref.toAttrs().at("dir"), fetchers::Attr("bla space")); + } } TEST(to_string, doesntReencodeUrl) diff --git a/src/libstore-tests/data/store-reference/local_3.txt b/src/libstore-tests/data/store-reference/local_3.txt new file mode 100644 index 000000000..2a67a3426 --- /dev/null +++ b/src/libstore-tests/data/store-reference/local_3.txt @@ -0,0 +1 @@ +local://?root=/foo bar/baz \ No newline at end of file diff --git a/src/libstore-tests/store-reference.cc b/src/libstore-tests/store-reference.cc index f8c3587d2..01b75f3d2 100644 --- a/src/libstore-tests/store-reference.cc +++ b/src/libstore-tests/store-reference.cc @@ -85,10 +85,24 @@ static StoreReference localExample_2{ }, }; +static StoreReference localExample_3{ + .variant = + StoreReference::Specified{ + .scheme = "local", + }, + .params = + { + {"root", "/foo bar/baz"}, + }, +}; + URI_TEST(local_1, localExample_1) URI_TEST(local_2, localExample_2) +/* Test path with spaces */ +URI_TEST(local_3, localExample_3) + URI_TEST_READ(local_shorthand_1, localExample_1) URI_TEST_READ(local_shorthand_2, localExample_2) From d9053390cec37cfb199cb3db256e43657a097465 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 18 Jul 2025 21:20:24 +0300 Subject: [PATCH 053/382] libutil-test-support: Add HasSubstrIgnoreANSIMatcher This matcher is useful for checking error messages, which always contain ANSI escapes. --- .../include/nix/util/tests/gmock-matchers.hh | 56 +++++++++++++++++++ .../include/nix/util/tests/meson.build | 1 + 2 files changed, 57 insertions(+) create mode 100644 src/libutil-test-support/include/nix/util/tests/gmock-matchers.hh diff --git a/src/libutil-test-support/include/nix/util/tests/gmock-matchers.hh b/src/libutil-test-support/include/nix/util/tests/gmock-matchers.hh new file mode 100644 index 000000000..27d765e9e --- /dev/null +++ b/src/libutil-test-support/include/nix/util/tests/gmock-matchers.hh @@ -0,0 +1,56 @@ +#pragma once +///@file + +#include "nix/util/terminal.hh" +#include + +namespace nix::testing { + +namespace internal { + +/** + * GMock matcher that matches substring while stripping off all ANSI escapes. + * Useful for checking exceptions messages in unit tests. 
+ */ +class HasSubstrIgnoreANSIMatcher +{ +public: + explicit HasSubstrIgnoreANSIMatcher(std::string substring) + : substring(std::move(substring)) + { + } + + bool MatchAndExplain(const char * s, ::testing::MatchResultListener * listener) const + { + return s != nullptr && MatchAndExplain(std::string(s), listener); + } + + template + bool MatchAndExplain(const MatcheeStringType & s, [[maybe_unused]] ::testing::MatchResultListener * listener) const + { + return filterANSIEscapes(s, /*filterAll=*/true).find(substring) != substring.npos; + } + + void DescribeTo(::std::ostream * os) const + { + *os << "has substring " << substring; + } + + void DescribeNegationTo(::std::ostream * os) const + { + *os << "has no substring " << substring; + } + +private: + std::string substring; +}; + +} // namespace internal + +inline ::testing::PolymorphicMatcher +HasSubstrIgnoreANSIMatcher(const std::string & substring) +{ + return ::testing::MakePolymorphicMatcher(internal::HasSubstrIgnoreANSIMatcher(substring)); +} + +} // namespace nix::testing diff --git a/src/libutil-test-support/include/nix/util/tests/meson.build b/src/libutil-test-support/include/nix/util/tests/meson.build index f77dedff7..e6697b517 100644 --- a/src/libutil-test-support/include/nix/util/tests/meson.build +++ b/src/libutil-test-support/include/nix/util/tests/meson.build @@ -4,6 +4,7 @@ include_dirs = [include_directories('../../..')] headers = files( 'characterization.hh', + 'gmock-matchers.hh', 'gtest-with-params.hh', 'hash.hh', 'nix_api_util.hh', From ad449c0288a539dc8443671bbd879d7d95cecb74 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 18 Jul 2025 21:20:36 +0300 Subject: [PATCH 054/382] libutil: Refactor percentDecode,percentEncode to use Boost.URL The myriad of hand-rolled URL parsing and validation code is a constant source of problems. Regexes are not a great way of writing parsers and there's a history of getting them wrong. Boost.URL is a good library we can outsource most of the heavy lifting to. 
--- packaging/dependencies.nix | 1 + src/libutil-tests/url.cc | 10 ++++++++++ src/libutil/meson.build | 2 +- src/libutil/url.cc | 41 ++++++++++++-------------------------- 4 files changed, 25 insertions(+), 29 deletions(-) diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix index 7ce3bf125..dda9ef8dc 100644 --- a/packaging/dependencies.nix +++ b/packaging/dependencies.nix @@ -62,6 +62,7 @@ scope: { "--with-context" "--with-coroutine" "--with-iostreams" + "--with-url" ]; enableIcu = false; }).overrideAttrs diff --git a/src/libutil-tests/url.cc b/src/libutil-tests/url.cc index 2a2bba880..8f2033ded 100644 --- a/src/libutil-tests/url.cc +++ b/src/libutil-tests/url.cc @@ -1,5 +1,7 @@ #include "nix/util/url.hh" +#include "nix/util/tests/gmock-matchers.hh" #include +#include namespace nix { @@ -289,6 +291,14 @@ TEST(percentDecode, trailingPercent) ASSERT_EQ(d, s); } +TEST(percentDecode, incompleteEncoding) +{ + ASSERT_THAT( + []() { percentDecode("%1"); }, + ::testing::ThrowsMessage( + testing::HasSubstrIgnoreANSIMatcher("error: invalid URI parameter '%1': incomplete pct-encoding"))); +} + /* ---------------------------------------------------------------------------- * percentEncode * --------------------------------------------------------------------------*/ diff --git a/src/libutil/meson.build b/src/libutil/meson.build index f48c8f3d7..55419265a 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -57,7 +57,7 @@ deps_private += blake3 boost = dependency( 'boost', - modules : ['context', 'coroutine', 'iostreams'], + modules : ['context', 'coroutine', 'iostreams', 'url'], include_type: 'system', version: '>=1.82.0' ) diff --git a/src/libutil/url.cc b/src/libutil/url.cc index eac0b188e..67043285c 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -4,6 +4,8 @@ #include "nix/util/split.hh" #include "nix/util/canon-path.hh" +#include + namespace nix { std::regex refRegex(refRegexS, std::regex::ECMAScript); @@ -48,21 +50,17 @@ ParsedURL parseURL(const std::string & url) std::string percentDecode(std::string_view in) { - std::string decoded; - for (size_t i = 0; i < in.size();) { - if (in[i] == '%') { - if (i + 2 >= in.size()) - throw BadURL("invalid URI parameter '%s'", in); - try { - decoded += std::stoul(std::string(in, i + 1, 2), 0, 16); - i += 3; - } catch (...) 
{ - throw BadURL("invalid URI parameter '%s'", in); - } - } else - decoded += in[i++]; - } - return decoded; + auto pctView = boost::urls::make_pct_string_view(in); + if (pctView.has_value()) + return pctView->decode(); + auto error = pctView.error(); + throw BadURL("invalid URI parameter '%s': %s", in, error.message()); +} + +std::string percentEncode(std::string_view s, std::string_view keep) +{ + return boost::urls::encode( + s, [keep](char c) { return boost::urls::unreserved_chars(c) || keep.find(c) != keep.npos; }); } StringMap decodeQuery(const std::string & query) @@ -85,19 +83,6 @@ StringMap decodeQuery(const std::string & query) const static std::string allowedInQuery = ":@/?"; const static std::string allowedInPath = ":@/"; -std::string percentEncode(std::string_view s, std::string_view keep) -{ - std::string res; - for (auto & c : s) - // unreserved + keep - if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || strchr("-._~", c) - || keep.find(c) != std::string::npos) - res += c; - else - res += fmt("%%%02X", c & 0xFF); - return res; -} - std::string encodeQuery(const StringMap & ss) { std::string res; From d020f21a2a5dcd771988694cf634841fbcf310cd Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 18 Jul 2025 21:20:46 +0300 Subject: [PATCH 055/382] libutil: Use default operator== for ParsedURL The default comparison operator can be generated by the compiler since C++20. --- src/libutil/include/nix/util/url.hh | 2 +- src/libutil/url.cc | 6 ------ 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/src/libutil/include/nix/util/url.hh b/src/libutil/include/nix/util/url.hh index 8980b4ce3..e29226720 100644 --- a/src/libutil/include/nix/util/url.hh +++ b/src/libutil/include/nix/util/url.hh @@ -15,7 +15,7 @@ struct ParsedURL std::string to_string() const; - bool operator==(const ParsedURL & other) const noexcept; + bool operator==(const ParsedURL & other) const noexcept = default; /** * Remove `.` and `..` path elements. diff --git a/src/libutil/url.cc b/src/libutil/url.cc index 67043285c..7f31d0f1c 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -110,12 +110,6 @@ std::ostream & operator<<(std::ostream & os, const ParsedURL & url) return os; } -bool ParsedURL::operator==(const ParsedURL & other) const noexcept -{ - return scheme == other.scheme && authority == other.authority && path == other.path && query == other.query - && fragment == other.fragment; -} - ParsedURL ParsedURL::canonicalise() { ParsedURL res(*this); From bd1d2d1041a321284efcf22e11beb86ede08648d Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 18 Jul 2025 21:20:59 +0300 Subject: [PATCH 056/382] libutil: Use Boost.URL in parseURL MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Boost.URL is a significantly more RFC-compliant parser than what libutil currently has a bundle of incomprehensible regexes. One aspect of this change is that RFC4007 ZoneId IPv6 literals are represented in URIs according to RFC6874 [1]. Previously they were represented naively like so: [fe80::818c:da4d:8975:415c\%enp0s25]. This is not entirely correct, because the percent itself has to be pct-encoded: > "%" is always treated as an escape character in a URI, so, according to the established URI syntax [RFC3986] any occurrences of literal "%" symbols in a URI MUST be percent-encoded and represented in the form "%25". Thus, the scoped address fe80::a%en1 would appear in a URI as http://[fe80::a%25en1]. 
[1]: https://datatracker.ietf.org/doc/html/rfc6874 Co-authored-by: Jörg Thalheim --- src/libutil-tests/url.cc | 4 +- src/libutil/include/nix/util/url-parts.hh | 11 --- src/libutil/include/nix/util/url.hh | 11 +++ src/libutil/url.cc | 100 +++++++++++++++------- 4 files changed, 81 insertions(+), 45 deletions(-) diff --git a/src/libutil-tests/url.cc b/src/libutil-tests/url.cc index 8f2033ded..5e9b81f46 100644 --- a/src/libutil-tests/url.cc +++ b/src/libutil-tests/url.cc @@ -124,9 +124,9 @@ TEST(parseURL, parseIPv4Address) ASSERT_EQ(parsed, expected); } -TEST(parseURL, parseScopedRFC4007IPv6Address) +TEST(parseURL, parseScopedRFC6874IPv6Address) { - auto s = "http://[fe80::818c:da4d:8975:415c\%enp0s25]:8080"; + auto s = "http://[fe80::818c:da4d:8975:415c\%25enp0s25]:8080"; auto parsed = parseURL(s); ParsedURL expected{ diff --git a/src/libutil/include/nix/util/url-parts.hh b/src/libutil/include/nix/util/url-parts.hh index bf1215b6d..72c901b5d 100644 --- a/src/libutil/include/nix/util/url-parts.hh +++ b/src/libutil/include/nix/util/url-parts.hh @@ -8,21 +8,10 @@ namespace nix { // URI stuff. const static std::string pctEncoded = "(?:%[0-9a-fA-F][0-9a-fA-F])"; -const static std::string schemeNameRegex = "(?:[a-z][a-z0-9+.-]*)"; -const static std::string ipv6AddressSegmentRegex = "[0-9a-fA-F:]+(?:%\\w+)?"; -const static std::string ipv6AddressRegex = "(?:\\[" + ipv6AddressSegmentRegex + "\\]|" + ipv6AddressSegmentRegex + ")"; const static std::string unreservedRegex = "(?:[a-zA-Z0-9-._~])"; const static std::string subdelimsRegex = "(?:[!$&'\"()*+,;=])"; -const static std::string hostnameRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + ")*)"; -const static std::string hostRegex = "(?:" + ipv6AddressRegex + "|" + hostnameRegex + ")"; -const static std::string userRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|:)*)"; -const static std::string authorityRegex = "(?:" + userRegex + "@)?" + hostRegex + "(?::[0-9]+)?"; const static std::string pcharRegex = "(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|[:@])"; -const static std::string queryRegex = "(?:" + pcharRegex + "|[/? \"])*"; const static std::string fragmentRegex = "(?:" + pcharRegex + "|[/? \"^])*"; -const static std::string segmentRegex = "(?:" + pcharRegex + "*)"; -const static std::string absPathRegex = "(?:(?:/" + segmentRegex + ")*/?)"; -const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRegex + ")*/?)"; /// A Git ref (i.e. branch or tag name). /// \todo check that this is correct. diff --git a/src/libutil/include/nix/util/url.hh b/src/libutil/include/nix/util/url.hh index e29226720..1c51ab797 100644 --- a/src/libutil/include/nix/util/url.hh +++ b/src/libutil/include/nix/util/url.hh @@ -34,6 +34,17 @@ StringMap decodeQuery(const std::string & query); std::string encodeQuery(const StringMap & query); +/** + * Parse a Nix URL into a ParsedURL. + * + * Nix URI is mostly compliant with RFC3986, but with some deviations: + * - Literal spaces are allowed and don't have to be percent encoded. + * This is mostly done for backward compatibility. + * + * @note IPv6 ZoneId literals (RFC4007) are represented in URIs according to RFC6874. 
+ * + * @throws BadURL + */ ParsedURL parseURL(const std::string & url); /** diff --git a/src/libutil/url.cc b/src/libutil/url.cc index 7f31d0f1c..2f9c7736a 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -12,40 +12,70 @@ std::regex refRegex(refRegexS, std::regex::ECMAScript); std::regex badGitRefRegex(badGitRefRegexS, std::regex::ECMAScript); std::regex revRegex(revRegexS, std::regex::ECMAScript); -ParsedURL parseURL(const std::string & url) +/** + * Drop trailing shevron for output installable syntax. + * + * FIXME: parseURL shouldn't really be used for parsing the OutputSpec, but it does + * get used. That code should actually use ExtendedOutputsSpec::parseOpt. + */ +static std::string_view dropShevronSuffix(std::string_view url) { - static std::regex uriRegex( - "((" + schemeNameRegex + "):" + "(?:(?://(" + authorityRegex + ")(" + absPathRegex + "))|(/?" + pathRegex - + ")))" + "(?:\\?(" + queryRegex + "))?" + "(?:#(" + fragmentRegex + "))?", - std::regex::ECMAScript); + auto shevron = url.rfind("^"); + if (shevron == std::string_view::npos) + return url; + return url.substr(0, shevron); +} - std::smatch match; +/** + * Percent encode spaces in the url. + */ +static std::string percentEncodeSpaces(std::string_view url) +{ + return replaceStrings(std::string(url), " ", percentEncode(" ")); +} - if (std::regex_match(url, match, uriRegex)) { - std::string scheme = match[2]; - auto authority = match[3].matched ? std::optional(match[3]) : std::nullopt; - std::string path = match[4].matched ? match[4] : match[5]; - auto & query = match[6]; - auto & fragment = match[7]; +ParsedURL parseURL(const std::string & url) +try { + /* Drop the shevron suffix used for the flakerefs. Shevron character is reserved and + shouldn't appear in normal URIs. */ + auto unparsedView = dropShevronSuffix(url); + /* For back-compat literal spaces are allowed. */ + auto withFixedSpaces = percentEncodeSpaces(unparsedView); + auto urlView = boost::urls::url_view(withFixedSpaces); - auto transportIsFile = parseUrlScheme(scheme).transport == "file"; + if (!urlView.has_scheme()) + throw BadURL("'%s' doesn't have a scheme", url); - if (authority && *authority != "" && transportIsFile) - throw BadURL("file:// URL '%s' has unexpected authority '%s'", url, *authority); + auto scheme = urlView.scheme(); + auto authority = [&]() -> std::optional { + if (urlView.has_authority()) + return percentDecode(urlView.authority().buffer()); + return std::nullopt; + }(); - if (transportIsFile && path.empty()) - path = "/"; + auto transportIsFile = parseUrlScheme(scheme).transport == "file"; + if (authority && *authority != "" && transportIsFile) + throw BadURL("file:// URL '%s' has unexpected authority '%s'", url, *authority); - return ParsedURL{ - .scheme = scheme, - .authority = authority, - .path = percentDecode(path), - .query = decodeQuery(query), - .fragment = percentDecode(std::string(fragment))}; - } + auto path = urlView.path(); /* Does pct-decoding */ + auto fragment = urlView.fragment(); /* Does pct-decoding */ - else - throw BadURL("'%s' is not a valid URL", url); + if (transportIsFile && path.empty()) + path = "/"; + + /* Get the raw query. Store URI supports smuggling doubly nested queries, where + the inner &/? are pct-encoded. 
*/ + auto query = std::string_view(urlView.encoded_query()); + + return ParsedURL{ + .scheme = scheme, + .authority = authority, + .path = path, + .query = decodeQuery(std::string(query)), + .fragment = fragment, + }; +} catch (boost::system::system_error & e) { + throw BadURL("'%s' is not a valid URL: %s", url, e.code().message()); } std::string percentDecode(std::string_view in) @@ -64,20 +94,25 @@ std::string percentEncode(std::string_view s, std::string_view keep) } StringMap decodeQuery(const std::string & query) -{ +try { + /* For back-compat literal spaces are allowed. */ + auto withFixedSpaces = percentEncodeSpaces(query); + StringMap result; - for (const auto & s : tokenizeString(query, "&")) { - auto e = s.find('='); - if (e == std::string::npos) { - warn("dubious URI query '%s' is missing equal sign '%s', ignoring", s, "="); + auto encodedQuery = boost::urls::params_encoded_view(withFixedSpaces); + for (auto && [key, value, value_specified] : encodedQuery) { + if (!value_specified) { + warn("dubious URI query '%s' is missing equal sign '%s', ignoring", std::string_view(key), "="); continue; } - result.emplace(s.substr(0, e), percentDecode(std::string_view(s).substr(e + 1))); + result.emplace(key.decode(), value.decode()); } return result; +} catch (boost::system::system_error & e) { + throw BadURL("invalid URI query '%s': %s", query, e.code().message()); } const static std::string allowedInQuery = ":@/?"; @@ -150,6 +185,7 @@ std::string fixGitURL(const std::string & url) // https://www.rfc-editor.org/rfc/rfc3986#section-3.1 bool isValidSchemeName(std::string_view s) { + const static std::string schemeNameRegex = "(?:[a-z][a-z0-9+.-]*)"; static std::regex regex(schemeNameRegex, std::regex::ECMAScript); return std::regex_match(s.begin(), s.end(), regex, std::regex_constants::match_default); From a54284cbc706474d091284d8e7470b803a8ff001 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 18 Jul 2025 21:21:12 +0300 Subject: [PATCH 057/382] rl-next: Add release note about IPv6 Scoped Addresses in URIs --- doc/manual/rl-next/rfc4007-zone-id-in-uri-rfc6874.md | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 doc/manual/rl-next/rfc4007-zone-id-in-uri-rfc6874.md diff --git a/doc/manual/rl-next/rfc4007-zone-id-in-uri-rfc6874.md b/doc/manual/rl-next/rfc4007-zone-id-in-uri-rfc6874.md new file mode 100644 index 000000000..d5bc4736f --- /dev/null +++ b/doc/manual/rl-next/rfc4007-zone-id-in-uri-rfc6874.md @@ -0,0 +1,6 @@ +--- +synopsis: "Represent IPv6 RFC4007 ZoneId literals in conformance with RFC6874" +prs: [13445] +--- + +Prior versions of Nix since [#4646](https://github.com/NixOS/nix/pull/4646) accepted [IPv6 scoped addresses](https://datatracker.ietf.org/doc/html/rfc4007) in URIs like [store references](@docroot@/store/types/index.md#store-url-format) in the textual representation with a literal percent character: `[fe80::1%18]`. This was ambiguous, because the the percent literal `%` is reserved by [RFC3986](https://datatracker.ietf.org/doc/html/rfc3986), since it's used to indicate percent encoding. Nix now requires that the percent `%` symbol is percent-encoded as `%25`. This implements [RFC6874](https://datatracker.ietf.org/doc/html/rfc6874), which defines the representation of zone identifiers in URIs. The example from above now has to be specified as `[fe80::1%2518]`. 
From 0468c1d69abff4a484e07efb81c5f6c5bb43bdcf Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 24 Apr 2025 01:55:22 +0200 Subject: [PATCH 058/382] flake.nix: Create package sets independent of overlay This was carefully refactored by inlining some Nixpkgs definitions like `generateSplicesForMkScope`, so the memoization properties should remain the same. --- flake.nix | 192 ++++++++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 173 insertions(+), 19 deletions(-) diff --git a/flake.nix b/flake.nix index 69bd2a21a..81ce57c68 100644 --- a/flake.nix +++ b/flake.nix @@ -131,31 +131,107 @@ } ); - overlayFor = - getStdenv: final: prev: + /** + Produce the `nixComponents` and `nixDependencies` package sets (scopes) for + a given `pkgs` and `getStdenv`. + */ + packageSetsFor = let - stdenv = getStdenv final; + /** + Removes a prefix from the attribute names of a set of splices. + This is a completely uninteresting and exists for compatibility only. + + Example: + ```nix + renameSplicesFrom "pkgs" { pkgsBuildBuild = ...; ... } + => { buildBuild = ...; ... } + ``` + */ + renameSplicesFrom = prefix: x: { + buildBuild = x."${prefix}BuildBuild"; + buildHost = x."${prefix}BuildHost"; + buildTarget = x."${prefix}BuildTarget"; + hostHost = x."${prefix}HostHost"; + hostTarget = x."${prefix}HostTarget"; + targetTarget = x."${prefix}TargetTarget"; + }; + + /** + Adds a prefix to the attribute names of a set of splices. + This is a completely uninteresting and exists for compatibility only. + + Example: + ```nix + renameSplicesTo "self" { buildBuild = ...; ... } + => { selfBuildBuild = ...; ... } + ``` + */ + renameSplicesTo = prefix: x: { + "${prefix}BuildBuild" = x.buildBuild; + "${prefix}BuildHost" = x.buildHost; + "${prefix}BuildTarget" = x.buildTarget; + "${prefix}HostHost" = x.hostHost; + "${prefix}HostTarget" = x.hostTarget; + "${prefix}TargetTarget" = x.targetTarget; + }; + + /** + Takes a function `f` and returns a function that applies `f` pointwise to each splice. + + Example: + ```nix + mapSplices (x: x * 10) { buildBuild = 1; buildHost = 2; ... } + => { buildBuild = 10; buildHost = 20; ... } + ``` + */ + mapSplices = + f: + { + buildBuild, + buildHost, + buildTarget, + hostHost, + hostTarget, + targetTarget, + }: + { + buildBuild = f buildBuild; + buildHost = f buildHost; + buildTarget = f buildTarget; + hostHost = f hostHost; + hostTarget = f hostTarget; + targetTarget = f targetTarget; + }; + in - { - nixStable = prev.nix; + args@{ + pkgs, + getStdenv ? pkgs: pkgs.stdenv, + }: + let + nixComponentsSplices = mapSplices ( + pkgs': (packageSetsFor (args // { pkgs = pkgs'; })).nixComponents + ) (renameSplicesFrom "pkgs" pkgs); + nixDependenciesSplices = mapSplices ( + pkgs': (packageSetsFor (args // { pkgs = pkgs'; })).nixDependencies + ) (renameSplicesFrom "pkgs" pkgs); # A new scope, so that we can use `callPackage` to inject our own interdependencies # without "polluting" the top level "`pkgs`" attrset. # This also has the benefit of providing us with a distinct set of packages # we can iterate over. - # The `2` suffix is here because otherwise it interferes with `nixVersions.latest`, which is used in daemon compat tests. 
- nixComponents2 = + nixComponents = lib.makeScopeWithSplicing' { - inherit (final) splicePackages; - inherit (final.nixDependencies2) newScope; + inherit (pkgs) splicePackages; + inherit (nixDependencies) newScope; } { - otherSplices = final.generateSplicesForMkScope "nixComponents2"; + otherSplices = renameSplicesTo "self" nixComponentsSplices; f = import ./packaging/components.nix { - inherit (final) lib; + inherit (pkgs) lib; inherit officialRelease; - pkgs = final; + inherit pkgs; src = self; maintainers = [ ]; }; @@ -163,21 +239,51 @@ # The dependencies are in their own scope, so that they don't have to be # in Nixpkgs top level `pkgs` or `nixComponents2`. - # The `2` suffix is here because otherwise it interferes with `nixVersions.latest`, which is used in daemon compat tests. - nixDependencies2 = + nixDependencies = lib.makeScopeWithSplicing' { - inherit (final) splicePackages; - inherit (final) newScope; # layered directly on pkgs, unlike nixComponents2 above + inherit (pkgs) splicePackages; + inherit (pkgs) newScope; # layered directly on pkgs, unlike nixComponents2 above } { - otherSplices = final.generateSplicesForMkScope "nixDependencies2"; + otherSplices = renameSplicesTo "self" nixDependenciesSplices; f = import ./packaging/dependencies.nix { - inherit inputs stdenv; - pkgs = final; + inherit inputs pkgs; + stdenv = getStdenv pkgs; }; }; + # If the package set is largely empty, we should(?) return empty sets + # This is what most package sets in Nixpkgs do. Otherwise, we get + # an error message that indicates that some stdenv attribute is missing, + # and indeed it will be missing, as seemingly `pkgsTargetTarget` is + # very incomplete. + fixup = lib.mapAttrs (k: v: if !(pkgs ? nix) then { } else v); + in + fixup { + inherit nixDependencies; + inherit nixComponents; + }; + + overlayFor = + getStdenv: final: prev: + let + packageSets = packageSetsFor { + inherit getStdenv; + pkgs = final; + }; + in + { + nixStable = prev.nix; + + # The `2` suffix is here because otherwise it interferes with `nixVersions.latest`, which is used in daemon compat tests. + nixComponents2 = packageSets.nixComponents; + + # The dependencies are in their own scope, so that they don't have to be + # in Nixpkgs top level `pkgs` or `nixComponents2`. + # The `2` suffix is here because otherwise it interferes with `nixVersions.latest`, which is used in daemon compat tests. + nixDependencies2 = packageSets.nixDependencies; + nix = final.nixComponents2.nix-cli; }; @@ -465,5 +571,53 @@ default = self.devShells.${system}.native; } ); + + lib = { + /** + Creates a package set for a given Nixpkgs instance and stdenv. + + # Inputs + + - `pkgs`: The Nixpkgs instance to use. + + - `getStdenv`: _Optional_ A function that takes a package set and returns the stdenv to use. + This needs to be a function in order to support cross compilation - the `pkgs` passed to `getStdenv` can be `pkgsBuildHost` or any other variation needed. + + # Outputs + + The return value is a fresh Nixpkgs scope containing all the packages that are defined in the Nix repository, + as well as some internals and parameters, which may be subject to change. + + # Example + + ```console + nix repl> :lf NixOS/nix + nix-repl> ps = lib.makeComponents { pkgs = import inputs.nixpkgs { crossSystem = "riscv64-linux"; }; } + nix-repl> ps + { + appendPatches = «lambda appendPatches @ ...»; + callPackage = «lambda callPackageWith @ ...»; + overrideAllMesonComponents = «lambda overrideSource @ ...»; + overrideSource = «lambda overrideSource @ ...»; + # ... 
+ nix-everything + # ... + nix-store + nix-store-c + # ... + } + ``` + */ + makeComponents = + { + pkgs, + getStdenv ? pkgs: pkgs.stdenv, + }: + + let + packageSets = packageSetsFor { inherit getStdenv pkgs; }; + in + packageSets.nixComponents; + }; }; } From e70dac82448bdd2817fab4c3b904a68f93086251 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 24 Apr 2025 02:15:19 +0200 Subject: [PATCH 059/382] doc: Add brief comments to the components Users can access these through the `lib.makeComponents` return value, so it's helpful to briefly explain some of them. This doesn't replace `meta.description`, but supplements it. (TODO: improve `meta.description`) --- packaging/components.nix | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/packaging/components.nix b/packaging/components.nix index b40bd45b0..661857833 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -365,18 +365,33 @@ in nix-cmd = callPackage ../src/libcmd/package.nix { }; + /** + The Nix command line interface. Note that this does not include its tests, whereas `nix-everything` does. + */ nix-cli = callPackage ../src/nix/package.nix { version = fineVersion; }; nix-functional-tests = callPackage ../tests/functional/package.nix { version = fineVersion; }; + /** + The manual as would be published on https://nix.dev/reference/nix-manual + */ nix-manual = callPackage ../doc/manual/package.nix { version = fineVersion; }; + /** + Doxygen pages for C++ code + */ nix-internal-api-docs = callPackage ../src/internal-api-docs/package.nix { version = fineVersion; }; + /** + Doxygen pages for the public C API + */ nix-external-api-docs = callPackage ../src/external-api-docs/package.nix { version = fineVersion; }; nix-perl-bindings = callPackage ../src/perl/package.nix { }; + /** + Combined package that has the CLI, libraries, and (assuming non-cross, no overrides) it requires that all tests succeed. + */ nix-everything = callPackage ../packaging/everything.nix { } // { # Note: no `passthru.overrideAllMesonComponents` etc # This would propagate into `nix.overrideAttrs f`, but then discard From 9b9e49e8e475e8fdef7866306e43451a0be26438 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 24 Apr 2025 02:16:02 +0200 Subject: [PATCH 060/382] overlays.default: Only set `nix` We should keep users' pkgs tidy. --- flake.nix | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/flake.nix b/flake.nix index 81ce57c68..2b93429e9 100644 --- a/flake.nix +++ b/flake.nix @@ -289,9 +289,21 @@ in { - # A Nixpkgs overlay that overrides the 'nix' and - # 'nix-perl-bindings' packages. - overlays.default = overlayFor (p: p.stdenv); + overlays.internal = overlayFor (p: p.stdenv); + + /** + A Nixpkgs overlay that sets `nix` to something like `packages..nix-everything`, + except dependencies aren't taken from (flake) `nix.inputs.nixpkgs`, but from the Nixpkgs packages + where the overlay is used. + */ + overlays.default = + final: prev: + let + packageSets = packageSetsFor { pkgs = final; }; + in + { + nix = packageSets.nixComponents.nix-everything; + }; hydraJobs = import ./packaging/hydra.nix { inherit From 9c58c8f739799725c6d43e1b94731f1cce5a900b Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 20 Jul 2025 18:23:13 +0300 Subject: [PATCH 061/382] ci: Add build profiling job This adds a GHA jobs to help analyze build times and its regressions. It is based on `clangStdenv` with `-ftime-trace` together with `ClangBuildAnalyzer` to prepare markdown summary for individual components. 
This also has the minor benefit of dogfooding CA and impure derivations. --- .github/workflows/ci.yml | 23 +++++++ ci/gha/profile-build/default.nix | 101 +++++++++++++++++++++++++++++++ 2 files changed, 124 insertions(+) create mode 100644 ci/gha/profile-build/default.nix diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index da6f35907..6f5167834 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -221,3 +221,26 @@ jobs: github_token: ${{ secrets.GITHUB_TOKEN }} - uses: DeterminateSystems/magic-nix-cache-action@main - run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=25 flake-regressions/eval-all.sh + + profile_build: + needs: tests + runs-on: ubuntu-24.04 + timeout-minutes: 60 + if: >- + github.event_name == 'push' && + github.ref_name == 'master' + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: ./.github/actions/install-nix-action + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + dogfood: true + extra_nix_config: | + experimental-features = flakes nix-command ca-derivations impure-derivations + max-jobs = 1 + - uses: DeterminateSystems/magic-nix-cache-action@main + - run: | + nix build -L --file ./ci/gha/profile-build buildTimeReport --out-link build-time-report.md + cat build-time-report.md >> $GITHUB_STEP_SUMMARY diff --git a/ci/gha/profile-build/default.nix b/ci/gha/profile-build/default.nix new file mode 100644 index 000000000..2b7261b71 --- /dev/null +++ b/ci/gha/profile-build/default.nix @@ -0,0 +1,101 @@ +{ + nixFlake ? builtins.getFlake ("git+file://" + toString ../../..), + system ? builtins.currentSystem, + pkgs ? nixFlake.inputs.nixpkgs.legacyPackages.${system}, +}: + +let + inherit (pkgs) lib; + + nixComponentsInstrumented = + (nixFlake.lib.makeComponents { + inherit pkgs; + getStdenv = p: p.clangStdenv; + }).overrideScope + ( + _: _: { + mesonComponentOverrides = finalAttrs: prevAttrs: { + outputs = (prevAttrs.outputs or [ "out" ]) ++ [ "buildprofile" ]; + nativeBuildInputs = [ pkgs.clangbuildanalyzer ] ++ prevAttrs.nativeBuildInputs or [ ]; + __impure = true; + + env = { + CFLAGS = "-ftime-trace"; + CXXFLAGS = "-ftime-trace"; + }; + + preBuild = '' + ClangBuildAnalyzer --start $PWD + ''; + + postBuild = '' + ClangBuildAnalyzer --stop $PWD $buildprofile + ''; + }; + } + ); + + componentsToProfile = { + "nix-util" = { }; + "nix-util-c" = { }; + "nix-util-test-support" = { }; + "nix-util-tests" = { }; + "nix-store" = { }; + "nix-store-c" = { }; + "nix-store-test-support" = { }; + "nix-store-tests" = { }; + "nix-fetchers" = { }; + "nix-fetchers-c" = { }; + "nix-fetchers-tests" = { }; + "nix-expr" = { }; + "nix-expr-c" = { }; + "nix-expr-test-support" = { }; + "nix-expr-tests" = { }; + "nix-flake" = { }; + "nix-flake-c" = { }; + "nix-flake-tests" = { }; + "nix-main" = { }; + "nix-main-c" = { }; + "nix-cmd" = { }; + "nix-cli" = { }; + }; + + componentDerivationsToProfile = builtins.intersectAttrs componentsToProfile nixComponentsInstrumented; + componentBuildProfiles = lib.mapAttrs ( + n: v: lib.getOutput "buildprofile" v + ) componentDerivationsToProfile; + + buildTimeReport = + pkgs.runCommand "build-time-report" + { + __impure = true; + __structuredAttrs = true; + nativeBuildInputs = [ pkgs.clangbuildanalyzer ]; + inherit componentBuildProfiles; + } + '' + { + echo "# Build time performance profile for components:" + echo + echo "This reports the build profile collected via \`-ftime-trace\` for each component." 
+ echo + } >> $out + + for name in "''\${!componentBuildProfiles[@]}"; do + { + echo "
$name" + echo + echo '````' + ClangBuildAnalyzer --analyze "''\${componentBuildProfiles[$name]}" + echo '````' + echo + echo "
" + } >> $out + done + ''; +in + +{ + inherit buildTimeReport; + inherit componentDerivationsToProfile; +} From e920e287347481f8bcfee3d126d0193678b2c619 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 20 Jul 2025 21:16:20 +0300 Subject: [PATCH 062/382] libutil: Speed up builds by using precompiled headers --- src/libutil/meson.build | 1 + src/libutil/pch/precompiled-headers.hh | 7 +++++++ 2 files changed, 8 insertions(+) create mode 100644 src/libutil/pch/precompiled-headers.hh diff --git a/src/libutil/meson.build b/src/libutil/meson.build index f48c8f3d7..ced9c424d 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -191,6 +191,7 @@ this_library = library( link_args: linker_export_flags, prelink : true, # For C++ static initializers install : true, + cpp_pch : 'pch/precompiled-headers.hh' ) install_headers(headers, subdir : 'nix/util', preserve_path : true) diff --git a/src/libutil/pch/precompiled-headers.hh b/src/libutil/pch/precompiled-headers.hh new file mode 100644 index 000000000..75e905386 --- /dev/null +++ b/src/libutil/pch/precompiled-headers.hh @@ -0,0 +1,7 @@ +#include "nix/util/util.hh" +#include "nix/util/file-system.hh" +#include "nix/util/serialise.hh" +#include "nix/util/signals.hh" +#include "nix/util/source-accessor.hh" + +#include From 33d031095fa5ab4dfcbd9e4627f23b999119863b Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 20 Jul 2025 21:24:46 +0300 Subject: [PATCH 063/382] libstore: Speed up builds by using precompiled headers --- src/libstore/meson.build | 1 + src/libstore/pch/precompiled-headers.hh | 8 ++++++++ 2 files changed, 9 insertions(+) create mode 100644 src/libstore/pch/precompiled-headers.hh diff --git a/src/libstore/meson.build b/src/libstore/meson.build index d82bcddc1..375959643 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -348,6 +348,7 @@ this_library = library( link_args: linker_export_flags, prelink : true, # For C++ static initializers install : true, + cpp_pch : ['pch/precompiled-headers.hh'] ) install_headers(headers, subdir : 'nix/store', preserve_path : true) diff --git a/src/libstore/pch/precompiled-headers.hh b/src/libstore/pch/precompiled-headers.hh new file mode 100644 index 000000000..ea0dd5d70 --- /dev/null +++ b/src/libstore/pch/precompiled-headers.hh @@ -0,0 +1,8 @@ +#include "nix/store/store-api.hh" +#include "nix/store/realisation.hh" +#include "nix/store/derived-path.hh" +#include "nix/store/derivations.hh" +#include "nix/store/local-store.hh" +#include "nix/util/util.hh" + +#include From af6c84b5238a7bfd20aeb2195af86c51cb2e73cf Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 20 Jul 2025 21:33:15 +0300 Subject: [PATCH 064/382] libexpr: Speed up builds by using precompiled headers --- src/libexpr/meson.build | 1 + src/libexpr/pch/precompiled-headers.hh | 1 + 2 files changed, 2 insertions(+) create mode 100644 src/libexpr/pch/precompiled-headers.hh diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index f5adafae0..fe795a607 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -178,6 +178,7 @@ this_library = library( link_args: linker_export_flags, prelink : true, # For C++ static initializers install : true, + cpp_pch : ['pch/precompiled-headers.hh'] ) install_headers(headers, subdir : 'nix/expr', preserve_path : true) diff --git a/src/libexpr/pch/precompiled-headers.hh b/src/libexpr/pch/precompiled-headers.hh new file mode 100644 index 000000000..1c9417b37 --- /dev/null +++ b/src/libexpr/pch/precompiled-headers.hh @@ -0,0 +1 @@ 
+#include "nix/expr/eval.hh" From 46469983e7e3b395907d1a66787c66c989417f24 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 20 Jul 2025 21:39:47 +0300 Subject: [PATCH 065/382] libcmd: Speed up builds by using precompiled headers --- src/libcmd/meson.build | 1 + src/libcmd/pch/precompiled-headers.hh | 4 ++++ 2 files changed, 5 insertions(+) create mode 100644 src/libcmd/pch/precompiled-headers.hh diff --git a/src/libcmd/meson.build b/src/libcmd/meson.build index 216d4df9c..5c3dd91ee 100644 --- a/src/libcmd/meson.build +++ b/src/libcmd/meson.build @@ -92,6 +92,7 @@ this_library = library( link_args: linker_export_flags, prelink : true, # For C++ static initializers install : true, + cpp_pch : ['pch/precompiled-headers.hh'] ) install_headers(headers, subdir : 'nix/cmd', preserve_path : true) diff --git a/src/libcmd/pch/precompiled-headers.hh b/src/libcmd/pch/precompiled-headers.hh new file mode 100644 index 000000000..6f9947e9b --- /dev/null +++ b/src/libcmd/pch/precompiled-headers.hh @@ -0,0 +1,4 @@ +#include "nix/cmd/installables.hh" +#include "nix/expr/eval.hh" +#include "nix/util/util.hh" +#include "nix/flake/flake.hh" From 9dae9f6cab6f545fc3d155c745ac80798b560702 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 20 Jul 2025 21:45:39 +0300 Subject: [PATCH 066/382] nix-cli: Speed up builds by using precompiled headers --- src/nix/meson.build | 1 + src/nix/pch/precompiled-headers.hh | 3 +++ 2 files changed, 4 insertions(+) create mode 100644 src/nix/pch/precompiled-headers.hh diff --git a/src/nix/meson.build b/src/nix/meson.build index 0ba8bdd46..586ee15c3 100644 --- a/src/nix/meson.build +++ b/src/nix/meson.build @@ -186,6 +186,7 @@ this_exe = executable( include_directories : include_dirs, link_args: linker_export_flags, install : true, + cpp_pch : ['pch/precompiled-headers.hh'] ) meson.override_find_program('nix', this_exe) diff --git a/src/nix/pch/precompiled-headers.hh b/src/nix/pch/precompiled-headers.hh new file mode 100644 index 000000000..5b320c861 --- /dev/null +++ b/src/nix/pch/precompiled-headers.hh @@ -0,0 +1,3 @@ +#include "nix/cmd/command.hh" +#include "nix/expr/eval.hh" +#include "nix/main/shared.hh" From 6db61900028ec641f12b1d36fe4ece5a9bdaa66f Mon Sep 17 00:00:00 2001 From: OPNA2608 Date: Mon, 21 Jul 2025 19:03:49 +0200 Subject: [PATCH 067/382] treewide: Fix Meson CPU names for powerpc CPUs --- nix-meson-build-support/default-system-cpu/meson.build | 9 +++++++++ src/libstore/meson.build | 4 +++- tests/functional/meson.build | 4 +++- tests/functional/nix-meson-build-support | 1 + tests/functional/package.nix | 1 + 5 files changed, 17 insertions(+), 2 deletions(-) create mode 100644 nix-meson-build-support/default-system-cpu/meson.build create mode 120000 tests/functional/nix-meson-build-support diff --git a/nix-meson-build-support/default-system-cpu/meson.build b/nix-meson-build-support/default-system-cpu/meson.build new file mode 100644 index 000000000..fd447aa01 --- /dev/null +++ b/nix-meson-build-support/default-system-cpu/meson.build @@ -0,0 +1,9 @@ +nix_system_cpu = { + 'ppc64' : 'powerpc64', + 'ppc64le' : 'powerpc64le', + 'ppc' : 'powerpc', + 'ppcle' : 'powerpcle', +}.get( + host_machine.cpu_family(), + host_machine.cpu_family(), +) diff --git a/src/libstore/meson.build b/src/libstore/meson.build index d82bcddc1..e9e78b750 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -21,10 +21,12 @@ configdata_priv = configuration_data() # TODO rename, because it will conflict with downstream projects 
configdata_priv.set_quoted('PACKAGE_VERSION', meson.project_version()) +subdir('nix-meson-build-support/default-system-cpu') + # Used in public header. configdata_pub.set_quoted( 'NIX_LOCAL_SYSTEM', - host_machine.cpu_family() + '-' + host_machine.system(), + nix_system_cpu + '-' + host_machine.system(), description : 'This is the system name Nix expects for local running instance of Nix.\n\n' + 'See the "system" setting for additional details', diff --git a/tests/functional/meson.build b/tests/functional/meson.build index 501ed45c7..b03507c91 100644 --- a/tests/functional/meson.build +++ b/tests/functional/meson.build @@ -23,6 +23,8 @@ dot = find_program('dot', native : true, required : false) nix_bin_dir = fs.parent(nix.full_path()) +subdir('nix-meson-build-support/default-system-cpu') + test_confdata = { 'bindir': nix_bin_dir, 'coreutils': fs.parent(coreutils.full_path()), @@ -30,7 +32,7 @@ test_confdata = { 'bash': bash.full_path(), 'sandbox_shell': busybox.found() ? busybox.full_path() : '', 'PACKAGE_VERSION': meson.project_version(), - 'system': host_machine.cpu_family() + '-' + host_machine.system(), + 'system': nix_system_cpu + '-' + host_machine.system(), } # Just configures `common/vars-and-functions.sh.in`. diff --git a/tests/functional/nix-meson-build-support b/tests/functional/nix-meson-build-support new file mode 120000 index 000000000..0b140f56b --- /dev/null +++ b/tests/functional/nix-meson-build-support @@ -0,0 +1 @@ +../../nix-meson-build-support \ No newline at end of file diff --git a/tests/functional/package.nix b/tests/functional/package.nix index 43f2f25a2..716e21fe4 100644 --- a/tests/functional/package.nix +++ b/tests/functional/package.nix @@ -39,6 +39,7 @@ mkMesonDerivation ( workDir = ./.; fileset = fileset.unions [ + ../../nix-meson-build-support ../../scripts/nix-profile.sh.in ../../.version ../../tests/functional From 9bc6c30d97639a649ff04698d9a51a5e40ad6b80 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 22 Jul 2025 02:39:06 +0300 Subject: [PATCH 068/382] meson: Further optimize compile times with PCH template instantiations This is a follow-up to 6ec50ba73664838993d645dc78c936542eb2012c, which also almost halves the compile times on clang for subprojects that use PCH. `-fpch-instantiate-templates` is a clang-only option to force the instantiation of templates once in the PCH itself, not all of the translation units that it gets included to. This really cuts down on the overhead from nlohmann::json and std::format code: 48244 ms: nlohmann::basic_json<>::parse (76 times, avg 634 ms) 36193 ms: nlohmann::basic_json<>::basic_json (310 times, avg 116 ms) 28307 ms: nlohmann::detail::parser, nlohmann::detail::i... (76 times, avg 372 ms) 20334 ms: nlohmann::detail::parser, nlohmann::detail::i... (76 times, avg 267 ms) 17387 ms: nlohmann::basic_json<>::json_value::json_value (389 times, avg 44 ms) 16822 ms: std::vformat_to> (76 times, avg 221 ms) 16771 ms: std::__format::__do_vformat_to, char... (76 times, avg 220 ms) 12160 ms: std::vformat_to> (76 times, avg 160 ms) 12127 ms: std::__format::__do_vformat_to, w... (76 times, avg 159 ms) 10397 ms: nlohmann::detail::json_sax_dom_callback_parser::data::data (76 times, avg 119 ms) Initially done by Jade Lovelace in https://gerrit.lix.systems/c/lix/+/1842. We are doing basically the same, but unconditionally. It would be a huge pain to add a pch option for all subprojects to just support the usecase of using clangd in a gcc devshell. 
In total, this basically halves the frontend times for nix-util and nix-store to the point that the most expensive part of the build is linking. (nix-store before): ``` **** Time summary: Compilation (77 times): Parsing (frontend): 243.4 s Codegen & opts (backend): 140.3 s ``` (nix-store after): ``` **** Time summary: Compilation (77 times): Parsing (frontend): 120.2 s Codegen & opts (backend): 141.2 s ``` --- nix-meson-build-support/common/meson.build | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/nix-meson-build-support/common/meson.build b/nix-meson-build-support/common/meson.build index b9140256a..df8b958c0 100644 --- a/nix-meson-build-support/common/meson.build +++ b/nix-meson-build-support/common/meson.build @@ -18,3 +18,14 @@ add_project_arguments( '-Wno-deprecated-declarations', language : 'cpp', ) + +# This is a clang-only option for improving build times. +# It forces the instantiation of templates in the PCH itself and +# not every translation unit it's included in. +# It's available starting from clang 11, which is old enough to not +# bother checking the version. +# This feature helps in particular with the expensive nlohmann::json template +# instantiations in libutil and libstore. +if cxx.get_id() == 'clang' + add_project_arguments('-fpch-instantiate-templates', language : 'cpp') +endif From 48083028ac075f251442bab34013c92394903aee Mon Sep 17 00:00:00 2001 From: Farid Zakaria Date: Fri, 11 Jul 2025 15:49:27 -0700 Subject: [PATCH 069/382] Add a test case for failing git cache builtins.fetchGit is not using the cached Git directory if packed-references are used. This is because the ref file for the fetchGit `refs/heads/master` is used to check the mtime for whether to cache or not. Let's at least codify this failure in a test case. --- tests/functional/git/meson.build | 6 ++ tests/functional/git/packed-refs-no-cache.sh | 70 ++++++++++++++++++++ tests/functional/meson.build | 1 + 3 files changed, 77 insertions(+) create mode 100644 tests/functional/git/meson.build create mode 100644 tests/functional/git/packed-refs-no-cache.sh diff --git a/tests/functional/git/meson.build b/tests/functional/git/meson.build new file mode 100644 index 000000000..af6882698 --- /dev/null +++ b/tests/functional/git/meson.build @@ -0,0 +1,6 @@ +suites += { + 'name' : 'git', + 'deps' : [], + 'tests' : [ 'packed-refs-no-cache.sh' ], + 'workdir' : meson.current_source_dir(), +} diff --git a/tests/functional/git/packed-refs-no-cache.sh b/tests/functional/git/packed-refs-no-cache.sh new file mode 100644 index 000000000..0f39da775 --- /dev/null +++ b/tests/functional/git/packed-refs-no-cache.sh @@ -0,0 +1,70 @@ +#!/usr/bin/env bash + +source ../common.sh + +requireGit + +clearStoreIfPossible + +# Intentionally not in a canonical form +# See https://github.com/NixOS/nix/issues/6195 +repo=$TEST_ROOT/./git + +export _NIX_FORCE_HTTP=1 + +rm -rf "$repo" "${repo}-tmp" "$TEST_HOME/.cache/nix" + +git init --initial-branch="master" "$repo" +git -C "$repo" config user.email "nix-tests@example.com" +git -C "$repo" config user.name "Nix Tests" + +echo "hello world" > "$repo/hello_world" +git -C "$repo" add hello_world +git -C "$repo" commit -m 'My first commit.' 
+ +# We now do an eval +nix eval --impure --raw --expr "builtins.fetchGit { url = file://$repo; }" + +# test that our eval even worked by checking for the presence of the file +[[ $(nix eval --impure --raw --expr "builtins.readFile ((builtins.fetchGit { url = file://$repo; }) + \"/hello_world\")") = 'hello world' ]] + +# Validate that refs/heads/master exists +shopt -s nullglob +matches=("$TEST_HOME/.cache/nix/gitv3/*/refs/heads/master") +shopt -u nullglob + +if [[ ${#matches[@]} -eq 0 ]]; then + echo "refs/heads/master does not exist." + exit 1 +fi +# pack refs +git -C "$TEST_HOME"/.cache/nix/gitv3/*/ pack-refs --all + +shopt -s nullglob +matches=("$TEST_HOME"/.cache/nix/gitv3/*/refs/heads/master) +shopt -u nullglob + +# ensure refs/heads/master is now gone +if [[ ${#matches[@]} -ne 0 ]]; then + echo "refs/heads/master still exists after pack-refs" + exit 1 +fi + +# create a new commit +echo "hello again" > "$repo/hello_again" +git -C "$repo" add hello_again +git -C "$repo" commit -m 'Second commit.' + +# re-eval — this should return the path to the cached version +store_path=$(nix eval --tarball-ttl 3600 --impure --raw --expr "(builtins.fetchGit { url = file://$repo; }).outPath") +echo "Fetched store path: $store_path" + +# Validate that the new file is *not* there +# FIXME: This is a broken test case and we should swap the assertion here. +if [[ -e "$store_path/hello_again" ]]; then + echo "ERROR: Cached fetchGit should not include the new commit." + exit 0 +else + echo "PASS: New commit was not fetched due to caching (as expected)." + exit 1 +fi \ No newline at end of file diff --git a/tests/functional/meson.build b/tests/functional/meson.build index b03507c91..0e2004219 100644 --- a/tests/functional/meson.build +++ b/tests/functional/meson.build @@ -213,6 +213,7 @@ endif subdir('ca') subdir('dyn-drv') subdir('flakes') +subdir('git') subdir('git-hashing') subdir('local-overlay-store') From 0c32b0c8c32b558f6b2c3782172181ba2f2c3558 Mon Sep 17 00:00:00 2001 From: Farid Zakaria Date: Fri, 11 Jul 2025 15:57:27 -0700 Subject: [PATCH 070/382] Added comment to test case --- tests/functional/git/packed-refs-no-cache.sh | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/tests/functional/git/packed-refs-no-cache.sh b/tests/functional/git/packed-refs-no-cache.sh index 0f39da775..54e0ab901 100644 --- a/tests/functional/git/packed-refs-no-cache.sh +++ b/tests/functional/git/packed-refs-no-cache.sh @@ -1,5 +1,16 @@ #!/usr/bin/env bash +# Please see https://github.com/NixOS/nix/issues/13457 +# for a higher description of the purpose of the test. +# tl;dr;fetchGit will utilize the git cache and avoid refetching when possible. +# It relies on the presence of either the commit when rev is provided +# or checks if the ref refs/heads/ if ref is provided. +# +# Unfortunately, git can occasionally "pack references" which moves the references +# from individual files to a single unifies file. +# When this occurs, nix can no longer check for the presence of the ref to check +# for the mtime and will refetch unnecessarily. + source ../common.sh requireGit @@ -67,4 +78,4 @@ if [[ -e "$store_path/hello_again" ]]; then else echo "PASS: New commit was not fetched due to caching (as expected)." 
   exit 1
-fi
\ No newline at end of file
+fi

From ebd311b7b70731225d94f0e1645fa7b08452765d Mon Sep 17 00:00:00 2001
From: Sergei Zimmerman
Date: Tue, 22 Jul 2025 03:27:27 +0300
Subject: [PATCH 071/382] meson: Correctly handle endianness for PowerPC CPU
 families

I missed this while reviewing 6db61900028ec641f12b1d36fe4ece5a9bdaa66f.
I only built big endian ppc64, so that didn't occur to me.

From the meson manual:

> Those porting from autotools should note that Meson does not add
> endianness to the name of the cpu_family. For example, autotools will
> call little endian PPC64 "ppc64le", Meson will not, you must also check
> the .endian() value of the machine for this information.

This code should handle that correctly.
---
 .../default-system-cpu/meson.build            | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/nix-meson-build-support/default-system-cpu/meson.build b/nix-meson-build-support/default-system-cpu/meson.build
index fd447aa01..2221265f0 100644
--- a/nix-meson-build-support/default-system-cpu/meson.build
+++ b/nix-meson-build-support/default-system-cpu/meson.build
@@ -1,9 +1,10 @@
-nix_system_cpu = {
-  'ppc64' : 'powerpc64',
-  'ppc64le' : 'powerpc64le',
-  'ppc' : 'powerpc',
-  'ppcle' : 'powerpcle',
-}.get(
+powerpc_system_cpus = [ 'ppc64', 'ppc' ]
+
+nix_system_cpu = {'ppc64' : 'powerpc64', 'ppc' : 'powerpc'}.get(
   host_machine.cpu_family(),
   host_machine.cpu_family(),
 )
+
+if powerpc_system_cpus.contains(host_machine.cpu_family()) and host_machine.endian() == 'little'
+  nix_system_cpu += 'le'
+endif

From 33ceea60998646e8f0b21f473d5cb799d90ca387 Mon Sep 17 00:00:00 2001
From: Farid Zakaria
Date: Mon, 21 Jul 2025 21:10:41 -0700
Subject: [PATCH 072/382] Unpeel reference for git+file

If the reference for git+file is an annotated tag, the revision will
differ from when it's fetched using other fetchers such as `github:`,
since GitHub seems to automatically peel to the underlying commit.

Turns out that rev-parse has the capability through its syntax to
request the underlying commit by "peeling" using the `^{commit}` syntax.

This is safe to apply in all scenarios where the goal is to get an
underlying commit.

fixes #11266
---
 src/libfetchers/git-utils.cc | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc
index f45360f71..a758848b2 100644
--- a/src/libfetchers/git-utils.cc
+++ b/src/libfetchers/git-utils.cc
@@ -360,7 +360,13 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
     Hash resolveRef(std::string ref) override
     {
         Object object;
-        if (git_revparse_single(Setter(object), *this, ref.c_str()))
+
+        // Using the rev-parse notation which libgit2 supports, make sure we peel
+        // the ref ultimately down to the underlying commit.
+        // This is to handle the case where it may be an annotated tag which itself has
+        // an object_id.
+ std::string peeledRef = ref + "^{commit}"; + if (git_revparse_single(Setter(object), *this, peeledRef.c_str())) throw Error("resolving Git reference '%s': %s", ref, git_error_last()->message); auto oid = git_object_id(object.get()); return toHash(*oid); From 98858148dc8da11533d5c2bbae51f0bc9d7e6b04 Mon Sep 17 00:00:00 2001 From: Farid Zakaria Date: Mon, 21 Jul 2025 21:56:11 -0700 Subject: [PATCH 073/382] Add unit test --- src/libfetchers-tests/git-utils.cc | 60 +++++++++++++++++++++++++++++- 1 file changed, 59 insertions(+), 1 deletion(-) diff --git a/src/libfetchers-tests/git-utils.cc b/src/libfetchers-tests/git-utils.cc index c2c7f9da0..bfba3d679 100644 --- a/src/libfetchers-tests/git-utils.cc +++ b/src/libfetchers-tests/git-utils.cc @@ -3,20 +3,28 @@ #include #include #include +#include #include +#include +#include #include #include "nix/util/fs-sink.hh" #include "nix/util/serialise.hh" #include "nix/fetchers/git-lfs-fetch.hh" +#include +#include + namespace nix { class GitUtilsTest : public ::testing::Test { // We use a single repository for all tests. - std::filesystem::path tmpDir; std::unique_ptr delTmpDir; +protected: + std::filesystem::path tmpDir; + public: void SetUp() override { @@ -115,4 +123,54 @@ TEST_F(GitUtilsTest, sink_hardlink) } }; +TEST_F(GitUtilsTest, peel_reference) +{ + // Create a commit in the repo + git_repository * rawRepo = nullptr; + ASSERT_EQ(git_repository_open(&rawRepo, tmpDir.string().c_str()), 0); + + // Create a blob + git_oid blob_oid; + const char * blob_content = "hello world"; + ASSERT_EQ(git_blob_create_from_buffer(&blob_oid, rawRepo, blob_content, strlen(blob_content)), 0); + + // Create a tree with that blob + git_treebuilder * builder = nullptr; + ASSERT_EQ(git_treebuilder_new(&builder, rawRepo, nullptr), 0); + ASSERT_EQ(git_treebuilder_insert(nullptr, builder, "file.txt", &blob_oid, GIT_FILEMODE_BLOB), 0); + + git_oid tree_oid; + ASSERT_EQ(git_treebuilder_write(&tree_oid, builder), 0); + git_treebuilder_free(builder); + + git_tree * tree = nullptr; + ASSERT_EQ(git_tree_lookup(&tree, rawRepo, &tree_oid), 0); + + // Create a commit + git_signature * sig = nullptr; + ASSERT_EQ(git_signature_now(&sig, "nix", "nix@example.com"), 0); + + git_oid commit_oid; + ASSERT_EQ(git_commit_create_v(&commit_oid, rawRepo, "HEAD", sig, sig, nullptr, "initial commit", tree, 0), 0); + + // Lookup our commit + git_object * commit_object = nullptr; + ASSERT_EQ(git_object_lookup(&commit_object, rawRepo, &commit_oid, GIT_OBJECT_COMMIT), 0); + + // Create annotated tag + git_oid tag_oid; + ASSERT_EQ(git_tag_create(&tag_oid, rawRepo, "v1", commit_object, sig, "annotated tag", 0), 0); + + auto repo = openRepo(); + + // Use resolveRef to get peeled object + auto resolved = repo->resolveRef("refs/tags/v1"); + + // Now assert that we have unpeeled it! 
+ ASSERT_STREQ(resolved.gitRev().c_str(), git_oid_tostr_s(&commit_oid)); + + git_signature_free(sig); + git_repository_free(rawRepo); +} + } // namespace nix From aadfb682d41881e261b929266dc3192794562a3c Mon Sep 17 00:00:00 2001 From: Farid Zakaria Date: Mon, 21 Jul 2025 22:01:05 -0700 Subject: [PATCH 074/382] Fix fetchGit functional tests to peel as well --- tests/functional/fetchGit.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh index dc5d8f818..e7c9c77a5 100755 --- a/tests/functional/fetchGit.sh +++ b/tests/functional/fetchGit.sh @@ -236,10 +236,10 @@ path9=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$rep # Specifying a ref without a rev shouldn't pick a cached rev for a different ref export _NIX_FORCE_HTTP=1 rev_tag1_nix=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"refs/tags/tag1\"; }).rev") -rev_tag1=$(git -C $repo rev-parse refs/tags/tag1) +rev_tag1=$(git -C $repo rev-parse refs/tags/tag1^{commit}) [[ $rev_tag1_nix = $rev_tag1 ]] rev_tag2_nix=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"refs/tags/tag2\"; }).rev") -rev_tag2=$(git -C $repo rev-parse refs/tags/tag2) +rev_tag2=$(git -C $repo rev-parse refs/tags/tag2^{commit}) [[ $rev_tag2_nix = $rev_tag2 ]] unset _NIX_FORCE_HTTP From ff6ab3b213d7b55495a719e8b4b02cd2336b455d Mon Sep 17 00:00:00 2001 From: Jens Petersen Date: Tue, 22 Jul 2025 22:58:55 +0800 Subject: [PATCH 075/382] meson: libfetchers needs libgit2 1.9+ MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit libfetchers uses `git_mempack_write_thin_pack` which was introduced in libgit2-1.9.0 This avoids error like: ../src/libfetchers/git-utils.cc: In member function ‘virtual void nix::GitRepoImpl::flush()’: ../src/libfetchers/git-utils.cc:270:13: error: ‘git_mempack_write_thin_pack’ was not declared in this scope 270 | git_mempack_write_thin_pack(mempack_backend, packBuilder.get()) | ^~~~~~~~~~~~~~~~~~~~~~~~~~~ on older libgit2 (like 1.7.2 in Centos Stream 10) --- src/libfetchers/meson.build | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libfetchers/meson.build b/src/libfetchers/meson.build index 321146ca4..b549735e5 100644 --- a/src/libfetchers/meson.build +++ b/src/libfetchers/meson.build @@ -27,7 +27,7 @@ subdir('nix-meson-build-support/subprojects') nlohmann_json = dependency('nlohmann_json', version : '>= 3.9') deps_public += nlohmann_json -libgit2 = dependency('libgit2') +libgit2 = dependency('libgit2', version : '>= 1.9') deps_private += libgit2 subdir('nix-meson-build-support/common') From f0c7fbcdab5006db46c73c642bea7c5de396fe3a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gr=C3=A9gory=20marti?= Date: Tue, 22 Jul 2025 17:39:29 +0200 Subject: [PATCH 076/382] Add /etc/ssl/certs/ca-certificates.crt in docker.nix --- docker.nix | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docker.nix b/docker.nix index 410e4a178..defd491f6 100644 --- a/docker.nix +++ b/docker.nix @@ -282,7 +282,10 @@ let # may get replaced by pkgs.dockerTools.caCertificates mkdir -p $out/etc/ssl/certs + # Old NixOS compatibility. 
           ln -s /nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt $out/etc/ssl/certs
+          # NixOS canonical location
+          ln -s /nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt $out/etc/ssl/certs/ca-certificates.crt
 
           cat $passwdContentsPath > $out/etc/passwd
           echo "" >> $out/etc/passwd

From 143b87c1a95e05e681c97f5d8aa010b18104b1d5 Mon Sep 17 00:00:00 2001
From: Dmitry Bogatov
Date: Tue, 22 Jul 2025 20:00:00 -0400
Subject: [PATCH 077/382] Fix documentation of the NAR archive structure

For regular, non-executable files, there is no str("") between
str("regular") and str("contents"). Note that str("") is exactly 8 zero
bytes, while just "" is an actual empty string (0 bytes).
---
 doc/manual/source/protocols/nix-archive.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/manual/source/protocols/nix-archive.md b/doc/manual/source/protocols/nix-archive.md
index 640b527f1..02a8dd464 100644
--- a/doc/manual/source/protocols/nix-archive.md
+++ b/doc/manual/source/protocols/nix-archive.md
@@ -24,7 +24,7 @@ nar-obj-inner
   | str("type"), str("directory") directory
   ;
 
-regular = [ str("executable"), str("") ], str("contents"), str(contents);
+regular = [ str("executable") ], str("contents"), str(contents);
 
 symlink = str("target"), str(target);

From bd29dc67b7ca85799ee8d9faaea1e4c656d3f6e2 Mon Sep 17 00:00:00 2001
From: Alan Urmancheev <108410815+alurm@users.noreply.github.com>
Date: Wed, 23 Jul 2025 17:32:52 +0400
Subject: [PATCH 078/382] doc/advanced-attributes.md: remove stray backslashes
 in the rendered output

They have appeared literally because there was an empty line after them.
---
 doc/manual/source/language/advanced-attributes.md | 2 --
 1 file changed, 2 deletions(-)

diff --git a/doc/manual/source/language/advanced-attributes.md b/doc/manual/source/language/advanced-attributes.md
index 34c3b636b..c9d64f060 100644
--- a/doc/manual/source/language/advanced-attributes.md
+++ b/doc/manual/source/language/advanced-attributes.md
@@ -160,7 +160,6 @@ See the [corresponding section in the derivation output page](@docroot@/store/de
 ## Other output modifications
 
 - [`unsafeDiscardReferences`]{#adv-attr-unsafeDiscardReferences}\
-
   When using [structured attributes](#adv-attr-structuredAttrs), the attribute
   `unsafeDiscardReferences` is an attribute set with a boolean value for each output name.
   If set to `true`, it disables scanning the output for runtime dependencies.
@@ -195,7 +194,6 @@ See the [corresponding section in the derivation output page](@docroot@/store/de
   [`builder`]: ./derivations.md#attr-builder
 
 - [`requiredSystemFeatures`]{#adv-attr-requiredSystemFeatures}\
-
   If a derivation has the `requiredSystemFeatures` attribute, then Nix will only build it on a machine that has the corresponding features set in its [`system-features` configuration](@docroot@/command-ref/conf-file.md#conf-system-features).
For example, setting From c6d0d33e4dc96e262cde5a05cde2540a84bd8ad7 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sat, 19 Jul 2025 14:47:09 +0300 Subject: [PATCH 079/382] ci: Simplify installer tests --- .github/workflows/ci.yml | 10 +++++----- scripts/serve-installer-for-github-actions | 22 ---------------------- 2 files changed, 5 insertions(+), 27 deletions(-) delete mode 100755 scripts/serve-installer-for-github-actions diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6f5167834..f1faeb505 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -81,13 +81,13 @@ jobs: with: name: installer-${{matrix.os}} path: out - - name: Serving installer - id: serving_installer - run: ./scripts/serve-installer-for-github-actions + - name: Looking up the installer tarball URL + id: installer-tarball-url + run: echo "installer-url=file://$GITHUB_WORKSPACE/out" >> "$GITHUB_OUTPUT" - uses: cachix/install-nix-action@v31 with: - install_url: 'http://localhost:8126/install' - install_options: "--tarball-url-prefix http://localhost:8126/" + install_url: ${{ format('{0}/install', steps.installer-tarball-url.outputs.installer-url) }} + install_options: ${{ format('--tarball-url-prefix {0}', steps.installer-tarball-url.outputs.installer-url) }} - run: sudo apt install fish zsh if: matrix.os == 'linux' - run: brew install fish diff --git a/scripts/serve-installer-for-github-actions b/scripts/serve-installer-for-github-actions deleted file mode 100755 index 2efd2aa32..000000000 --- a/scripts/serve-installer-for-github-actions +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash - -set -euo pipefail -if [[ ! -d out ]]; then - echo "run prepare-installer-for-github-actions first" - exit 1 -fi -cd out -PORT=${PORT:-8126} -nohup python -m http.server "$PORT" >/dev/null 2>&1 & -pid=$! - -while ! curl -s "http://localhost:$PORT"; do - sleep 1 - if ! kill -0 $pid; then - echo "Failed to start http server" - exit 1 - fi -done - -echo 'To install nix, run the following command:' -echo "sh <(curl http://localhost:$PORT/install) --tarball-url-prefix http://localhost:$PORT" From 54dc5314e85b2803c1d870fde61ec4105a35adee Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 24 Jul 2025 18:06:06 +0200 Subject: [PATCH 080/382] Fix nix_system_cpu on i686-linux Fixes #13532. 
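For reference, here is a minimal sketch (illustrative only, not part of the
change itself) of the translation this fix relies on: on an i686 machine Meson
reports the CPU family as `x86`, which has to be mapped to the `i686` half of
the Nix system double. The variable name `nix_system_cpu` matches the one used
in nix-meson-build-support/default-system-cpu/meson.build; the `message()`
call is only there for demonstration.

```meson
# Illustrative sketch: map Meson's cpu_family() to the CPU part of the Nix
# system double, falling back to the family name itself when no translation
# is needed.
cpu = host_machine.cpu_family()
nix_system_cpu = { 'x86' : 'i686' }.get(cpu, cpu)
message('Nix system CPU: ' + nix_system_cpu) # prints 'i686' on an i686-linux host
```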
--- nix-meson-build-support/default-system-cpu/meson.build | 2 +- tests/functional/misc.sh | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/nix-meson-build-support/default-system-cpu/meson.build b/nix-meson-build-support/default-system-cpu/meson.build index 2221265f0..4a0e89e10 100644 --- a/nix-meson-build-support/default-system-cpu/meson.build +++ b/nix-meson-build-support/default-system-cpu/meson.build @@ -1,6 +1,6 @@ powerpc_system_cpus = [ 'ppc64', 'ppc' ] -nix_system_cpu = {'ppc64' : 'powerpc64', 'ppc' : 'powerpc'}.get( +nix_system_cpu = {'ppc64' : 'powerpc64', 'ppc' : 'powerpc', 'x86' : 'i686'}.get( host_machine.cpu_family(), host_machine.cpu_family(), ) diff --git a/tests/functional/misc.sh b/tests/functional/misc.sh index cb4d4139f..b94a5fc57 100755 --- a/tests/functional/misc.sh +++ b/tests/functional/misc.sh @@ -44,3 +44,7 @@ out="$(expectStderr 0 nix-instantiate --option foobar baz --expr '{}')" out="$(expectStderr 0 nix-instantiate '{}' --option foobar baz --expr )" [[ "$(echo "$out" | grep foobar | wc -l)" = 1 ]] + +if [[ $(uname) = Linux && $(uname -m) = i686 ]]; then + [[ $(nix config show system) = i686-linux ]] +fi From f0695e177f22e888fa3d4a2b362efb63cbfdc561 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 25 Jul 2025 00:05:15 +0300 Subject: [PATCH 081/382] ci: Make it actually possible to disable dogfooding Github composite actions are a real treat. Boolean inputs are not actually booleans but rather strings [1]. [1]: https://www.github.com/actions/runner/issues/2238 --- .github/actions/install-nix-action/action.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/actions/install-nix-action/action.yaml b/.github/actions/install-nix-action/action.yaml index 28103f589..bc2c022c7 100644 --- a/.github/actions/install-nix-action/action.yaml +++ b/.github/actions/install-nix-action/action.yaml @@ -19,7 +19,7 @@ runs: - name: "Download nix install artifact from master" shell: bash id: download-nix-installer - if: ${{ inputs.dogfood }} + if: inputs.dogfood == 'true' run: | RUN_ID=$(gh run list --repo "$DOGFOOD_REPO" --workflow ci.yml --branch master --status success --json databaseId --jq ".[0].databaseId") @@ -45,6 +45,6 @@ runs: - uses: cachix/install-nix-action@c134e4c9e34bac6cab09cf239815f9339aaaf84e # v31.5.1 with: # Ternary operator in GHA: https://www.github.com/actions/runner/issues/409#issuecomment-752775072 - install_url: ${{ inputs.dogfood && format('{0}/install', steps.download-nix-installer.outputs.installer-path) || inputs.install_url }} - install_options: ${{ inputs.dogfood && format('--tarball-url-prefix {0}', steps.download-nix-installer.outputs.installer-path) || '' }} + install_url: ${{ inputs.dogfood == 'true' && format('{0}/install', steps.download-nix-installer.outputs.installer-path) || inputs.install_url }} + install_options: ${{ inputs.dogfood == 'true' && format('--tarball-url-prefix {0}', steps.download-nix-installer.outputs.installer-path) || '' }} extra_nix_config: ${{ inputs.extra_nix_config }} From 35835e0644f3300d0af1129a20676eb12171b2ce Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 25 Jul 2025 10:04:49 +0200 Subject: [PATCH 082/382] Fix documentation link The file was renamed. We've also moved to nix.dev, but that was redirected properly. 
Closes #13488 --- src/libexpr/eval.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index f0d64d44a..293b05953 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1812,7 +1812,7 @@ void EvalState::autoCallFunction(const Bindings & args, Value & fun, Value & res Nix attempted to evaluate a function as a top level expression; in this case it must have its arguments supplied either by default values, or passed explicitly with '--arg' or '--argstr'. See -https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", +https://nix.dev/manual/nix/stable/language/syntax.html#functions.)", symbols[i.name]) .atPos(i.pos) .withFrame(*fun.lambda().env, *fun.lambda().fun) From b7782809cbe2b2308d4cf54f22a59390c00305ea Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 25 Jul 2025 10:09:22 +0200 Subject: [PATCH 083/382] docs, messages: Resolve nixos.org/manual/nix/stable redirect Loads a bit quicker --- doc/manual/source/release-notes/rl-2.8.md | 2 +- scripts/install-nix-from-tarball.sh | 6 +++--- src/libstore-c/nix_api_store.h | 4 ++-- src/nix/nix.md | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/doc/manual/source/release-notes/rl-2.8.md b/doc/manual/source/release-notes/rl-2.8.md index 9778e8c3a..686445208 100644 --- a/doc/manual/source/release-notes/rl-2.8.md +++ b/doc/manual/source/release-notes/rl-2.8.md @@ -48,6 +48,6 @@ * `nix run` is now stricter in what it accepts: members of the `apps` flake output are now required to be apps (as defined in [the - manual](https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-run.html#apps)), + manual](https://nix.dev/manual/nix/stable/command-ref/new-cli/nix3-run.html#apps)), and members of `packages` or `legacyPackages` must be derivations (not apps). diff --git a/scripts/install-nix-from-tarball.sh b/scripts/install-nix-from-tarball.sh index ec3264793..38fe3af74 100644 --- a/scripts/install-nix-from-tarball.sh +++ b/scripts/install-nix-from-tarball.sh @@ -40,7 +40,7 @@ fi # Determine if we could use the multi-user installer or not if [ "$(uname -s)" = "Linux" ]; then - echo "Note: a multi-user installation is possible. See https://nixos.org/manual/nix/stable/installation/installing-binary.html#multi-user-installation" >&2 + echo "Note: a multi-user installation is possible. See https://nix.dev/manual/nix/stable/installation/installing-binary.html#multi-user-installation" >&2 fi case "$(uname -s)" in @@ -96,7 +96,7 @@ while [ $# -gt 0 ]; do echo " providing multi-user support and better isolation for local builds." echo " Both for security and reproducibility, this method is recommended if" echo " supported on your platform." - echo " See https://nixos.org/manual/nix/stable/installation/installing-binary.html#multi-user-installation" + echo " See https://nix.dev/manual/nix/stable/installation/installing-binary.html#multi-user-installation" echo "" echo " --no-daemon: Simple, single-user installation that does not require root and is" echo " trivial to uninstall." @@ -144,7 +144,7 @@ if ! [ -e "$dest" ]; then fi if ! [ -w "$dest" ]; then - echo "$0: directory $dest exists, but is not writable by you. This could indicate that another user has already performed a single-user installation of Nix on this system. If you wish to enable multi-user support see https://nixos.org/manual/nix/stable/installation/multi-user.html. If you wish to continue with a single-user install for $USER please run 'chown -R $USER $dest' as root." 
>&2 + echo "$0: directory $dest exists, but is not writable by you. This could indicate that another user has already performed a single-user installation of Nix on this system. If you wish to enable multi-user support see https://nix.dev/manual/nix/stable/installation/multi-user.html. If you wish to continue with a single-user install for $USER please run 'chown -R $USER $dest' as root." >&2 exit 1 fi diff --git a/src/libstore-c/nix_api_store.h b/src/libstore-c/nix_api_store.h index e55bc3f59..ad3d7b22a 100644 --- a/src/libstore-c/nix_api_store.h +++ b/src/libstore-c/nix_api_store.h @@ -57,14 +57,14 @@ nix_err nix_libstore_init_no_load_config(nix_c_context * context); * ignores `NIX_REMOTE` and the `store` option. For this reason, `NULL` is most likely the better choice. * * For supported store URLs, see [*Store URL format* in the Nix Reference - * Manual](https://nixos.org/manual/nix/stable/store/types/#store-url-format). + * Manual](https://nix.dev/manual/nix/stable/store/types/#store-url-format). * @endparblock * * @param[in] params @parblock * optional, null-terminated array of key-value pairs, e.g. {{"endpoint", * "https://s3.local"}}. * - * See [*Store Types* in the Nix Reference Manual](https://nixos.org/manual/nix/stable/store/types). + * See [*Store Types* in the Nix Reference Manual](https://nix.dev/manual/nix/stable/store/types). * @endparblock * * @return a Store pointer, NULL in case of errors diff --git a/src/nix/nix.md b/src/nix/nix.md index b88bd9a94..10a2aaee8 100644 --- a/src/nix/nix.md +++ b/src/nix/nix.md @@ -44,7 +44,7 @@ R""( Nix is a tool for building software, configurations and other artifacts in a reproducible and declarative way. For more information, see the [Nix homepage](https://nixos.org/) or the [Nix -manual](https://nixos.org/manual/nix/stable/). +manual](https://nix.dev/manual/nix/stable/). 
# Installables From 49f411c08c7a7c07f5a48291368ab553bbdb0fca Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 25 Jul 2025 10:15:13 +0200 Subject: [PATCH 084/382] Update developer facing links to nix.dev Not broken, but it's a tiny bit quicker --- .github/ISSUE_TEMPLATE/bug_report.md | 2 +- .github/ISSUE_TEMPLATE/feature_request.md | 2 +- .github/ISSUE_TEMPLATE/installer.md | 2 +- .github/ISSUE_TEMPLATE/missing_documentation.md | 2 +- CONTRIBUTING.md | 2 +- src/libcmd/include/nix/cmd/common-eval-args.hh | 2 +- src/libflake/include/nix/flake/flakeref.hh | 6 +++--- src/libutil/include/nix/util/args.hh | 4 ++-- 8 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index a5005f8a0..af94c3e9e 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -45,7 +45,7 @@ assignees: '' - [ ] checked [latest Nix manual] \([source]) - [ ] checked [open bug issues and pull requests] for possible duplicates -[latest Nix manual]: https://nixos.org/manual/nix/unstable/ +[latest Nix manual]: https://nix.dev/manual/nix/development/ [source]: https://github.com/NixOS/nix/tree/master/doc/manual/source [open bug issues and pull requests]: https://github.com/NixOS/nix/labels/bug diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index c75a46951..fe9f9dd20 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -30,7 +30,7 @@ assignees: '' - [ ] checked [latest Nix manual] \([source]) - [ ] checked [open feature issues and pull requests] for possible duplicates -[latest Nix manual]: https://nixos.org/manual/nix/unstable/ +[latest Nix manual]: https://nix.dev/manual/nix/development/ [source]: https://github.com/NixOS/nix/tree/master/doc/manual/source [open feature issues and pull requests]: https://github.com/NixOS/nix/labels/feature diff --git a/.github/ISSUE_TEMPLATE/installer.md b/.github/ISSUE_TEMPLATE/installer.md index ed5e1ce87..070e0bd9b 100644 --- a/.github/ISSUE_TEMPLATE/installer.md +++ b/.github/ISSUE_TEMPLATE/installer.md @@ -38,7 +38,7 @@ assignees: '' - [ ] checked [latest Nix manual] \([source]) - [ ] checked [open installer issues and pull requests] for possible duplicates -[latest Nix manual]: https://nixos.org/manual/nix/unstable/ +[latest Nix manual]: https://nix.dev/manual/nix/development/ [source]: https://github.com/NixOS/nix/tree/master/doc/manual/source [open installer issues and pull requests]: https://github.com/NixOS/nix/labels/installer diff --git a/.github/ISSUE_TEMPLATE/missing_documentation.md b/.github/ISSUE_TEMPLATE/missing_documentation.md index 6c334b722..4e05b626d 100644 --- a/.github/ISSUE_TEMPLATE/missing_documentation.md +++ b/.github/ISSUE_TEMPLATE/missing_documentation.md @@ -22,7 +22,7 @@ assignees: '' - [ ] checked [latest Nix manual] \([source]) - [ ] checked [open documentation issues and pull requests] for possible duplicates -[latest Nix manual]: https://nixos.org/manual/nix/unstable/ +[latest Nix manual]: https://nix.dev/manual/nix/development/ [source]: https://github.com/NixOS/nix/tree/master/doc/manual/source [open documentation issues and pull requests]: https://github.com/NixOS/nix/labels/documentation diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ad8678962..7231730bb 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -89,7 +89,7 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy). 
## Making changes to the Nix manual -The Nix reference manual is hosted on https://nixos.org/manual/nix. +The Nix reference manual is hosted on https://nix.dev/manual/nix. The underlying source files are located in [`doc/manual/source`](./doc/manual/source). For small changes you can [use GitHub to edit these files](https://docs.github.com/en/repositories/working-with-files/managing-files/editing-files) For larger changes see the [Nix reference manual](https://nix.dev/manual/nix/development/development/contributing.html). diff --git a/src/libcmd/include/nix/cmd/common-eval-args.hh b/src/libcmd/include/nix/cmd/common-eval-args.hh index 2a0499477..62518ba0e 100644 --- a/src/libcmd/include/nix/cmd/common-eval-args.hh +++ b/src/libcmd/include/nix/cmd/common-eval-args.hh @@ -82,7 +82,7 @@ private: }; /** - * @param baseDir Optional [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory) + * @param baseDir Optional [base directory](https://nix.dev/manual/nix/development/glossary#gloss-base-directory) */ SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir = nullptr); diff --git a/src/libflake/include/nix/flake/flakeref.hh b/src/libflake/include/nix/flake/flakeref.hh index 12d337230..c8c536bce 100644 --- a/src/libflake/include/nix/flake/flakeref.hh +++ b/src/libflake/include/nix/flake/flakeref.hh @@ -81,7 +81,7 @@ struct FlakeRef std::ostream & operator<<(std::ostream & str, const FlakeRef & flakeRef); /** - * @param baseDir Optional [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory) + * @param baseDir Optional [base directory](https://nix.dev/manual/nix/development/glossary.html#gloss-base-directory) */ FlakeRef parseFlakeRef( const fetchers::Settings & fetchSettings, @@ -92,7 +92,7 @@ FlakeRef parseFlakeRef( bool preserveRelativePaths = false); /** - * @param baseDir Optional [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory) + * @param baseDir Optional [base directory](https://nix.dev/manual/nix/development/glossary.html#gloss-base-directory) */ std::pair parseFlakeRefWithFragment( const fetchers::Settings & fetchSettings, @@ -103,7 +103,7 @@ std::pair parseFlakeRefWithFragment( bool preserveRelativePaths = false); /** - * @param baseDir Optional [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory) + * @param baseDir Optional [base directory](https://nix.dev/manual/nix/development/glossary.html#gloss-base-directory) */ std::tuple parseFlakeRefWithFragmentAndExtendedOutputsSpec( const fetchers::Settings & fetchSettings, diff --git a/src/libutil/include/nix/util/args.hh b/src/libutil/include/nix/util/args.hh index 5e64ae1d9..443db445f 100644 --- a/src/libutil/include/nix/util/args.hh +++ b/src/libutil/include/nix/util/args.hh @@ -51,8 +51,8 @@ public: } /** - * @brief Get the [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory) for the - * command. + * @brief Get the [base directory](https://nix.dev/manual/nix/development/glossary.html#gloss-base-directory) for + * the command. * * @return Generally the working directory, but in case of a shebang * interpreter, returns the directory of the script. From d21e3f88ecd9a5dfd557ee34e13858e065fd5465 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 24 Jul 2025 14:44:05 -0400 Subject: [PATCH 085/382] Implement support for Git hashing with SHA-256 SHA-256 is Git's next hash algorithm. The world is still basically stuck on SHA-1 with git, but shouldn't be. 
We can at least do our part to get ready. On the C++ implementation side, only a little bit of generalization was needed, and that was fairly straight-forward. The tests (unit and system) were actually bigger, and care was taken to make sure they were all cover both algorithms equally. --- maintainers/flake-module.nix | 3 - src/libstore/store-api.cc | 7 +- src/libutil-tests/data/git/check-data.sh | 44 +++--- .../data/git/{tree.bin => tree-sha1.bin} | Bin .../data/git/{tree.txt => tree-sha1.txt} | 0 src/libutil-tests/data/git/tree-sha256.bin | Bin 0 -> 181 bytes src/libutil-tests/data/git/tree-sha256.txt | 4 + src/libutil-tests/git.cc | 148 +++++++++++++----- src/libutil/git.cc | 23 ++- src/libutil/hash.cc | 17 -- src/libutil/include/nix/util/git.hh | 11 +- src/libutil/include/nix/util/hash.hh | 25 ++- tests/functional/fixed.nix | 7 +- tests/functional/git-hashing/common.sh | 2 + tests/functional/git-hashing/fixed.sh | 6 +- tests/functional/git-hashing/meson.build | 3 +- tests/functional/git-hashing/simple-common.sh | 96 ++++++++++++ tests/functional/git-hashing/simple-sha1.sh | 27 ++++ tests/functional/git-hashing/simple-sha256.sh | 29 ++++ tests/functional/git-hashing/simple.sh | 79 ---------- 20 files changed, 350 insertions(+), 181 deletions(-) mode change 100644 => 100755 src/libutil-tests/data/git/check-data.sh rename src/libutil-tests/data/git/{tree.bin => tree-sha1.bin} (100%) rename src/libutil-tests/data/git/{tree.txt => tree-sha1.txt} (100%) create mode 100644 src/libutil-tests/data/git/tree-sha256.bin create mode 100644 src/libutil-tests/data/git/tree-sha256.txt create mode 100644 tests/functional/git-hashing/simple-common.sh create mode 100755 tests/functional/git-hashing/simple-sha1.sh create mode 100755 tests/functional/git-hashing/simple-sha256.sh delete mode 100755 tests/functional/git-hashing/simple.sh diff --git a/maintainers/flake-module.nix b/maintainers/flake-module.nix index ee9a8bdad..b6686f134 100644 --- a/maintainers/flake-module.nix +++ b/maintainers/flake-module.nix @@ -262,8 +262,6 @@ ''^tests/functional/gc-concurrent\.sh$'' ''^tests/functional/gc-concurrent2\.builder\.sh$'' ''^tests/functional/gc-non-blocking\.sh$'' - ''^tests/functional/git-hashing/common\.sh$'' - ''^tests/functional/git-hashing/simple\.sh$'' ''^tests/functional/hash-convert\.sh$'' ''^tests/functional/impure-derivations\.sh$'' ''^tests/functional/impure-eval\.sh$'' @@ -339,7 +337,6 @@ ''^tests/functional/user-envs\.builder\.sh$'' ''^tests/functional/user-envs\.sh$'' ''^tests/functional/why-depends\.sh$'' - ''^src/libutil-tests/data/git/check-data\.sh$'' ]; }; }; diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index b80d82b99..1465d9b42 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -107,8 +107,11 @@ static std::string makeType(const MixStoreDirMethods & store, std::string && typ StorePath MixStoreDirMethods::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const { - if (info.method == FileIngestionMethod::Git && info.hash.algo != HashAlgorithm::SHA1) - throw Error("Git file ingestion must use SHA-1 hash"); + if (info.method == FileIngestionMethod::Git + && !(info.hash.algo == HashAlgorithm::SHA1 || info.hash.algo == HashAlgorithm::SHA256)) { + throw Error( + "Git file ingestion must use SHA-1 or SHA-256 hash, but instead using: %s", printHashAlgo(info.hash.algo)); + } if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::NixArchive) { return makeStorePath(makeType(*this, "source", 
info.references), info.hash, name); diff --git a/src/libutil-tests/data/git/check-data.sh b/src/libutil-tests/data/git/check-data.sh old mode 100644 new mode 100755 index b3f59c4f1..d3be5c41a --- a/src/libutil-tests/data/git/check-data.sh +++ b/src/libutil-tests/data/git/check-data.sh @@ -2,30 +2,34 @@ set -eu -o pipefail -export TEST_ROOT=$(realpath ${TMPDIR:-/tmp}/nix-test)/git-hashing/check-data -mkdir -p $TEST_ROOT +TEST_ROOT=$(realpath "${TMPDIR:-/tmp}/nix-test")/git-hashing/check-data +export TEST_ROOT +mkdir -p "$TEST_ROOT" -repo="$TEST_ROOT/scratch" -git init "$repo" +for hash in sha1 sha256; do + repo="$TEST_ROOT/scratch-$hash" + git init "$repo" --object-format="$hash" -git -C "$repo" config user.email "you@example.com" -git -C "$repo" config user.name "Your Name" + git -C "$repo" config user.email "you@example.com" + git -C "$repo" config user.name "Your Name" -# `-w` to write for tree test -freshlyAddedHash=$(git -C "$repo" hash-object -w -t blob --stdin < "./hello-world.bin") -encodingHash=$(sha1sum -b < "./hello-world-blob.bin" | head -c 40) + # `-w` to write for tree test + freshlyAddedHash=$(git -C "$repo" hash-object -w -t blob --stdin < "./hello-world.bin") + encodingHash=$("${hash}sum" -b < "./hello-world-blob.bin" | sed 's/ .*//') -# If the hashes match, then `hello-world-blob.bin` must be the encoding -# of `hello-world.bin`. -[[ "$encodingHash" == "$freshlyAddedHash" ]] + # If the hashes match, then `hello-world-blob.bin` must be the encoding + # of `hello-world.bin`. + [[ "$encodingHash" == "$freshlyAddedHash" ]] -# Create empty directory object for tree test -echo -n | git -C "$repo" hash-object -w -t tree --stdin + # Create empty directory object for tree test + echo -n | git -C "$repo" hash-object -w -t tree --stdin -# Relies on both child hashes already existing in the git store -freshlyAddedHash=$(git -C "$repo" mktree < "./tree.txt") -encodingHash=$(sha1sum -b < "./tree.bin" | head -c 40) + # Relies on both child hashes already existing in the git store + tree=tree-${hash} + freshlyAddedHash=$(git -C "$repo" mktree < "${tree}.txt") + encodingHash=$("${hash}sum" -b < "${tree}.bin" | sed 's/ .*//') -# If the hashes match, then `tree.bin` must be the encoding of the -# directory denoted by `tree.txt` interpreted as git directory listing. -[[ "$encodingHash" == "$freshlyAddedHash" ]] + # If the hashes match, then `tree.bin` must be the encoding of the + # directory denoted by `tree.txt` interpreted as git directory listing. 
+ [[ "$encodingHash" == "$freshlyAddedHash" ]] +done diff --git a/src/libutil-tests/data/git/tree.bin b/src/libutil-tests/data/git/tree-sha1.bin similarity index 100% rename from src/libutil-tests/data/git/tree.bin rename to src/libutil-tests/data/git/tree-sha1.bin diff --git a/src/libutil-tests/data/git/tree.txt b/src/libutil-tests/data/git/tree-sha1.txt similarity index 100% rename from src/libutil-tests/data/git/tree.txt rename to src/libutil-tests/data/git/tree-sha1.txt diff --git a/src/libutil-tests/data/git/tree-sha256.bin b/src/libutil-tests/data/git/tree-sha256.bin new file mode 100644 index 0000000000000000000000000000000000000000..87380f4126e6eb45aa817cda8e5dbb3ec2479a42 GIT binary patch literal 181 zcmXRZN=;QTG&f=}G%zqTF;Q^K&u2K7@O5p)g04dzMej?lceD4iS3eSZzR0c9N-B10 z?e7yC4Gj#;O-&V&9E%7lH8C(SFi=QJjAF?9INMPvCOGrsL6$4Ay{+$8ZV2CMEwW|j XPRqL`_s#?>8XAF>7M7Mq5HuA4hrv-b literal 0 HcmV?d00001 diff --git a/src/libutil-tests/data/git/tree-sha256.txt b/src/libutil-tests/data/git/tree-sha256.txt new file mode 100644 index 000000000..93568a063 --- /dev/null +++ b/src/libutil-tests/data/git/tree-sha256.txt @@ -0,0 +1,4 @@ +100644 blob ce60f5ad78a08ac24872ef74d78b078f077be212e7a246893a1a5d957dfbc8b1 Foo +100755 blob ce60f5ad78a08ac24872ef74d78b078f077be212e7a246893a1a5d957dfbc8b1 bAr +040000 tree 6ef19b41225c5369f1c104d45d8d85efa9b057b53b14b4b9b939dd74decc5321 baZ +120000 blob ce60f5ad78a08ac24872ef74d78b078f077be212e7a246893a1a5d957dfbc8b1 quuX diff --git a/src/libutil-tests/git.cc b/src/libutil-tests/git.cc index 389f8583d..d9926b9b6 100644 --- a/src/libutil-tests/git.cc +++ b/src/libutil-tests/git.cc @@ -97,7 +97,7 @@ TEST_F(GitTest, blob_write) * so that we can check our test data in a small shell script test test * (`src/libutil-tests/data/git/check-data.sh`). */ -const static Tree tree = { +const static Tree treeSha1 = { { "Foo", { @@ -133,9 +133,48 @@ const static Tree tree = { }, }; -TEST_F(GitTest, tree_read) +/** + * Same conceptual object as `treeSha1`, just different hash algorithm. + * See that one for details. 
+ */ +const static Tree treeSha256 = { + { + "Foo", + { + .mode = Mode::Regular, + .hash = Hash::parseAny( + "ce60f5ad78a08ac24872ef74d78b078f077be212e7a246893a1a5d957dfbc8b1", HashAlgorithm::SHA256), + }, + }, + { + "bAr", + { + .mode = Mode::Executable, + .hash = Hash::parseAny( + "ce60f5ad78a08ac24872ef74d78b078f077be212e7a246893a1a5d957dfbc8b1", HashAlgorithm::SHA256), + }, + }, + { + "baZ/", + { + .mode = Mode::Directory, + .hash = Hash::parseAny( + "6ef19b41225c5369f1c104d45d8d85efa9b057b53b14b4b9b939dd74decc5321", HashAlgorithm::SHA256), + }, + }, + { + "quuX", + { + .mode = Mode::Symlink, + .hash = Hash::parseAny( + "ce60f5ad78a08ac24872ef74d78b078f077be212e7a246893a1a5d957dfbc8b1", HashAlgorithm::SHA256), + }, + }, +}; + +static auto mkTreeReadTest(HashAlgorithm hashAlgo, Tree tree, const ExperimentalFeatureSettings & mockXpSettings) { - readTest("tree.bin", [&](const auto & encoded) { + return [hashAlgo, tree, mockXpSettings](const auto & encoded) { StringSource in{encoded}; NullFileSystemObjectSink out; Tree got; @@ -144,6 +183,7 @@ TEST_F(GitTest, tree_read) out, CanonPath::root, in, + hashAlgo, [&](auto & name, auto entry) { auto name2 = std::string{name.rel()}; if (entry.mode == Mode::Directory) @@ -153,14 +193,33 @@ TEST_F(GitTest, tree_read) mockXpSettings); ASSERT_EQ(got, tree); + }; +} + +TEST_F(GitTest, tree_sha1_read) +{ + readTest("tree-sha1.bin", mkTreeReadTest(HashAlgorithm::SHA1, treeSha1, mockXpSettings)); +} + +TEST_F(GitTest, tree_sha256_read) +{ + readTest("tree-sha256.bin", mkTreeReadTest(HashAlgorithm::SHA256, treeSha256, mockXpSettings)); +} + +TEST_F(GitTest, tree_sha1_write) +{ + writeTest("tree-sha1.bin", [&]() { + StringSink s; + dumpTree(treeSha1, s, mockXpSettings); + return s.s; }); } -TEST_F(GitTest, tree_write) +TEST_F(GitTest, tree_sha256_write) { - writeTest("tree.bin", [&]() { + writeTest("tree-sha256.bin", [&]() { StringSink s; - dumpTree(tree, s, mockXpSettings); + dumpTree(treeSha256, s, mockXpSettings); return s.s; }); } @@ -202,51 +261,54 @@ TEST_F(GitTest, both_roundrip) }, }; - std::map cas; + for (const auto hashAlgo : {HashAlgorithm::SHA1, HashAlgorithm::SHA256}) { + std::map cas; - std::function dumpHook; - dumpHook = [&](const SourcePath & path) { - StringSink s; - HashSink hashSink{HashAlgorithm::SHA1}; - TeeSink s2{s, hashSink}; - auto mode = dump(path, s2, dumpHook, defaultPathFilter, mockXpSettings); - auto hash = hashSink.finish().first; - cas.insert_or_assign(hash, std::move(s.s)); - return TreeEntry{ - .mode = mode, - .hash = hash, + std::function dumpHook; + dumpHook = [&](const SourcePath & path) { + StringSink s; + HashSink hashSink{hashAlgo}; + TeeSink s2{s, hashSink}; + auto mode = dump(path, s2, dumpHook, defaultPathFilter, mockXpSettings); + auto hash = hashSink.finish().first; + cas.insert_or_assign(hash, std::move(s.s)); + return TreeEntry{ + .mode = mode, + .hash = hash, + }; }; - }; - auto root = dumpHook({files}); + auto root = dumpHook({files}); - auto files2 = make_ref(); + auto files2 = make_ref(); - MemorySink sinkFiles2{*files2}; + MemorySink sinkFiles2{*files2}; - std::function mkSinkHook; - mkSinkHook = [&](auto prefix, auto & hash, auto blobMode) { - StringSource in{cas[hash]}; - parse( - sinkFiles2, - prefix, - in, - blobMode, - [&](const CanonPath & name, const auto & entry) { - mkSinkHook( - prefix / name, - entry.hash, - // N.B. this cast would not be acceptable in real - // code, because it would make an assert reachable, - // but it should harmless in this test. 
- static_cast(entry.mode)); - }, - mockXpSettings); - }; + std::function mkSinkHook; + mkSinkHook = [&](auto prefix, auto & hash, auto blobMode) { + StringSource in{cas[hash]}; + parse( + sinkFiles2, + prefix, + in, + blobMode, + hashAlgo, + [&](const CanonPath & name, const auto & entry) { + mkSinkHook( + prefix / name, + entry.hash, + // N.B. this cast would not be acceptable in real + // code, because it would make an assert reachable, + // but it should harmless in this test. + static_cast(entry.mode)); + }, + mockXpSettings); + }; - mkSinkHook(CanonPath::root, root.hash, BlobMode::Regular); + mkSinkHook(CanonPath::root, root.hash, BlobMode::Regular); - ASSERT_EQ(files->root, files2->root); + EXPECT_EQ(files->root, files2->root); + } } TEST(GitLsRemote, parseSymrefLineWithReference) diff --git a/src/libutil/git.cc b/src/libutil/git.cc index e87d5550b..bee354da4 100644 --- a/src/libutil/git.cc +++ b/src/libutil/git.cc @@ -59,7 +59,7 @@ void parseBlob( { xpSettings.require(Xp::GitHashing); - unsigned long long size = std::stoi(getStringUntil(source, 0)); + const unsigned long long size = std::stoi(getStringUntil(source, 0)); auto doRegularFile = [&](bool executable) { sink.createRegularFile(sinkPath, [&](auto & crf) { @@ -114,10 +114,11 @@ void parseTree( FileSystemObjectSink & sink, const CanonPath & sinkPath, Source & source, + HashAlgorithm hashAlgo, std::function hook, const ExperimentalFeatureSettings & xpSettings) { - unsigned long long size = std::stoi(getStringUntil(source, 0)); + const unsigned long long size = std::stoi(getStringUntil(source, 0)); unsigned long long left = size; sink.createDirectory(sinkPath); @@ -137,10 +138,15 @@ void parseTree( left -= name.size(); left -= 1; - std::string hashs = getString(source, 20); - left -= 20; + const auto hashSize = regularHashSize(hashAlgo); + std::string hashs = getString(source, hashSize); + left -= hashSize; - Hash hash(HashAlgorithm::SHA1); + if (!(hashAlgo == HashAlgorithm::SHA1 || hashAlgo == HashAlgorithm::SHA256)) { + throw Error("Unsupported hash algorithm for git trees: %s", printHashAlgo(hashAlgo)); + } + + Hash hash(hashAlgo); std::copy(hashs.begin(), hashs.end(), hash.hash); hook( @@ -171,6 +177,7 @@ void parse( const CanonPath & sinkPath, Source & source, BlobMode rootModeIfBlob, + HashAlgorithm hashAlgo, std::function hook, const ExperimentalFeatureSettings & xpSettings) { @@ -183,7 +190,7 @@ void parse( parseBlob(sink, sinkPath, source, rootModeIfBlob, xpSettings); break; case ObjectType::Tree: - parseTree(sink, sinkPath, source, hook, xpSettings); + parseTree(sink, sinkPath, source, hashAlgo, hook, xpSettings); break; default: assert(false); @@ -210,9 +217,9 @@ std::optional convertMode(SourceAccessor::Type type) } } -void restore(FileSystemObjectSink & sink, Source & source, std::function hook) +void restore(FileSystemObjectSink & sink, Source & source, HashAlgorithm hashAlgo, std::function hook) { - parse(sink, CanonPath::root, source, BlobMode::Regular, [&](CanonPath name, TreeEntry entry) { + parse(sink, CanonPath::root, source, BlobMode::Regular, hashAlgo, [&](CanonPath name, TreeEntry entry) { auto [accessor, from] = hook(entry.hash); auto stat = accessor->lstat(from); auto gotOpt = convertMode(stat.type); diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 8ee725d2d..38ef5dd90 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -20,23 +20,6 @@ namespace nix { -static size_t regularHashSize(HashAlgorithm type) -{ - switch (type) { - case HashAlgorithm::BLAKE3: - return blake3HashSize; - case 
HashAlgorithm::MD5: - return md5HashSize; - case HashAlgorithm::SHA1: - return sha1HashSize; - case HashAlgorithm::SHA256: - return sha256HashSize; - case HashAlgorithm::SHA512: - return sha512HashSize; - } - unreachable(); -} - const StringSet hashAlgorithms = {"blake3", "md5", "sha1", "sha256", "sha512"}; const StringSet hashFormats = {"base64", "nix32", "base16", "sri"}; diff --git a/src/libutil/include/nix/util/git.hh b/src/libutil/include/nix/util/git.hh index 97008c53a..5140c76c4 100644 --- a/src/libutil/include/nix/util/git.hh +++ b/src/libutil/include/nix/util/git.hh @@ -94,10 +94,14 @@ void parseBlob( BlobMode blobMode, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); +/** + * @param hashAlgo must be `HashAlgo::SHA1` or `HashAlgo::SHA256` for now. + */ void parseTree( FileSystemObjectSink & sink, const CanonPath & sinkPath, Source & source, + HashAlgorithm hashAlgo, std::function hook, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); @@ -107,12 +111,15 @@ void parseTree( * @param rootModeIfBlob How to interpret a root blob, for which there is no * disambiguating dir entry to answer that questino. If the root it not * a blob, this is ignored. + * + * @param hashAlgo must be `HashAlgo::SHA1` or `HashAlgo::SHA256` for now. */ void parse( FileSystemObjectSink & sink, const CanonPath & sinkPath, Source & source, BlobMode rootModeIfBlob, + HashAlgorithm hashAlgo, std::function hook, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); @@ -131,8 +138,10 @@ using RestoreHook = SourcePath(Hash); /** * Wrapper around `parse` and `RestoreSink` + * + * @param hashAlgo must be `HashAlgo::SHA1` or `HashAlgo::SHA256` for now. */ -void restore(FileSystemObjectSink & sink, Source & source, std::function hook); +void restore(FileSystemObjectSink & sink, Source & source, HashAlgorithm hashAlgo, std::function hook); /** * Dumps a single file to a sink diff --git a/src/libutil/include/nix/util/hash.hh b/src/libutil/include/nix/util/hash.hh index 4237d7660..daacd7adf 100644 --- a/src/libutil/include/nix/util/hash.hh +++ b/src/libutil/include/nix/util/hash.hh @@ -12,11 +12,26 @@ MakeError(BadHash, Error); enum struct HashAlgorithm : char { MD5 = 42, SHA1, SHA256, SHA512, BLAKE3 }; -const int blake3HashSize = 32; -const int md5HashSize = 16; -const int sha1HashSize = 20; -const int sha256HashSize = 32; -const int sha512HashSize = 64; +/** + * @return the size of a hash for the given algorithm + */ +constexpr inline size_t regularHashSize(HashAlgorithm type) +{ + switch (type) { + case HashAlgorithm::BLAKE3: + return 32; + case HashAlgorithm::MD5: + return 16; + case HashAlgorithm::SHA1: + return 20; + case HashAlgorithm::SHA256: + return 32; + case HashAlgorithm::SHA512: + return 64; + default: + assert(false); + } +} extern const StringSet hashAlgorithms; diff --git a/tests/functional/fixed.nix b/tests/functional/fixed.nix index eab3ee707..1a743ee39 100644 --- a/tests/functional/fixed.nix +++ b/tests/functional/fixed.nix @@ -74,5 +74,10 @@ rec { nar-not-recursive = f2 "foo" ./fixed.builder2.sh "nar" "md5" "3670af73070fa14077ad74e0f5ea4e42"; # Experimental feature - git = f2 "foo" ./fixed.builder2.sh "git" "sha1" "cd44baf36915d5dec8374232ea7e2057f3b4494e"; + + git-sha1 = f2 "foo" ./fixed.builder2.sh "git" "sha1" "cd44baf36915d5dec8374232ea7e2057f3b4494e"; + + git-sha256 = + f2 "foo" ./fixed.builder2.sh "git" "sha256" + "3c957653f90c34c0a8badf343b61393936cddf4a2ca93f64b21f02303ddedcc2"; } diff --git 
a/tests/functional/git-hashing/common.sh b/tests/functional/git-hashing/common.sh index 29c518fea..2d41c0ff8 100644 --- a/tests/functional/git-hashing/common.sh +++ b/tests/functional/git-hashing/common.sh @@ -1,3 +1,5 @@ +# shellcheck shell=bash + source ../common.sh TODO_NixOS # Need to enable git hashing feature and make sure test is ok for store we don't clear diff --git a/tests/functional/git-hashing/fixed.sh b/tests/functional/git-hashing/fixed.sh index f33d95cfa..080bc6485 100755 --- a/tests/functional/git-hashing/fixed.sh +++ b/tests/functional/git-hashing/fixed.sh @@ -5,4 +5,8 @@ source common.sh # Store layer needs bugfix requireDaemonNewerThan "2.27pre20250122" -nix-build ../fixed.nix -A git --no-out-link +nix-build ../fixed.nix -A git-sha1 --no-out-link + +if isDaemonNewer "2.31pre20250724"; then + nix-build ../fixed.nix -A git-sha256 --no-out-link +fi diff --git a/tests/functional/git-hashing/meson.build b/tests/functional/git-hashing/meson.build index d6a782cdc..342c2799c 100644 --- a/tests/functional/git-hashing/meson.build +++ b/tests/functional/git-hashing/meson.build @@ -2,7 +2,8 @@ suites += { 'name': 'git-hashing', 'deps': [], 'tests': [ - 'simple.sh', + 'simple-sha1.sh', + 'simple-sha256.sh', 'fixed.sh', ], 'workdir': meson.current_source_dir(), diff --git a/tests/functional/git-hashing/simple-common.sh b/tests/functional/git-hashing/simple-common.sh new file mode 100644 index 000000000..08b5c0e71 --- /dev/null +++ b/tests/functional/git-hashing/simple-common.sh @@ -0,0 +1,96 @@ +# shellcheck shell=bash + +source common.sh + +# Assert is set +[[ ${hashAlgo+x} ]] + +repo="$TEST_ROOT/scratch" + +initRepo () { + git init "$repo" --object-format="$hashAlgo" + + git -C "$repo" config user.email "you@example.com" + git -C "$repo" config user.name "Your Name" +} + +# Compare Nix's and git's implementation of git hashing +try () { + local expected="$1" + + local hash + hash=$(nix hash path --mode git --format base16 --algo "$hashAlgo" "$TEST_ROOT/hash-path") + [[ "$hash" == "$expected" ]] + + git -C "$repo" rm -rf hash-path || true + cp -r "$TEST_ROOT/hash-path" "$repo/hash-path" + git -C "$repo" add hash-path + git -C "$repo" commit -m "x" + git -C "$repo" status + local hash2 + hash2=$(git -C "$repo" rev-parse HEAD:hash-path) + [[ "$hash2" = "$expected" ]] +} + +# Check Nix added object has matching git hash +try2 () { + local hashPath="$1" + local expected="$2" + + local path + path=$(nix store add --mode git --hash-algo "$hashAlgo" "$repo/$hashPath") + + git -C "$repo" add "$hashPath" + git -C "$repo" commit -m "x" + git -C "$repo" status + local hashFromGit + hashFromGit=$(git -C "$repo" rev-parse "HEAD:$hashPath") + [[ "$hashFromGit" == "$expected" ]] + + local caFromNix + caFromNix=$(nix path-info --json "$path" | jq -r ".[] | .ca") + [[ "fixed:git:$hashAlgo:$(nix hash convert --to nix32 "$hashAlgo:$hashFromGit")" = "$caFromNix" ]] +} + +test0 () { + rm -rf "$TEST_ROOT/hash-path" + echo "Hello World" > "$TEST_ROOT/hash-path" +} + +test1 () { + rm -rf "$TEST_ROOT/hash-path" + mkdir "$TEST_ROOT/hash-path" + echo "Hello World" > "$TEST_ROOT/hash-path/hello" + echo "Run Hello World" > "$TEST_ROOT/hash-path/executable" + chmod +x "$TEST_ROOT/hash-path/executable" +} + +test2 () { + rm -rf "$repo/dummy1" + echo Hello World! > "$repo/dummy1" +} + +test3 () { + rm -rf "$repo/dummy2" + mkdir -p "$repo/dummy2" + echo Hello World! 
> "$repo/dummy2/hello" +} + +test4 () { + rm -rf "$repo/dummy3" + mkdir -p "$repo/dummy3" + mkdir -p "$repo/dummy3/dir" + touch "$repo/dummy3/dir/file" + echo Hello World! > "$repo/dummy3/dir/file" + touch "$repo/dummy3/dir/executable" + chmod +x "$repo/dummy3/dir/executable" + echo Run Hello World! > "$repo/dummy3/dir/executable" +} + +test5 () { + rm -rf "$repo/dummy4" + mkdir -p "$repo/dummy4" + mkdir -p "$repo/dummy4/dir" + touch "$repo/dummy4/dir/file" + ln -s './hello/world.txt' "$repo/dummy4/dir/symlink" +} diff --git a/tests/functional/git-hashing/simple-sha1.sh b/tests/functional/git-hashing/simple-sha1.sh new file mode 100755 index 000000000..a883ea848 --- /dev/null +++ b/tests/functional/git-hashing/simple-sha1.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash + +hashAlgo=sha1 + +source simple-common.sh + +initRepo + +# blob +test0 +try "557db03de997c86a4a028e1ebd3a1ceb225be238" + +# tree with children +test1 +try "e5c0a11a556801a5c9dcf330ca9d7e2c572697f4" + +test2 +try2 dummy1 "980a0d5f19a64b4b30a87d4206aade58726b60e3" + +test3 +try2 dummy2 "8b8e43b937854f4083ea56777821abda2799e850" + +test4 +try2 dummy3 "f227adfaf60d2778aabbf93df6dd061272d2dc85" + +test5 +try2 dummy4 "06f3e789820fc488d602358f03e3a1cbf993bf33" diff --git a/tests/functional/git-hashing/simple-sha256.sh b/tests/functional/git-hashing/simple-sha256.sh new file mode 100755 index 000000000..c7da71e00 --- /dev/null +++ b/tests/functional/git-hashing/simple-sha256.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash + +hashAlgo=sha256 + +source simple-common.sh + +requireDaemonNewerThan 2.31pre20250724 + +initRepo + +# blob +test0 +try "7c5c8610459154bdde4984be72c48fb5d9c1c4ac793a6b5976fe38fd1b0b1284" + +# tree with children +test1 +try "cd79952f42462467d0ea574b0283bb6eb77e15b2b86891e29f2b981650365474" + +test2 +try2 dummy1 "f5b5cec05fb6f9302b507a48c1573e6f36075e954d97caa8667f784e9cdb0d13" + +test3 +try2 dummy2 "399d851c74ceac2c2b61b53b13dcf5e88df3b6135c7df1f248a323c3c2f9aa78" + +test4 +try2 dummy3 "d3ae8fc87e76b9b871bd06a58c925c5fb5f83b5393f9f58e4f6dba3f59470289" + +test5 +try2 dummy4 "8c090dd057e8e01ffe1fec24a3133dfe52ba4eda822e67ee7fefc2af7c6a2906" diff --git a/tests/functional/git-hashing/simple.sh b/tests/functional/git-hashing/simple.sh deleted file mode 100755 index e02d8b297..000000000 --- a/tests/functional/git-hashing/simple.sh +++ /dev/null @@ -1,79 +0,0 @@ -#!/usr/bin/env bash - -source common.sh - -repo="$TEST_ROOT/scratch" -git init "$repo" - -git -C "$repo" config user.email "you@example.com" -git -C "$repo" config user.name "Your Name" - -# Compare Nix's and git's implementation of git hashing -try () { - local hash=$(nix hash path --mode git --format base16 --algo sha1 $TEST_ROOT/hash-path) - [[ "$hash" == "$1" ]] - - git -C "$repo" rm -rf hash-path || true - cp -r "$TEST_ROOT/hash-path" "$TEST_ROOT/scratch/hash-path" - git -C "$repo" add hash-path - git -C "$repo" commit -m "x" - git -C "$repo" status - local hash2=$(git -C "$TEST_ROOT/scratch" rev-parse HEAD:hash-path) - [[ "$hash2" = "$1" ]] -} - -# blob -rm -rf $TEST_ROOT/hash-path -echo "Hello World" > $TEST_ROOT/hash-path -try "557db03de997c86a4a028e1ebd3a1ceb225be238" - -# tree with children -rm -rf $TEST_ROOT/hash-path -mkdir $TEST_ROOT/hash-path -echo "Hello World" > $TEST_ROOT/hash-path/hello -echo "Run Hello World" > $TEST_ROOT/hash-path/executable -chmod +x $TEST_ROOT/hash-path/executable -try "e5c0a11a556801a5c9dcf330ca9d7e2c572697f4" - -# Check Nix added object has matching git hash -try2 () { - local hashPath="$1" - local expected="$2" - - local path=$(nix 
store add --mode git --hash-algo sha1 "$repo/$hashPath") - - git -C "$repo" add "$hashPath" - git -C "$repo" commit -m "x" - git -C "$repo" status - local hashFromGit=$(git -C "$repo" rev-parse "HEAD:$hashPath") - [[ "$hashFromGit" == "$2" ]] - - local caFromNix=$(nix path-info --json "$path" | jq -r ".[] | .ca") - [[ "fixed:git:sha1:$(nix hash convert --to nix32 "sha1:$hashFromGit")" = "$caFromNix" ]] -} - -rm -rf "$repo/dummy1" -echo Hello World! > "$repo/dummy1" -try2 dummy1 "980a0d5f19a64b4b30a87d4206aade58726b60e3" - -rm -rf "$repo/dummy2" -mkdir -p "$repo/dummy2" -echo Hello World! > "$repo/dummy2/hello" -try2 dummy2 "8b8e43b937854f4083ea56777821abda2799e850" - -rm -rf "$repo/dummy3" -mkdir -p "$repo/dummy3" -mkdir -p "$repo/dummy3/dir" -touch "$repo/dummy3/dir/file" -echo Hello World! > "$repo/dummy3/dir/file" -touch "$repo/dummy3/dir/executable" -chmod +x "$repo/dummy3/dir/executable" -echo Run Hello World! > "$repo/dummy3/dir/executable" -try2 dummy3 "f227adfaf60d2778aabbf93df6dd061272d2dc85" - -rm -rf "$repo/dummy4" -mkdir -p "$repo/dummy4" -mkdir -p "$repo/dummy4/dir" -touch "$repo/dummy4/dir/file" -ln -s './hello/world.txt' "$repo/dummy4/dir/symlink" -try2 dummy4 "06f3e789820fc488d602358f03e3a1cbf993bf33" From 5e407e6abb5738f72b8b5aeeaa003728fe584c2f Mon Sep 17 00:00:00 2001 From: Jason Yundt Date: Sat, 26 Jul 2025 06:49:50 -0400 Subject: [PATCH 086/382] Sometimes build nix-manual when cross compiling MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Before this change, if you were cross compiling Nix, then the nix-manual subproject would never get built. In some situations, it makes sense to not build the nix-manual subproject when cross compiling. For example, if the build system is x86_64 and the host system is riscv64, then it makes sense to not build the manual. Building the manual requires executing certain build artifacts, and you can’t run x86_64 executables on riscv64 systems. That being said, there are some situations where it does make sense to build the nix-manual subproject when cross compiling. For example, if the build system is x86_64 and the host system is i686, then it doesn’t make sense to not build the manual. You can run i686 executables on x86_64 systems just fine. This change makes it so that the nix-manual subproject will sometimes get built when cross compiling. Specifically, the nix-manual subproject will get built as long as the doc-gen option is enabled and the build system is capable of running host binaries. --- The main motivation behind this change is to fix this Nixpkgs issue [1]. Building pkgs.nixStatic counts as cross compiling Nix, and pkgs.nixStatic is supposed to produce a man output. Building pkgs.nixStatic currently fails because it isn’t actually producing a man output. That issue will go away once this commit gets backported to Nix 2.28.x. 
[1]:
---
 meson.build | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/meson.build b/meson.build
index 4a3a517fb..28f7ccbbb 100644
--- a/meson.build
+++ b/meson.build
@@ -29,7 +29,7 @@ subproject('nix')
 if get_option('doc-gen')
   subproject('internal-api-docs')
   subproject('external-api-docs')
-  if not meson.is_cross_build()
+  if meson.can_run_host_binaries()
     subproject('nix-manual')
   endif
 endif

From 60d124b36ec57e4fae9987ead09abb8270c784e5 Mon Sep 17 00:00:00 2001
From: Sergei Zimmerman
Date: Sun, 27 Jul 2025 00:44:40 +0300
Subject: [PATCH 087/382] meson: Fix nix_system_cpu for MIPS and 32 bit ARM
 systems

Prior patches in 54dc5314e85b2803c1d870fde61ec4105a35adee and
6db61900028ec641f12b1d36fe4ece5a9bdaa66f fixed the default system double
for i686 and ppc/ppc64. This patch also covers 32 bit ARM and MIPS.

ARM cpu names are taken from host_machine.cpu() for lack of a better
option, but host_machine.cpu_family() is preferred, since that is
supposed to be somewhat standard for cross files. Endianness is handled
correctly by looking at host_machine.endian().

This also updates the documentation to reflect how the system cpu is
translated from the host_machine specification.
---
 doc/manual/source/development/building.md     | 20 +++++++++++--------
 .../default-system-cpu/meson.build            | 13 ++++++++++--
 2 files changed, 23 insertions(+), 10 deletions(-)

diff --git a/doc/manual/source/development/building.md b/doc/manual/source/development/building.md
index 167463837..33b7b2d5c 100644
--- a/doc/manual/source/development/building.md
+++ b/doc/manual/source/development/building.md
@@ -215,14 +215,18 @@ nix build .#nix-everything-x86_64-w64-mingw32
 For historic reasons and backward-compatibility, some CPU and OS identifiers
 are translated as follows:
 
-| `config.guess`             | Nix                 |
-|----------------------------|---------------------|
-| `amd64`                    | `x86_64`            |
-| `i*86`                     | `i686`              |
-| `arm6`                     | `arm6l`             |
-| `arm7`                     | `arm7l`             |
-| `linux-gnu*`               | `linux`             |
-| `linux-musl*`              | `linux`             |
+| `host_machine.cpu_family()` | `host_machine.endian()` | Nix                 |
+|-----------------------------|-------------------------|---------------------|
+| `x86`                       |                         | `i686`              |
+| `arm`                       |                         | `host_machine.cpu()`|
+| `ppc`                       | `little`                | `powerpcle`         |
+| `ppc64`                     | `little`                | `powerpc64le`       |
+| `ppc`                       | `big`                   | `powerpc`           |
+| `ppc64`                     | `big`                   | `powerpc64`         |
+| `mips`                      | `little`                | `mipsel`            |
+| `mips64`                    | `little`                | `mips64el`          |
+| `mips`                      | `big`                   | `mips`              |
+| `mips64`                    | `big`                   | `mips64`            |
 
 ## Compilation environments
 
diff --git a/nix-meson-build-support/default-system-cpu/meson.build b/nix-meson-build-support/default-system-cpu/meson.build
index 4a0e89e10..f63b07975 100644
--- a/nix-meson-build-support/default-system-cpu/meson.build
+++ b/nix-meson-build-support/default-system-cpu/meson.build
@@ -1,10 +1,19 @@
-powerpc_system_cpus = [ 'ppc64', 'ppc' ]
+# This attempts to translate meson cpu_family and cpu_name specified via
+# --cross-file [1] into a nix *system double*. Nixpkgs mostly respects ([2]) the
+# conventions outlined in [1].
+# +# [1]: https://mesonbuild.com/Reference-tables.html#cpu-families +# [2]: https://github.com/NixOS/nixpkgs/blob/master/pkgs/build-support/lib/meson.nix nix_system_cpu = {'ppc64' : 'powerpc64', 'ppc' : 'powerpc', 'x86' : 'i686'}.get( host_machine.cpu_family(), host_machine.cpu_family(), ) -if powerpc_system_cpus.contains(host_machine.cpu_family()) and host_machine.endian() == 'little' +if (host_machine.cpu_family() in [ 'ppc64', 'ppc' ]) and host_machine.endian() == 'little' nix_system_cpu += 'le' +elif host_machine.cpu_family() in [ 'mips64', 'mips' ] and host_machine.endian() == 'little' + nix_system_cpu += 'el' +elif host_machine.cpu_family() == 'arm' + nix_system_cpu = host_machine.cpu() endif From 9913ec55bae880d5b5181c80b5d2abbec023cfe3 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 28 Jul 2025 03:26:42 +0300 Subject: [PATCH 088/382] flake: Factor out checks into ci/gha/tests for reusability This moves out the checks that get run in GHA CI into ci/gha/tests folder and splits those into `topLevel` and `componentTests` attributes. The idea behind this is to make it easier to parametrize tests that can be run with sanitizers in order to run those as a matrix of jobs. The same can be said for static builds. Existing stdenv selection infrastructure via `lib.makeComponents` would also allow us to switch over to using `clangStdenv` to significantly speed up pre-merge CI (though the default stdenv would still be used for non-overridable topLevel checks, like installer artifacts). --- ci/gha/tests/default.nix | 89 +++++++++++++++++++++++++++++++++++++ flake.nix | 96 +++++++++------------------------------- 2 files changed, 109 insertions(+), 76 deletions(-) create mode 100644 ci/gha/tests/default.nix diff --git a/ci/gha/tests/default.nix b/ci/gha/tests/default.nix new file mode 100644 index 000000000..39d1502df --- /dev/null +++ b/ci/gha/tests/default.nix @@ -0,0 +1,89 @@ +{ + nixFlake ? builtins.getFlake ("git+file://" + toString ../../..), + system ? builtins.currentSystem, + pkgs ? nixFlake.inputs.nixpkgs.legacyPackages.${system}, + getStdenv ? p: p.stdenv, + componentTestsPrefix ? "", + withSanitizers ? false, +}: + +let + inherit (pkgs) lib; + hydraJobs = nixFlake.hydraJobs; + packages' = nixFlake.packages.${system}; +in + +{ + /** + Top-level tests for the flake outputs, as they would be built by hydra. + These tests generally can't be overridden to run with sanitizers. + */ + topLevel = { + installerScriptForGHA = hydraJobs.installerScriptForGHA.${system}; + installTests = hydraJobs.installTests.${system}; + nixpkgsLibTests = hydraJobs.tests.nixpkgsLibTests.${system}; + rl-next = pkgs.buildPackages.runCommand "test-rl-next-release-notes" { } '' + LANG=C.UTF-8 ${pkgs.changelog-d}/bin/changelog-d ${../../../doc/manual/rl-next} >$out + ''; + repl-completion = pkgs.callPackage ../../../tests/repl-completion.nix { inherit (packages') nix; }; + + /** + Checks for our packaging expressions. + This shouldn't build anything significant; just check that things + (including derivations) are _set up_ correctly. + */ + packaging-overriding = + let + nix = packages'.nix; + in + assert (nix.appendPatches [ pkgs.emptyFile ]).libs.nix-util.src.patches == [ pkgs.emptyFile ]; + if pkgs.stdenv.buildPlatform.isDarwin then + lib.warn "packaging-overriding check currently disabled because of a permissions issue on macOS" pkgs.emptyFile + else + # If this fails, something might be wrong with how we've wired the scope, + # or something could be broken in Nixpkgs. 
+ pkgs.testers.testEqualContents { + assertion = "trivial patch does not change source contents"; + expected = "${../../..}"; + actual = + # Same for all components; nix-util is an arbitrary pick + (nix.appendPatches [ pkgs.emptyFile ]).libs.nix-util.src; + }; + }; + + componentTests = + let + nixComponents = + (nixFlake.lib.makeComponents { + inherit pkgs; + inherit getStdenv; + }).overrideScope + ( + _: _: { + mesonComponentOverrides = finalAttrs: prevAttrs: { + mesonFlags = + (prevAttrs.mesonFlags or [ ]) + ++ lib.optionals withSanitizers [ + # Run all tests with UBSAN enabled. Running both with ubsan and + # without doesn't seem to have much immediate benefit for doubling + # the GHA CI workaround. + # + # TODO: Work toward enabling "address,undefined" if it seems feasible. + # This would maybe require dropping Boost coroutines and ignoring intentional + # memory leaks with detect_leaks=0. + (lib.mesonOption "b_sanitize" "undefined") + ]; + }; + } + ); + in + (lib.concatMapAttrs ( + pkgName: pkg: + lib.concatMapAttrs (testName: test: { + "${componentTestsPrefix}${pkgName}-${testName}" = test; + }) (pkg.tests or { }) + ) nixComponents) + // lib.optionalAttrs (pkgs.stdenv.hostPlatform == pkgs.stdenv.buildPlatform) { + "${componentTestsPrefix}nix-functional-tests" = nixComponents.nix-functional-tests; + }; +} diff --git a/flake.nix b/flake.nix index 2b93429e9..03d77e055 100644 --- a/flake.nix +++ b/flake.nix @@ -320,43 +320,11 @@ checks = forAllSystems ( system: - { - installerScriptForGHA = self.hydraJobs.installerScriptForGHA.${system}; - installTests = self.hydraJobs.installTests.${system}; - nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system}; - rl-next = - let - pkgs = nixpkgsFor.${system}.native; - in - pkgs.buildPackages.runCommand "test-rl-next-release-notes" { } '' - LANG=C.UTF-8 ${pkgs.changelog-d}/bin/changelog-d ${./doc/manual/rl-next} >$out - ''; - repl-completion = nixpkgsFor.${system}.native.callPackage ./tests/repl-completion.nix { }; - - /** - Checks for our packaging expressions. - This shouldn't build anything significant; just check that things - (including derivations) are _set up_ correctly. - */ - packaging-overriding = - let - pkgs = nixpkgsFor.${system}.native; - nix = self.packages.${system}.nix; - in - assert (nix.appendPatches [ pkgs.emptyFile ]).libs.nix-util.src.patches == [ pkgs.emptyFile ]; - if pkgs.stdenv.buildPlatform.isDarwin then - lib.warn "packaging-overriding check currently disabled because of a permissions issue on macOS" pkgs.emptyFile - else - # If this fails, something might be wrong with how we've wired the scope, - # or something could be broken in Nixpkgs. - pkgs.testers.testEqualContents { - assertion = "trivial patch does not change source contents"; - expected = "${./.}"; - actual = - # Same for all components; nix-util is an arbitrary pick - (nix.appendPatches [ pkgs.emptyFile ]).libs.nix-util.src; - }; - } + (import ./ci/gha/tests { + inherit system; + pkgs = nixpkgsFor.${system}.native; + nixFlake = self; + }).topLevel // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) { dockerImage = self.hydraJobs.dockerImage.${system}; } @@ -371,28 +339,8 @@ flatMapAttrs ( { - # Run all tests with UBSAN enabled. Running both with ubsan and - # without doesn't seem to have much immediate benefit for doubling - # the GHA CI workaround. - # - # TODO: Work toward enabling "address,undefined" if it seems feasible. 
- # This would maybe require dropping Boost coroutines and ignoring intentional - # memory leaks with detect_leaks=0. - "" = rec { - nixpkgs = nixpkgsFor.${system}.native; - nixComponents = nixpkgs.nixComponents2.overrideScope ( - nixCompFinal: nixCompPrev: { - mesonComponentOverrides = _finalAttrs: prevAttrs: { - mesonFlags = - (prevAttrs.mesonFlags or [ ]) - # TODO: Macos builds instrumented with ubsan take very long - # to run functional tests. - ++ lib.optionals (!nixpkgs.stdenv.hostPlatform.isDarwin) [ - (lib.mesonOption "b_sanitize" "undefined") - ]; - }; - } - ); + "" = { + pkgs = nixpkgsFor.${system}.native; }; } // lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.hostPlatform.isDarwin) { @@ -400,27 +348,23 @@ # https://github.com/NixOS/nixpkgs/issues/320448 # TODO: disabled to speed up GHA CI. # "static-" = { - # nixpkgs = nixpkgsFor.${system}.native.pkgsStatic; + # pkgs = nixpkgsFor.${system}.native.pkgsStatic; # }; + "sanitized-" = { + pkgs = nixpkgsFor.${system}.native; + withSanitizers = true; + }; } ) ( - nixpkgsPrefix: - { - nixpkgs, - nixComponents ? nixpkgs.nixComponents2, - }: - flatMapAttrs nixComponents ( - pkgName: pkg: - flatMapAttrs pkg.tests or { } ( - testName: test: { - "${nixpkgsPrefix}${pkgName}-${testName}" = test; - } - ) - ) - // lib.optionalAttrs (nixpkgs.stdenv.hostPlatform == nixpkgs.stdenv.buildPlatform) { - "${nixpkgsPrefix}nix-functional-tests" = nixComponents.nix-functional-tests; - } + nixpkgsPrefix: args: + (import ./ci/gha/tests ( + args + // { + nixFlake = self; + componentTestsPrefix = nixpkgsPrefix; + } + )).componentTests ) // devFlake.checks.${system} or { } ); From 5fe6c537030ffc0353473e25998e81fb37980a98 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 28 Jul 2025 18:53:49 +0200 Subject: [PATCH 089/382] nix flake prefetch-inputs: Add This command fetches all inputs of a flake in parallel. 
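Conceptually, the command walks the graph of locked inputs from the lock
file, de-duplicates the nodes, and fetches each input concurrently,
counting failures instead of aborting on the first one. The standalone
sketch below illustrates only that shape; `Node`, `collect` and
`prefetchAll` are made-up stand-ins, and it uses std::async, whereas the
patch itself uses Nix's ThreadPool bounded by the http-connections
setting:

```cpp
#include <cstddef>
#include <exception>
#include <functional>
#include <future>
#include <set>
#include <string>
#include <vector>

// Toy stand-in for a locked input node; not part of the patch.
struct Node
{
    std::string name;
    std::vector<const Node *> inputs;
};

// Walk the graph, collecting every reachable node exactly once.
static void collect(const Node & node, std::set<const Node *> & seen, std::vector<const Node *> & order)
{
    if (!seen.insert(&node).second)
        return;
    order.push_back(&node);
    for (auto * input : node.inputs)
        collect(*input, seen, order);
}

// Fetch all reachable nodes concurrently; return how many fetches failed.
static std::size_t prefetchAll(const Node & root, const std::function<void(const Node &)> & fetch)
{
    std::set<const Node *> seen;
    std::vector<const Node *> order;
    collect(root, seen, order);

    std::vector<std::future<void>> futures;
    futures.reserve(order.size());
    for (auto * node : order)
        futures.push_back(std::async(std::launch::async, fetch, std::cref(*node)));

    std::size_t failed = 0;
    for (auto & f : futures) {
        try {
            f.get();
        } catch (const std::exception &) {
            failed++; // keep going: one bad input should not abort the rest
        }
    }
    return failed;
}

int main()
{
    Node leaf{"nixpkgs", {}};
    Node root{"root", {&leaf}};
    return prefetchAll(root, [](const Node & n) { (void) n; /* fetch n.name here */ }) == 0 ? 0 : 1;
}
```

As in the patch, a failed fetch is counted rather than treated as fatal,
and the failure count is turned into the process exit status.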
Example runtime for $ chmod -R u+w /tmp/nix2; rm -rf /tmp/nix2; rm ~/.cache/nix/fetcher-cache-v3.sqlite*; rm -rf ~/.cache/nix/tarball-cache/ ~/.cache/nix/gitv3/; time nix flake prefetch-inputs --store /tmp/nix2 https://api.flakehub.com/f/pinned/informalsystems/cosmos.nix/0.3.0/018ce9ed-d0be-7ce5-81b6-a3c6e3ae1187/source.tar.gz with http-connections = 1: real 4m11.859s user 2m6.931s sys 0m25.619s and http-connections = 25 (the default): real 0m57.146s user 2m49.506s sys 0m36.008s --- src/nix/flake-prefetch-inputs.cc | 72 ++++++++++++++++++++++++++++++++ src/nix/flake-prefetch-inputs.md | 17 ++++++++ src/nix/meson.build | 1 + 3 files changed, 90 insertions(+) create mode 100644 src/nix/flake-prefetch-inputs.cc create mode 100644 src/nix/flake-prefetch-inputs.md diff --git a/src/nix/flake-prefetch-inputs.cc b/src/nix/flake-prefetch-inputs.cc new file mode 100644 index 000000000..096eaf539 --- /dev/null +++ b/src/nix/flake-prefetch-inputs.cc @@ -0,0 +1,72 @@ +#include "flake-command.hh" +#include "nix/fetchers/fetch-to-store.hh" +#include "nix/util/thread-pool.hh" +#include "nix/store/filetransfer.hh" +#include "nix/util/exit.hh" + +#include + +using namespace nix; +using namespace nix::flake; + +struct CmdFlakePrefetchInputs : FlakeCommand +{ + std::string description() override + { + return "fetch the inputs of a flake"; + } + + std::string doc() override + { + return +#include "flake-prefetch-inputs.md" + ; + } + + void run(nix::ref store) override + { + auto flake = lockFlake(); + + ThreadPool pool{fileTransferSettings.httpConnections}; + + struct State + { + std::set done; + }; + + Sync state_; + + std::atomic nrFailed{0}; + + std::function visit; + visit = [&](const Node & node) { + if (!state_.lock()->done.insert(&node).second) + return; + + if (auto lockedNode = dynamic_cast(&node)) { + try { + Activity act(*logger, lvlInfo, actUnknown, fmt("fetching '%s'", lockedNode->lockedRef)); + auto accessor = lockedNode->lockedRef.input.getAccessor(store).first; + fetchToStore( + fetchSettings, *store, accessor, FetchMode::Copy, lockedNode->lockedRef.input.getName()); + } catch (Error & e) { + printError("%s", e.what()); + nrFailed++; + } + } + + for (auto & [inputName, input] : node.inputs) { + if (auto inputNode = std::get_if<0>(&input)) + pool.enqueue(std::bind(visit, **inputNode)); + } + }; + + pool.enqueue(std::bind(visit, *flake.lockFile.root)); + + pool.process(); + + throw Exit(nrFailed ? 1 : 0); + } +}; + +static auto rCmdFlakePrefetchInputs = registerCommand2({"flake", "prefetch-inputs"}); diff --git a/src/nix/flake-prefetch-inputs.md b/src/nix/flake-prefetch-inputs.md new file mode 100644 index 000000000..a69f7d367 --- /dev/null +++ b/src/nix/flake-prefetch-inputs.md @@ -0,0 +1,17 @@ +R""( + +# Examples + +* Fetch the inputs of the `hydra` flake: + + ```console + # nix flake prefetch-inputs github:NixOS/hydra + ``` + +# Description + +Fetch the inputs of a flake. This ensures that they are already available for any subsequent evaluation of the flake. + +This operation is recursive: it will fetch not just the direct inputs of the top-level flake, but also transitive inputs. 
+ +)"" diff --git a/src/nix/meson.build b/src/nix/meson.build index 586ee15c3..f1c79b002 100644 --- a/src/nix/meson.build +++ b/src/nix/meson.build @@ -78,6 +78,7 @@ nix_sources = [config_priv_h] + files( 'env.cc', 'eval.cc', 'flake.cc', + 'flake-prefetch-inputs.cc', 'formatter.cc', 'hash.cc', 'log.cc', From 413f7821766164b64f653a76bf107999bd6f9c7f Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 28 Jul 2025 23:07:59 +0300 Subject: [PATCH 090/382] ci: Run sanitizer tests in a separate job This should speed up the CI somewhat by parallelizing the work across the matrix of configurations. --- .github/workflows/ci.yml | 21 ++++++++++++++-- {scripts => ci/gha/tests}/build-checks | 0 .../prepare-installer-for-github-actions | 0 flake.nix | 24 ++++--------------- 4 files changed, 24 insertions(+), 21 deletions(-) rename {scripts => ci/gha/tests}/build-checks (100%) rename {scripts => ci/gha/tests}/prepare-installer-for-github-actions (100%) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f1faeb505..c7e2782d8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,9 +29,18 @@ jobs: - scenario: on ubuntu runs-on: ubuntu-24.04 os: linux + sanitizers: false + primary: true - scenario: on macos runs-on: macos-14 os: darwin + sanitizers: false + primary: true + - scenario: on ubuntu (with sanitizers) + runs-on: ubuntu-24.04 + os: linux + sanitizers: true + primary: false name: tests ${{ matrix.scenario }} runs-on: ${{ matrix.runs-on }} timeout-minutes: 60 @@ -52,13 +61,21 @@ jobs: # https://ubuntu.com/blog/ubuntu-23-10-restricted-unprivileged-user-namespaces - run: sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0 if: matrix.os == 'linux' - - run: scripts/build-checks - - run: scripts/prepare-installer-for-github-actions + - name: Run component tests + run: | + nix build --file ci/gha/tests componentTests -L \ + --arg withSanitizers ${{ matrix.sanitizers }} + - name: Run flake checks and prepare the installer tarball + run: | + ci/gha/tests/build-checks + ci/gha/tests/prepare-installer-for-github-actions + if: ${{ matrix.primary }} - name: Upload installer tarball uses: actions/upload-artifact@v4 with: name: installer-${{matrix.os}} path: out/* + if: ${{ matrix.primary }} installer_test: needs: [tests] diff --git a/scripts/build-checks b/ci/gha/tests/build-checks similarity index 100% rename from scripts/build-checks rename to ci/gha/tests/build-checks diff --git a/scripts/prepare-installer-for-github-actions b/ci/gha/tests/prepare-installer-for-github-actions similarity index 100% rename from scripts/prepare-installer-for-github-actions rename to ci/gha/tests/prepare-installer-for-github-actions diff --git a/flake.nix b/flake.nix index 03d77e055..6a6f2cfd8 100644 --- a/flake.nix +++ b/flake.nix @@ -337,25 +337,11 @@ # Add "passthru" tests // flatMapAttrs - ( - { - "" = { - pkgs = nixpkgsFor.${system}.native; - }; - } - // lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.hostPlatform.isDarwin) { - # TODO: enable static builds for darwin, blocked on: - # https://github.com/NixOS/nixpkgs/issues/320448 - # TODO: disabled to speed up GHA CI. 
- # "static-" = { - # pkgs = nixpkgsFor.${system}.native.pkgsStatic; - # }; - "sanitized-" = { - pkgs = nixpkgsFor.${system}.native; - withSanitizers = true; - }; - } - ) + { + "" = { + pkgs = nixpkgsFor.${system}.native; + }; + } ( nixpkgsPrefix: args: (import ./ci/gha/tests ( From 1c9e11a11f4aedb9e23c89278721ea2879a2df62 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 29 Jul 2025 00:51:09 +0300 Subject: [PATCH 091/382] tests/nixos: Parametrize on the system All of the existing tests only run on x86_64-linux and the whole `nixpkgsFor` makes it hard to reuse those for e.g. running the nixos tests with a sanitizer build of nix. This rips off the bandaid and removes the `nixpkgsFor` parameter in favor of a single instance of nixpkgs with a separate `nixComponents`. --- packaging/hydra.nix | 4 +- tests/nixos/default.nix | 80 ++++++++++++++++--------------- tests/nixos/functional/common.nix | 6 +-- 3 files changed, 47 insertions(+), 43 deletions(-) diff --git a/packaging/hydra.nix b/packaging/hydra.nix index 27c09d9c9..7a7569fa3 100644 --- a/packaging/hydra.nix +++ b/packaging/hydra.nix @@ -240,7 +240,9 @@ in # System tests. tests = import ../tests/nixos { - inherit lib nixpkgs nixpkgsFor; + inherit lib nixpkgs; + pkgs = nixpkgsFor.x86_64-linux.native; + nixComponents = nixpkgsFor.x86_64-linux.native.nixComponents2; inherit (self.inputs) nixpkgs-23-11; } // { diff --git a/tests/nixos/default.nix b/tests/nixos/default.nix index f0b1a8865..e200ae7d9 100644 --- a/tests/nixos/default.nix +++ b/tests/nixos/default.nix @@ -1,7 +1,8 @@ { lib, + pkgs, + nixComponents, nixpkgs, - nixpkgsFor, nixpkgs-23-11, }: @@ -19,27 +20,28 @@ let ); # https://nixos.org/manual/nixos/unstable/index.html#sec-calling-nixos-tests - runNixOSTestFor = - system: test: + runNixOSTest = + test: (nixos-lib.runTest { imports = [ test ]; - hostPkgs = nixpkgsFor.${system}.native; + hostPkgs = pkgs; defaults = { - nixpkgs.pkgs = nixpkgsFor.${system}.native; + nixpkgs.pkgs = pkgs; nix.checkAllErrors = false; # TODO: decide which packaging stage to use. `nix-cli` is efficient, but not the same as the user-facing `everything.nix` package (`default`). Perhaps a good compromise is `everything.nix` + `noTests` defined above? - nix.package = nixpkgsFor.${system}.native.nixComponents2.nix-cli; + nix.package = nixComponents.nix-cli; # Evaluate VMs faster documentation.enable = false; # this links against nix and might break with our git version. 
system.tools.nixos-option.enable = false; }; + _module.args.nixComponents = nixComponents; _module.args.nixpkgs = nixpkgs; - _module.args.system = system; + _module.args.system = pkgs.system; }) // { # allow running tests against older nix versions via `nix eval --apply` @@ -47,13 +49,13 @@ let # nix build "$(nix eval --raw --impure .#hydraJobs.tests.fetch-git --apply 't: (t.forNix "2.19.2").drvPath')^*" forNix = nixVersion: - runNixOSTestFor system { + runNixOSTest { imports = [ test ]; defaults.nixpkgs.overlays = [ (curr: prev: { nix = let - packages = (builtins.getFlake "nix/${nixVersion}").packages.${system}; + packages = (builtins.getFlake "nix/${nixVersion}").packages.${pkgs.system}; in packages.nix-cli or packages.nix; }) @@ -97,18 +99,18 @@ let in { - authorization = runNixOSTestFor "x86_64-linux" ./authorization.nix; + authorization = runNixOSTest ./authorization.nix; - remoteBuilds = runNixOSTestFor "x86_64-linux" ./remote-builds.nix; + remoteBuilds = runNixOSTest ./remote-builds.nix; - remoteBuildsSshNg = runNixOSTestFor "x86_64-linux" ./remote-builds-ssh-ng.nix; + remoteBuildsSshNg = runNixOSTest ./remote-builds-ssh-ng.nix; } // lib.concatMapAttrs ( nixVersion: { setNixPackage, ... }: { - "remoteBuilds_remote_${nixVersion}" = runNixOSTestFor "x86_64-linux" { + "remoteBuilds_remote_${nixVersion}" = runNixOSTest { name = "remoteBuilds_remote_${nixVersion}"; imports = [ ./remote-builds.nix ]; builders.config = @@ -118,7 +120,7 @@ in }; }; - "remoteBuilds_local_${nixVersion}" = runNixOSTestFor "x86_64-linux" { + "remoteBuilds_local_${nixVersion}" = runNixOSTest { name = "remoteBuilds_local_${nixVersion}"; imports = [ ./remote-builds.nix ]; nodes.client = @@ -128,7 +130,7 @@ in }; }; - "remoteBuildsSshNg_remote_${nixVersion}" = runNixOSTestFor "x86_64-linux" { + "remoteBuildsSshNg_remote_${nixVersion}" = runNixOSTest { name = "remoteBuildsSshNg_remote_${nixVersion}"; imports = [ ./remote-builds-ssh-ng.nix ]; builders.config = @@ -140,7 +142,7 @@ in # FIXME: these tests don't work yet - # "remoteBuildsSshNg_local_${nixVersion}" = runNixOSTestFor "x86_64-linux" { + # "remoteBuildsSshNg_local_${nixVersion}" = runNixOSTest { # name = "remoteBuildsSshNg_local_${nixVersion}"; # imports = [ ./remote-builds-ssh-ng.nix ]; # nodes.client = { lib, pkgs, ... 
}: { @@ -151,49 +153,49 @@ in ) otherNixes // { - nix-copy-closure = runNixOSTestFor "x86_64-linux" ./nix-copy-closure.nix; + nix-copy-closure = runNixOSTest ./nix-copy-closure.nix; - nix-copy = runNixOSTestFor "x86_64-linux" ./nix-copy.nix; + nix-copy = runNixOSTest ./nix-copy.nix; - nix-docker = runNixOSTestFor "x86_64-linux" ./nix-docker.nix; + nix-docker = runNixOSTest ./nix-docker.nix; - nssPreload = runNixOSTestFor "x86_64-linux" ./nss-preload.nix; + nssPreload = runNixOSTest ./nss-preload.nix; - githubFlakes = runNixOSTestFor "x86_64-linux" ./github-flakes.nix; + githubFlakes = runNixOSTest ./github-flakes.nix; - gitSubmodules = runNixOSTestFor "x86_64-linux" ./git-submodules.nix; + gitSubmodules = runNixOSTest ./git-submodules.nix; - sourcehutFlakes = runNixOSTestFor "x86_64-linux" ./sourcehut-flakes.nix; + sourcehutFlakes = runNixOSTest ./sourcehut-flakes.nix; - tarballFlakes = runNixOSTestFor "x86_64-linux" ./tarball-flakes.nix; + tarballFlakes = runNixOSTest ./tarball-flakes.nix; - containers = runNixOSTestFor "x86_64-linux" ./containers/containers.nix; + containers = runNixOSTest ./containers/containers.nix; - setuid = lib.genAttrs [ "x86_64-linux" ] (system: runNixOSTestFor system ./setuid.nix); + setuid = runNixOSTest ./setuid.nix; - fetch-git = runNixOSTestFor "x86_64-linux" ./fetch-git; + fetch-git = runNixOSTest ./fetch-git; - ca-fd-leak = runNixOSTestFor "x86_64-linux" ./ca-fd-leak; + ca-fd-leak = runNixOSTest ./ca-fd-leak; - gzip-content-encoding = runNixOSTestFor "x86_64-linux" ./gzip-content-encoding.nix; + gzip-content-encoding = runNixOSTest ./gzip-content-encoding.nix; - functional_user = runNixOSTestFor "x86_64-linux" ./functional/as-user.nix; + functional_user = runNixOSTest ./functional/as-user.nix; - functional_trusted = runNixOSTestFor "x86_64-linux" ./functional/as-trusted-user.nix; + functional_trusted = runNixOSTest ./functional/as-trusted-user.nix; - functional_root = runNixOSTestFor "x86_64-linux" ./functional/as-root.nix; + functional_root = runNixOSTest ./functional/as-root.nix; - functional_symlinked-home = runNixOSTestFor "x86_64-linux" ./functional/symlinked-home.nix; + functional_symlinked-home = runNixOSTest ./functional/symlinked-home.nix; - user-sandboxing = runNixOSTestFor "x86_64-linux" ./user-sandboxing; + user-sandboxing = runNixOSTest ./user-sandboxing; - s3-binary-cache-store = runNixOSTestFor "x86_64-linux" ./s3-binary-cache-store.nix; + s3-binary-cache-store = runNixOSTest ./s3-binary-cache-store.nix; - fsync = runNixOSTestFor "x86_64-linux" ./fsync.nix; + fsync = runNixOSTest ./fsync.nix; - cgroups = runNixOSTestFor "x86_64-linux" ./cgroups; + cgroups = runNixOSTest ./cgroups; - fetchurl = runNixOSTestFor "x86_64-linux" ./fetchurl.nix; + fetchurl = runNixOSTest ./fetchurl.nix; - chrootStore = runNixOSTestFor "x86_64-linux" ./chroot-store.nix; + chrootStore = runNixOSTest ./chroot-store.nix; } diff --git a/tests/nixos/functional/common.nix b/tests/nixos/functional/common.nix index a2067c07d..4d32b7573 100644 --- a/tests/nixos/functional/common.nix +++ b/tests/nixos/functional/common.nix @@ -1,4 +1,4 @@ -{ lib, ... }: +{ lib, nixComponents, ... 
}: let # FIXME (roberth) reference issue @@ -49,11 +49,11 @@ in cd ~ - cp -r ${pkgs.nixComponents2.nix-functional-tests.src} nix + cp -r ${nixComponents.nix-functional-tests.src} nix chmod -R +w nix chmod u+w nix/.version - echo ${pkgs.nixComponents2.version} > nix/.version + echo ${nixComponents.version} > nix/.version export isTestOnNixOS=1 From 6d46dc9f6aae4ccff84166a6ec49eb44377c6d34 Mon Sep 17 00:00:00 2001 From: Oleksandr Knyshuk Date: Thu, 17 Jul 2025 14:00:26 +0200 Subject: [PATCH 092/382] Add warn-short-path-literals setting Add a new setting to warn about path literals that don't start with "." or "/". When enabled, expressions like `foo/bar` will emit a warning suggesting to use `./foo/bar` instead. A functional test is included. The setting defaults to false for backward compatibility but could eventually default to true in the future. Closes: #13374 Co-authored-by: Robert Hensing --- src/libexpr/include/nix/expr/eval-settings.hh | 15 +++++ src/libexpr/parser.y | 9 +++ tests/functional/meson.build | 1 + tests/functional/short-path-literals.sh | 55 +++++++++++++++++++ 4 files changed, 80 insertions(+) create mode 100644 tests/functional/short-path-literals.sh diff --git a/src/libexpr/include/nix/expr/eval-settings.hh b/src/libexpr/include/nix/expr/eval-settings.hh index 25ba84ac9..4c9db0c73 100644 --- a/src/libexpr/include/nix/expr/eval-settings.hh +++ b/src/libexpr/include/nix/expr/eval-settings.hh @@ -327,6 +327,21 @@ struct EvalSettings : Config This option can be enabled by setting `NIX_ABORT_ON_WARN=1` in the environment. )"}; + + Setting warnShortPathLiterals{ + this, + false, + "warn-short-path-literals", + R"( + If set to true, the Nix evaluator will warn when encountering relative path literals + that don't start with `./` or `../`. + + For example, with this setting enabled, `foo/bar` would emit a warning + suggesting to use `./foo/bar` instead. + + This is useful for improving code readability and making path literals + more explicit. + )"}; }; /** diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 8878b86c2..2b2566208 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -365,6 +365,15 @@ string_parts_interpolated path_start : PATH { std::string_view literal({$1.p, $1.l}); + + /* check for short path literals */ + if (state->settings.warnShortPathLiterals && literal.front() != '/' && literal.front() != '.') { + logWarning({ + .msg = HintFmt("relative path literal '%s' should be prefixed with '.' for clarity: './%s'. 
(" ANSI_BOLD "warn-short-path-literals" ANSI_NORMAL " = true)", literal, literal), + .pos = state->positions[CUR_POS] + }); + } + Path path(absPath(literal, state->basePath.path.abs())); /* add back in the trailing '/' to the first segment */ if (literal.size() > 1 && literal.back() == '/') diff --git a/tests/functional/meson.build b/tests/functional/meson.build index 0e2004219..8f2b1ff59 100644 --- a/tests/functional/meson.build +++ b/tests/functional/meson.build @@ -112,6 +112,7 @@ suites = [ 'impure-eval.sh', 'pure-eval.sh', 'eval.sh', + 'short-path-literals.sh', 'repl.sh', 'binary-cache-build-remote.sh', 'search.sh', diff --git a/tests/functional/short-path-literals.sh b/tests/functional/short-path-literals.sh new file mode 100644 index 000000000..f74044dda --- /dev/null +++ b/tests/functional/short-path-literals.sh @@ -0,0 +1,55 @@ +#!/usr/bin/env bash + +source common.sh + +clearStoreIfPossible + +# Test 1: Without the setting (default), no warnings should be produced +nix eval --expr 'test/subdir' 2>"$TEST_ROOT"/stderr +grepQuietInverse < "$TEST_ROOT/stderr" -E "relative path|path literal" || fail "Should not produce warnings by default" + +# Test 2: With the setting enabled, warnings should be produced for short path literals +nix eval --warn-short-path-literals --expr 'test/subdir' 2>"$TEST_ROOT"/stderr +grepQuiet "relative path literal 'test/subdir' should be prefixed with '.' for clarity: './test/subdir'" "$TEST_ROOT/stderr" + +# Test 3: Different short path literals should all produce warnings +nix eval --warn-short-path-literals --expr 'foo/bar' 2>"$TEST_ROOT"/stderr +grepQuiet "relative path literal 'foo/bar' should be prefixed with '.' for clarity: './foo/bar'" "$TEST_ROOT/stderr" + +nix eval --warn-short-path-literals --expr 'a/b/c/d' 2>"$TEST_ROOT"/stderr +grepQuiet "relative path literal 'a/b/c/d' should be prefixed with '.' 
for clarity: './a/b/c/d'" "$TEST_ROOT/stderr"
+
+# Test 4: Paths starting with ./ should NOT produce warnings
+nix eval --warn-short-path-literals --expr './test/subdir' 2>"$TEST_ROOT"/stderr
+grepQuietInverse "relative path literal" "$TEST_ROOT/stderr"
+
+# Test 5: Paths starting with ../ should NOT produce warnings
+nix eval --warn-short-path-literals --expr '../test/subdir' 2>"$TEST_ROOT"/stderr
+grepQuietInverse "relative path literal" "$TEST_ROOT/stderr"
+
+# Test 6: Absolute paths should NOT produce warnings
+nix eval --warn-short-path-literals --expr '/absolute/path' 2>"$TEST_ROOT"/stderr
+grepQuietInverse "relative path literal" "$TEST_ROOT/stderr"
+
+# Test 7: Test that the warning is at the correct position
+nix eval --warn-short-path-literals --expr 'foo/bar' 2>"$TEST_ROOT"/stderr
+grepQuiet "at «string»:1:1:" "$TEST_ROOT/stderr"
+
+# Test 8: Test that evaluation still works correctly despite the warning
+result=$(nix eval --warn-short-path-literals --expr 'test/subdir' 2>/dev/null)
+expected="$PWD/test/subdir"
+[[ "$result" == "$expected" ]] || fail "Evaluation result should be correct despite warning"
+
+# Test 9: Test with nix-instantiate as well
+nix-instantiate --warn-short-path-literals --eval -E 'foo/bar' 2>"$TEST_ROOT"/stderr
+grepQuiet "relative path literal 'foo/bar' should be prefixed" "$TEST_ROOT/stderr"
+
+# Test 10: Test that the setting can be set via configuration
+NIX_CONFIG='warn-short-path-literals = true' nix eval --expr 'test/file' 2>"$TEST_ROOT"/stderr
+grepQuiet "relative path literal 'test/file' should be prefixed" "$TEST_ROOT/stderr"
+
+# Test 11: Test that command line flag overrides config
+NIX_CONFIG='warn-short-path-literals = true' nix eval --no-warn-short-path-literals --expr 'test/file' 2>"$TEST_ROOT"/stderr
+grepQuietInverse "relative path literal" "$TEST_ROOT/stderr"
+
+echo "short-path-literals test passed!"

From 1989dd7bf9cb9d562b3c97e65498c6eca042a0a6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=B6rg=20Thalheim?=
Date: Tue, 29 Jul 2025 09:25:29 +0200
Subject: [PATCH 093/382] add derivation parser benchmark

The currently identified bottlenecks are parseString in derivations.cc
and dirOf (because of std::filesystem creation).
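For readers new to the framework: a Google Benchmark case is a function
taking `benchmark::State`, with the code under test inside a
`for (auto _ : state)` loop and a `BENCHMARK(...)` registration. The
self-contained sketch below shows only that shape; `splitFields` is a
made-up stand-in, not the derivation parser touched by this patch:

```cpp
#include <benchmark/benchmark.h>

#include <sstream>
#include <string>
#include <vector>

// Made-up routine to benchmark; a stand-in for a parser, not Nix code.
static std::vector<std::string> splitFields(const std::string & s)
{
    std::vector<std::string> fields;
    std::stringstream ss(s);
    std::string field;
    while (std::getline(ss, field, ','))
        fields.push_back(field);
    return fields;
}

static void BM_SplitFields(benchmark::State & state)
{
    // Build the input once, outside the measured loop.
    std::string input;
    for (int i = 0; i < 1000; i++)
        input += "field" + std::to_string(i) + ",";

    for (auto _ : state) {
        auto fields = splitFields(input);
        benchmark::DoNotOptimize(fields);
    }
    // Lets the framework report throughput in bytes per second.
    state.SetBytesProcessed(state.iterations() * input.size());
}
BENCHMARK(BM_SplitFields);

BENCHMARK_MAIN();
```

The benchmark added by this patch follows the same pattern, but replaces
BENCHMARK_MAIN() with a hand-written main() so that nix::initLibStore()
runs before the benchmarks.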
--- doc/manual/source/SUMMARY.md.in | 1 + doc/manual/source/development/benchmarking.md | 187 ++++++++++++++++++ meson.options | 7 + packaging/dev-shell.nix | 3 +- .../data/derivation/firefox.drv | 1 + src/libstore-tests/data/derivation/hello.drv | 1 + src/libstore-tests/derivation-parser-bench.cc | 45 +++++ src/libstore-tests/meson.build | 16 ++ src/libstore-tests/meson.options | 9 + src/libstore-tests/package.nix | 2 +- 10 files changed, 270 insertions(+), 2 deletions(-) create mode 100644 doc/manual/source/development/benchmarking.md create mode 100644 src/libstore-tests/data/derivation/firefox.drv create mode 100644 src/libstore-tests/data/derivation/hello.drv create mode 100644 src/libstore-tests/derivation-parser-bench.cc create mode 100644 src/libstore-tests/meson.options diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index bfb921567..cc4748f56 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -128,6 +128,7 @@ - [Development](development/index.md) - [Building](development/building.md) - [Testing](development/testing.md) + - [Benchmarking](development/benchmarking.md) - [Debugging](development/debugging.md) - [Documentation](development/documentation.md) - [CLI guideline](development/cli-guideline.md) diff --git a/doc/manual/source/development/benchmarking.md b/doc/manual/source/development/benchmarking.md new file mode 100644 index 000000000..74eadd9c2 --- /dev/null +++ b/doc/manual/source/development/benchmarking.md @@ -0,0 +1,187 @@ +# Running Benchmarks + +This guide explains how to build and run performance benchmarks in the Nix codebase. + +## Overview + +Nix uses the [Google Benchmark](https://github.com/google/benchmark) framework for performance testing. Benchmarks help measure and track the performance of critical operations like derivation parsing. + +## Building Benchmarks + +Benchmarks are disabled by default and must be explicitly enabled during the build configuration. For accurate results, use a debug-optimized release build. + +### Development Environment Setup + +First, enter the development shell which includes the necessary dependencies: + +```bash +nix develop .#native-ccacheStdenv +``` + +### Configure Build with Benchmarks + +From the project root, configure the build with benchmarks enabled and optimization: + +```bash +cd build +meson configure -Dbenchmarks=true -Dbuildtype=debugoptimized +``` + +The `debugoptimized` build type provides: +- Compiler optimizations for realistic performance measurements +- Debug symbols for profiling and analysis +- Balance between performance and debuggability + +### Build the Benchmarks + +Build the project including benchmarks: + +```bash +ninja +``` + +This will create benchmark executables in the build directory. Currently available: +- `build/src/libstore-tests/nix-store-benchmarks` - Store-related performance benchmarks + +Additional benchmark executables will be created as more benchmarks are added to the codebase. + +## Running Benchmarks + +### Basic Usage + +Run benchmark executables directly. For example, to run store benchmarks: + +```bash +./build/src/libstore-tests/nix-store-benchmarks +``` + +As more benchmark executables are added, run them similarly from their respective build directories. 
+ +### Filtering Benchmarks + +Run specific benchmarks using regex patterns: + +```bash +# Run only derivation parser benchmarks +./build/src/libstore-tests/nix-store-benchmarks --benchmark_filter="derivation.*" + +# Run only benchmarks for hello.drv +./build/src/libstore-tests/nix-store-benchmarks --benchmark_filter=".*hello.*" +``` + +### Output Formats + +Generate benchmark results in different formats: + +```bash +# JSON output +./build/src/libstore-tests/nix-store-benchmarks --benchmark_format=json > results.json + +# CSV output +./build/src/libstore-tests/nix-store-benchmarks --benchmark_format=csv > results.csv +``` + +### Advanced Options + +```bash +# Run benchmarks multiple times for better statistics +./build/src/libstore-tests/nix-store-benchmarks --benchmark_repetitions=10 + +# Set minimum benchmark time (useful for micro-benchmarks) +./build/src/libstore-tests/nix-store-benchmarks --benchmark_min_time=2 + +# Compare against baseline +./build/src/libstore-tests/nix-store-benchmarks --benchmark_baseline=baseline.json + +# Display time in custom units +./build/src/libstore-tests/nix-store-benchmarks --benchmark_time_unit=ms +``` + +## Writing New Benchmarks + +To add new benchmarks: + +1. Create a new `.cc` file in the appropriate `*-tests` directory +2. Include the benchmark header: + ```cpp + #include + ``` + +3. Write benchmark functions: + ```cpp + static void BM_YourBenchmark(benchmark::State & state) + { + // Setup code here + + for (auto _ : state) { + // Code to benchmark + } + } + BENCHMARK(BM_YourBenchmark); + ``` + +4. Add the file to the corresponding `meson.build`: + ```meson + benchmarks_sources = files( + 'your-benchmark.cc', + # existing benchmarks... + ) + ``` + +## Profiling with Benchmarks + +For deeper performance analysis, combine benchmarks with profiling tools: + +```bash +# Using Linux perf +perf record ./build/src/libstore-tests/nix-store-benchmarks +perf report +``` + +### Using Valgrind Callgrind + +Valgrind's callgrind tool provides detailed profiling information that can be visualized with kcachegrind: + +```bash +# Profile with callgrind +valgrind --tool=callgrind ./build/src/libstore-tests/nix-store-benchmarks + +# Visualize the results with kcachegrind +kcachegrind callgrind.out.* +``` + +This provides: +- Function call graphs +- Instruction-level profiling +- Source code annotation +- Interactive visualization of performance bottlenecks + +## Continuous Performance Testing + +```bash +# Save baseline results +./build/src/libstore-tests/nix-store-benchmarks --benchmark_format=json > baseline.json + +# Compare against baseline in CI +./build/src/libstore-tests/nix-store-benchmarks --benchmark_baseline=baseline.json +``` + +## Troubleshooting + +### Benchmarks not building + +Ensure benchmarks are enabled: +```bash +meson configure build | grep benchmarks +# Should show: benchmarks true +``` + +### Inconsistent results + +- Ensure your system is not under heavy load +- Disable CPU frequency scaling for consistent results +- Run benchmarks multiple times with `--benchmark_repetitions` + +## See Also + +- [Google Benchmark documentation](https://github.com/google/benchmark/blob/main/docs/user_guide.md) diff --git a/meson.options b/meson.options index 30670902e..d2c9fa40c 100644 --- a/meson.options +++ b/meson.options @@ -20,3 +20,10 @@ option( value : true, description : 'Build language bindings (e.g. 
Perl)', ) + +option( + 'benchmarks', + type : 'boolean', + value : false, + description : 'Build benchmarks (requires gbenchmark)', +) diff --git a/packaging/dev-shell.nix b/packaging/dev-shell.nix index e01a0ed8f..f10a9d56e 100644 --- a/packaging/dev-shell.nix +++ b/packaging/dev-shell.nix @@ -126,7 +126,8 @@ pkgs.nixComponents2.nix-util.overrideAttrs ( ++ lib.optional stdenv.hostPlatform.isLinux pkgs.buildPackages.mold-wrapped; buildInputs = - attrs.buildInputs or [ ] + [ pkgs.gbenchmark ] + ++ attrs.buildInputs or [ ] ++ pkgs.nixComponents2.nix-util.buildInputs ++ pkgs.nixComponents2.nix-store.buildInputs ++ pkgs.nixComponents2.nix-store-tests.externalBuildInputs diff --git a/src/libstore-tests/data/derivation/firefox.drv b/src/libstore-tests/data/derivation/firefox.drv new file mode 100644 index 000000000..98ff69c81 --- /dev/null +++ b/src/libstore-tests/data/derivation/firefox.drv @@ -0,0 +1 @@ +Derive([("out","/nix/store/jycqnr8rdfy035ckiwmar4yql406jjh6-firefox-140.0.4","","")],[("/nix/store/01qkwh6g0xqlrchziwa8zm3c8p6s5ylc-krb5-1.21.3.drv",["lib"]),("/nix/store/03ixv9yqpmi015vv0m4gza1d2ac8wql8-libglvnd-1.7.0.drv",["out"]),("/nix/store/2g73b2yd3qhzp774i9svckdnspyrfrn8-firefox-unwrapped-140.0.4.drv",["out"]),("/nix/store/2wf9wi34rvwyq7dxk0xlk41ccll282z0-make-shell-wrapper-hook.drv",["out"]),("/nix/store/3gr22851nw1nj8zmy34961yb3fbr2iz5-libva-2.22.0.drv",["out"]),("/nix/store/7gzk0w5125wqhgp8sh26l3k15lklm82c-policies.json.drv",["out"]),("/nix/store/b3jvr65plcm9nm4wn8ssw2ymnv2zshmq-extract-binary-wrapper-cmd.drv",["out"]),("/nix/store/ck7ckwkvqv49hz5rl8yp52c4v6p2f6vd-pipewire-1.4.6.drv",["out"]),("/nix/store/fm2b1q5m1wrvy237j6x9xcvhxw95wjfp-alsa-lib-1.2.14.drv",["out"]),("/nix/store/fvch7vyklf8gr8i53p9ybv2azyh6q30y-ffmpeg-7.1.1.drv",["lib"]),("/nix/store/ggzpx6yx9xd5h241ri730acdfpdirxjq-mesa-libgbm-25.1.0.drv",["out"]),("/nix/store/hmwbmap5c8zp3kl20bda0qk1dg4f408d-vulkan-loader-1.4.313.0.drv",["out"]),("/nix/store/hy8x576wmvz2fk9a778r05lqdb0dcbl4-speech-dispatcher-0.12.1.drv",["out"]),("/nix/store/knjlgd5nfd7dw45axa6z7w7d7rb11801-pciutils-3.14.0.drv",["out"]),("/nix/store/l7y4nybngpffyndkw8rwx2g3c8dw4yyj-xdg-utils-1.2.1.drv",["out"]),("/nix/store/lh0mbixp7jiz7z8cxfypm8b17lf8z9la-cups-2.4.12.drv",["lib"]),("/nix/store/lmfgw4dz59sdvplfh3zazqmin8d7gh9d-libnotify-0.8.6.drv",["out"]),("/nix/store/m4zp8p8y4wzxg2lhgq43kbgr2xl5l9px-firefox.desktop.drv",["out"]),("/nix/store/n8hy5gvnrw0kzz6cjdhky3rxr0q024sh-jq-1.8.1.drv",["dev"]),("/nix/store/nz0xp44xmbqcp2sfhsgicsswrrb2cpl7-libXScrnSaver-1.2.4.drv",["out"]),("/nix/store/p2nvbwxxiarbqaq26rqrjqkyw8dl0cf4-lndir-1.0.5.drv",["out"]),("/nix/store/p306lljvhw8906r1lxyyk7jn3bckdypn-sndio-1.10.0.drv",["out"]),("/nix/store/pbqbdcp76r373256ajlbp3fankpw5pdh-libpulseaudio-17.0.drv",["out"]),("/nix/store/q27q075ckyjkpqxx8cgvpghcwr0nzz5v-systemd-minimal-libs-257.6.drv",["out"]),("/nix/store/s4b8yadif84kiv8gyr9nxdi6zbg69b4g-bash-5.2p37.drv",["out"]),("/nix/store/sc2pgkzc1s6zp5dp8j7wsd4msilsnijn-stdenv-linux.drv",["out"]),("/nix/store/sy2kn7hwfw0nyvxq7rvzrmvm2ypxs9x6-gtk+3-3.24.49.drv",["dev"]),("/nix/store/v3sc4j11fhsah2ij0wkziw6nd3il9dyy-libjack2-1.9.22.drv",["out"]),("/nix/store/xy07yjv0x8bcdc3lzyq45lrbbz08hg5p-adwaita-icon-theme-48.0.drv",["out"]),("/nix/store/z8v9j1cf6faasgqrw3jw74xjyjiqwk82-libcanberra-0.30.drv",["out"])],["/nix/store/shkw4qm9qcw5sc5n1k5jznc83ny02r39-default-builder.sh","/nix/store/vj1c3wf9c11a0qs6p3ymfvrnsdgsdcbq-source-stdenv.sh"],"x86_64-linux","/nix/store/p79bgyzmmmddi554ckwzbqlavbkw07zh-bash-5.2p37/bin/bash",["-e","/nix/store/vj1c3wf9c
11a0qs6p3ymfvrnsdgsdcbq-source-stdenv.sh","/nix/store/shkw4qm9qcw5sc5n1k5jznc83ny02r39-default-builder.sh"],[("__json","{\"NIX_MAIN_PROGRAM\":\"firefox\",\"buildCommand\":\"if [ ! -x \\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4/bin/firefox\\\" ]\\nthen\\n echo \\\"cannot find executable file \\\\`/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4/bin/firefox'\\\"\\n exit 1\\nfi\\n\\n#########################\\n# #\\n# EXTRA PREF CHANGES #\\n# #\\n#########################\\n# Link the runtime. The executable itself has to be copied,\\n# because it will resolve paths relative to its true location.\\n# Any symbolic links have to be replicated as well.\\ncd \\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4\\\"\\nfind . -type d -exec mkdir -p \\\"$out\\\"/{} \\\\;\\n\\nfind . -type f \\\\( -not -name \\\"firefox\\\" \\\\) -exec ln -sT \\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4\\\"/{} \\\"$out\\\"/{} \\\\;\\n\\nfind . -type f \\\\( -name \\\"firefox\\\" -o -name \\\"firefox-bin\\\" \\\\) -print0 | while read -d $'\\\\0' f; do\\n cp -P --no-preserve=mode,ownership --remove-destination \\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4/$f\\\" \\\"$out/$f\\\"\\n chmod a+rwx \\\"$out/$f\\\"\\ndone\\n\\n# fix links and absolute references\\n\\nfind . -type l -print0 | while read -d $'\\\\0' l; do\\n target=\\\"$(readlink \\\"$l\\\")\\\"\\n target=${target/#\\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4\\\"/\\\"$out\\\"}\\n ln -sfT \\\"$target\\\" \\\"$out/$l\\\"\\ndone\\n\\ncd \\\"$out\\\"\\n\\n\\n# create the wrapper\\n\\nexecutablePrefix=\\\"$out/bin\\\"\\nexecutablePath=\\\"$out/bin/firefox\\\"\\noldWrapperArgs=()\\n\\nif [[ -L $executablePath ]]; then\\n # Symbolic link: wrap the link's target.\\n oldExe=\\\"$(readlink -v --canonicalize-existing \\\"$executablePath\\\")\\\"\\n rm \\\"$executablePath\\\"\\nelif wrapperCmd=$(/nix/store/qczbm5rh1vfkql4jznp1p5lv9nyjz99r-extract-binary-wrapper-cmd \\\"$executablePath\\\"); [[ $wrapperCmd ]]; then\\n # If the executable is a binary wrapper, we need to update its target to\\n # point to $out, but we can't just edit the binary in-place because of length\\n # issues. So we extract the command used to create the wrapper and add the\\n # arguments to our wrapper.\\n parseMakeCWrapperCall() {\\n shift # makeCWrapper\\n oldExe=$1; shift\\n oldWrapperArgs=(\\\"$@\\\")\\n }\\n eval \\\"parseMakeCWrapperCall ${wrapperCmd//\\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4\\\"/\\\"$out\\\"}\\\"\\n rm \\\"$executablePath\\\"\\nelse\\n if read -rn2 shebang < \\\"$executablePath\\\" && [[ $shebang == '#!' 
]]; then\\n # Shell wrapper: patch in place to point to $out.\\n sed -i \\\"s@/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4@$out@g\\\" \\\"$executablePath\\\"\\n fi\\n # Suffix the executable with -old, because -wrapped might already be used by the old wrapper.\\n oldExe=\\\"$executablePrefix/.firefox\\\"-old\\n mv \\\"$executablePath\\\" \\\"$oldExe\\\"\\nfi\\nappendToVar makeWrapperArgs --prefix XDG_DATA_DIRS : \\\"$GSETTINGS_SCHEMAS_PATH\\\"\\nconcatTo makeWrapperArgs oldWrapperArgs\\n\\nmakeWrapper \\\"$oldExe\\\" \\\"$out/bin/firefox\\\" \\\"${makeWrapperArgs[@]}\\\"\\n\\n#############################\\n# #\\n# END EXTRA PREF CHANGES #\\n# #\\n#############################\\nif [ -e \\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4/share/icons\\\" ]; then\\n mkdir -p \\\"$out/share\\\"\\n ln -s \\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4/share/icons\\\" \\\"$out/share/icons\\\"\\nelse\\n for res in 16 32 48 64 128; do\\n mkdir -p \\\"$out/share/icons/hicolor/${res}x${res}/apps\\\"\\n icon=$( find \\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4/lib/\\\" -name \\\"default${res}.png\\\" )\\n if [ -e \\\"$icon\\\" ]; then ln -s \\\"$icon\\\" \\\\\\n \\\"$out/share/icons/hicolor/${res}x${res}/apps/firefox.png\\\"\\n fi\\n done\\nfi\\n\\ninstall -m 644 -D -t $out/share/applications $desktopItem/share/applications/*\\n\\nmkdir -p $out/lib/mozilla/native-messaging-hosts\\nfor ext in ; do\\n ln -sLt $out/lib/mozilla/native-messaging-hosts $ext/lib/mozilla/native-messaging-hosts/*\\ndone\\n\\nmkdir -p $out/lib/mozilla/pkcs11-modules\\nfor ext in ; do\\n ln -sLt $out/lib/mozilla/pkcs11-modules $ext/lib/mozilla/pkcs11-modules/*\\ndone\\n\\n\\n#########################\\n# #\\n# EXTRA PREF CHANGES #\\n# #\\n#########################\\n# user customization\\nlibDir=\\\"$out/lib/firefox\\\"\\n\\n# creating policies.json\\nmkdir -p \\\"$libDir/distribution\\\"\\n\\nPOL_PATH=\\\"$libDir/distribution/policies.json\\\"\\nrm -f \\\"$POL_PATH\\\"\\ncat /nix/store/s9r3kncxydp3s94cari78f2dl68w1k3j-policies.json >> \\\"$POL_PATH\\\"\\n\\nextraPoliciesFiles=()\\nfor extraPoliciesFile in \\\"${extraPoliciesFiles[@]}\\\"; do\\n jq -s '.[0] * .[1]' $extraPoliciesFile \\\"$POL_PATH\\\" > .tmp.json\\n mv .tmp.json \\\"$POL_PATH\\\"\\ndone\\n\\n# preparing for autoconfig\\nprefsDir=\\\"$out/lib/firefox/defaults/pref\\\"\\nmkdir -p \\\"$prefsDir\\\"\\n\\necho 'pref(\\\"general.config.filename\\\", \\\"mozilla.cfg\\\");' > \\\"$prefsDir/autoconfig.js\\\"\\necho 'pref(\\\"general.config.obscure_value\\\", 0);' >> \\\"$prefsDir/autoconfig.js\\\"\\n\\ncat > \\\"$libDir/mozilla.cfg\\\" << EOF\\n// First line must be a comment\\n\\n// Disables addon signature checking\\n// to be able to install addons that do not have an extid\\n// Security is maintained because only user whitelisted addons\\n// with a checksum can be installed\\n\\n\\nEOF\\n\\nextraPrefsFiles=()\\nfor extraPrefsFile in \\\"${extraPrefsFiles[@]}\\\"; do\\n cat \\\"$extraPrefsFile\\\" >> \\\"$libDir/mozilla.cfg\\\"\\ndone\\n\\ncat >> \\\"$libDir/mozilla.cfg\\\" << EOF\\n\\nEOF\\n\\nmkdir -p \\\"$libDir/distribution/extensions\\\"\\n\\n#############################\\n# #\\n# END EXTRA PREF CHANGES #\\n# 
#\\n#############################\\n\",\"buildInputs\":[\"/nix/store/09050l3isnxqyjbsn4qfbq190i6a85bx-gtk+3-3.24.49-dev\"],\"builder\":\"/nix/store/p79bgyzmmmddi554ckwzbqlavbkw07zh-bash-5.2p37/bin/bash\",\"cmakeFlags\":[],\"configureFlags\":[],\"depsBuildBuild\":[],\"depsBuildBuildPropagated\":[],\"depsBuildTarget\":[],\"depsBuildTargetPropagated\":[],\"depsHostHost\":[],\"depsHostHostPropagated\":[],\"depsTargetTarget\":[],\"depsTargetTargetPropagated\":[],\"desktopItem\":\"/nix/store/w8xccv3flpwzgwan7k0l4rrv9m8ipffa-firefox.desktop\",\"doCheck\":false,\"doInstallCheck\":false,\"env\":{\"NIX_MAIN_PROGRAM\":\"firefox\"},\"gtk_modules\":[\"/nix/store/6pwfsghvp9fa6bpwryk7bwbjd4f5vdxy-libcanberra-0.30/lib/gtk-3.0/\"],\"libs\":\"/nix/store/pb6fwczgq5d08yppb0mxsbvvzi2wl71g-systemd-minimal-libs-257.6/lib:/nix/store/ib8prgicm88f9xbg7cgbk72n3s69c0rx-libva-2.22.0/lib:/nix/store/d6a8ckgb953nqr2qamidqzz1i7v473pm-mesa-libgbm-25.1.0/lib:/nix/store/1mx1hccld2shxc3acmr32kydiw5kb0l3-libnotify-0.8.6/lib:/nix/store/3ia435d0b41k0gz1hmg5yj134fh1j70x-libXScrnSaver-1.2.4/lib:/nix/store/wj5sc0i81fb6hcz802gmsgdsjll79wfc-cups-2.4.12-lib/lib:/nix/store/h0zj2k3q2iqs6b2qdjqrg29l2kaksgkz-pciutils-3.14.0/lib:/nix/store/0jgicjfcml2v3plj470ggf8q88xkxq4d-vulkan-loader-1.4.313.0/lib:/nix/store/z67zjqlvbgz80slzmmibmyv31k68l2r6-speech-dispatcher-0.12.1/lib:/nix/store/gh2fi51xdj78cj9j9za5jfrrj8qgx90c-pipewire-1.4.6/lib:/nix/store/r5a9sknnr626v8whd46h4fm7i6v5yl8l-ffmpeg-7.1.1-lib/lib:/nix/store/7razjlx084wqwcaa359mvrcjd4lx1kn2-krb5-1.21.3-lib/lib:/nix/store/iyy1g70fhkz3hsrckbmbqgxik1j9779c-libglvnd-1.7.0/lib:/nix/store/vfiyznjv206ysafzl3ibnr7cr1lhq83q-libpulseaudio-17.0/lib:/nix/store/7p8rmrv6hy8lx90a52nk94fdm4av51pl-alsa-lib-1.2.14/lib:/nix/store/a5m2h89rn29n7374pdm34d7a120c1j6f-sndio-1.10.0/lib:/nix/store/nwsvsihqzmgabv47kqqrsj94nlmraajp-libjack2-1.9.22/lib:/nix/store/6pwfsghvp9fa6bpwryk7bwbjd4f5vdxy-libcanberra-0.30/lib:/nix/store/pb6fwczgq5d08yppb0mxsbvvzi2wl71g-systemd-minimal-libs-257.6/lib64:/nix/store/ib8prgicm88f9xbg7cgbk72n3s69c0rx-libva-2.22.0/lib64:/nix/store/d6a8ckgb953nqr2qamidqzz1i7v473pm-mesa-libgbm-25.1.0/lib64:/nix/store/1mx1hccld2shxc3acmr32kydiw5kb0l3-libnotify-0.8.6/lib64:/nix/store/3ia435d0b41k0gz1hmg5yj134fh1j70x-libXScrnSaver-1.2.4/lib64:/nix/store/wj5sc0i81fb6hcz802gmsgdsjll79wfc-cups-2.4.12-lib/lib64:/nix/store/h0zj2k3q2iqs6b2qdjqrg29l2kaksgkz-pciutils-3.14.0/lib64:/nix/store/0jgicjfcml2v3plj470ggf8q88xkxq4d-vulkan-loader-1.4.313.0/lib64:/nix/store/z67zjqlvbgz80slzmmibmyv31k68l2r6-speech-dispatcher-0.12.1/lib64:/nix/store/gh2fi51xdj78cj9j9za5jfrrj8qgx90c-pipewire-1.4.6/lib64:/nix/store/r5a9sknnr626v8whd46h4fm7i6v5yl8l-ffmpeg-7.1.1-lib/lib64:/nix/store/7razjlx084wqwcaa359mvrcjd4lx1kn2-krb5-1.21.3-lib/lib64:/nix/store/iyy1g70fhkz3hsrckbmbqgxik1j9779c-libglvnd-1.7.0/lib64:/nix/store/vfiyznjv206ysafzl3ibnr7cr1lhq83q-libpulseaudio-17.0/lib64:/nix/store/7p8rmrv6hy8lx90a52nk94fdm4av51pl-alsa-lib-1.2.14/lib64:/nix/store/a5m2h89rn29n7374pdm34d7a120c1j6f-sndio-1.10.0/lib64:/nix/store/nwsvsihqzmgabv47kqqrsj94nlmraajp-libjack2-1.9.22/lib64:/nix/store/6pwfsghvp9fa6bpwryk7bwbjd4f5vdxy-libcanberra-0.30/lib64\",\"makeWrapperArgs\":[\"--prefix\",\"LD_LIBRARY_PATH\",\":\",\"/nix/store/pb6fwczgq5d08yppb0mxsbvvzi2wl71g-systemd-minimal-libs-257.6/lib:/nix/store/ib8prgicm88f9xbg7cgbk72n3s69c0rx-libva-2.22.0/lib:/nix/store/d6a8ckgb953nqr2qamidqzz1i7v473pm-mesa-libgbm-25.1.0/lib:/nix/store/1mx1hccld2shxc3acmr32kydiw5kb0l3-libnotify-0.8.6/lib:/nix/store/3ia435d0b41k0gz1hmg5yj134fh1j70x-libXScrnSaver-1.2.4/lib:/nix/store/wj5sc0
i81fb6hcz802gmsgdsjll79wfc-cups-2.4.12-lib/lib:/nix/store/h0zj2k3q2iqs6b2qdjqrg29l2kaksgkz-pciutils-3.14.0/lib:/nix/store/0jgicjfcml2v3plj470ggf8q88xkxq4d-vulkan-loader-1.4.313.0/lib:/nix/store/z67zjqlvbgz80slzmmibmyv31k68l2r6-speech-dispatcher-0.12.1/lib:/nix/store/gh2fi51xdj78cj9j9za5jfrrj8qgx90c-pipewire-1.4.6/lib:/nix/store/r5a9sknnr626v8whd46h4fm7i6v5yl8l-ffmpeg-7.1.1-lib/lib:/nix/store/7razjlx084wqwcaa359mvrcjd4lx1kn2-krb5-1.21.3-lib/lib:/nix/store/iyy1g70fhkz3hsrckbmbqgxik1j9779c-libglvnd-1.7.0/lib:/nix/store/vfiyznjv206ysafzl3ibnr7cr1lhq83q-libpulseaudio-17.0/lib:/nix/store/7p8rmrv6hy8lx90a52nk94fdm4av51pl-alsa-lib-1.2.14/lib:/nix/store/a5m2h89rn29n7374pdm34d7a120c1j6f-sndio-1.10.0/lib:/nix/store/nwsvsihqzmgabv47kqqrsj94nlmraajp-libjack2-1.9.22/lib:/nix/store/6pwfsghvp9fa6bpwryk7bwbjd4f5vdxy-libcanberra-0.30/lib:/nix/store/pb6fwczgq5d08yppb0mxsbvvzi2wl71g-systemd-minimal-libs-257.6/lib64:/nix/store/ib8prgicm88f9xbg7cgbk72n3s69c0rx-libva-2.22.0/lib64:/nix/store/d6a8ckgb953nqr2qamidqzz1i7v473pm-mesa-libgbm-25.1.0/lib64:/nix/store/1mx1hccld2shxc3acmr32kydiw5kb0l3-libnotify-0.8.6/lib64:/nix/store/3ia435d0b41k0gz1hmg5yj134fh1j70x-libXScrnSaver-1.2.4/lib64:/nix/store/wj5sc0i81fb6hcz802gmsgdsjll79wfc-cups-2.4.12-lib/lib64:/nix/store/h0zj2k3q2iqs6b2qdjqrg29l2kaksgkz-pciutils-3.14.0/lib64:/nix/store/0jgicjfcml2v3plj470ggf8q88xkxq4d-vulkan-loader-1.4.313.0/lib64:/nix/store/z67zjqlvbgz80slzmmibmyv31k68l2r6-speech-dispatcher-0.12.1/lib64:/nix/store/gh2fi51xdj78cj9j9za5jfrrj8qgx90c-pipewire-1.4.6/lib64:/nix/store/r5a9sknnr626v8whd46h4fm7i6v5yl8l-ffmpeg-7.1.1-lib/lib64:/nix/store/7razjlx084wqwcaa359mvrcjd4lx1kn2-krb5-1.21.3-lib/lib64:/nix/store/iyy1g70fhkz3hsrckbmbqgxik1j9779c-libglvnd-1.7.0/lib64:/nix/store/vfiyznjv206ysafzl3ibnr7cr1lhq83q-libpulseaudio-17.0/lib64:/nix/store/7p8rmrv6hy8lx90a52nk94fdm4av51pl-alsa-lib-1.2.14/lib64:/nix/store/a5m2h89rn29n7374pdm34d7a120c1j6f-sndio-1.10.0/lib64:/nix/store/nwsvsihqzmgabv47kqqrsj94nlmraajp-libjack2-1.9.22/lib64:/nix/store/6pwfsghvp9fa6bpwryk7bwbjd4f5vdxy-libcanberra-0.30/lib64\",\"--suffix\",\"PATH\",\":\",\"/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9/bin\",\"--set\",\"MOZ_APP_LAUNCHER\",\"firefox\",\"--set\",\"MOZ_LEGACY_PROFILES\",\"1\",\"--set\",\"MOZ_ALLOW_DOWNGRADE\",\"1\",\"--suffix\",\"GTK_PATH\",\":\",\"/nix/store/6pwfsghvp9fa6bpwryk7bwbjd4f5vdxy-libcanberra-0.30/lib/gtk-3.0/\",\"--suffix\",\"XDG_DATA_DIRS\",\":\",\"/nix/store/1w8x293926aq2vcyys36aw49fy5p8cm5-adwaita-icon-theme-48.0/share\",\"--set-default\",\"MOZ_ENABLE_WAYLAND\",\"1\",\"--suffix\",\"PATH\",\":\",\"/nix/store/yrzm7cya8nf8xnpi8xlfwx16plqkzhgh-xdg-utils-1.2.1/bin\",\"--set\",\"MOZ_SYSTEM_DIR\",\"/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9/lib/mozilla\"],\"mesonFlags\":[],\"name\":\"firefox-140.0.4\",\"nativeBuildInputs\":[\"/nix/store/lnd6p3anjxgwawlhlpzvvl40d4yc2jd4-make-shell-wrapper-hook\",\"/nix/store/kn8zagv6mk3ykmax5fqh4h18raqhxbh6-lndir-1.0.5\",\"/nix/store/x0kva02y0iyh7l0qvnx3l8ci7ll1r5si-jq-1.8.1-dev\"],\"outputChecks\":{\"out\":{\"disallowedRequisites\":[\"/nix/store/7nlf5v84s4p2yhx327j8495yik60qnzh-gcc-wrapper-14.3.0\"]}},\"outputs\":[\"out\"],\"patches\":[],\"pname\":\"firefox\",\"preferLocalBuild\":true,\"propagatedBuildInputs\":[],\"propagatedNativeBuildInputs\":[],\"stdenv\":\"/nix/store/a13rl87yjhzqrbkc4gb0mrwz2mfkivcf-stdenv-linux\",\"strictDeps\":false,\"system\":\"x86_64-linux\",\"version\":\"140.0.4\"}"),("out","/nix/store/jycqnr8rdfy035ckiwmar4yql406jjh6-firefox-140.0.4")]) \ No newline at end of file diff --git 
a/src/libstore-tests/data/derivation/hello.drv b/src/libstore-tests/data/derivation/hello.drv new file mode 100644 index 000000000..741f7745c --- /dev/null +++ b/src/libstore-tests/data/derivation/hello.drv @@ -0,0 +1 @@ +Derive([("out","/nix/store/hhg83gh653wjw4ny49xn92f13v2j1za4-hello-2.12.2","","")],[("/nix/store/1xz4avqqrxqsxw7idz119vdzw837p1n1-version-check-hook.drv",["out"]),("/nix/store/bsv47sbqcar3205il55spxqacxp8j0fj-hello-2.12.2.tar.gz.drv",["out"]),("/nix/store/s4b8yadif84kiv8gyr9nxdi6zbg69b4g-bash-5.2p37.drv",["out"]),("/nix/store/sc2pgkzc1s6zp5dp8j7wsd4msilsnijn-stdenv-linux.drv",["out"])],["/nix/store/shkw4qm9qcw5sc5n1k5jznc83ny02r39-default-builder.sh","/nix/store/vj1c3wf9c11a0qs6p3ymfvrnsdgsdcbq-source-stdenv.sh"],"x86_64-linux","/nix/store/p79bgyzmmmddi554ckwzbqlavbkw07zh-bash-5.2p37/bin/bash",["-e","/nix/store/vj1c3wf9c11a0qs6p3ymfvrnsdgsdcbq-source-stdenv.sh","/nix/store/shkw4qm9qcw5sc5n1k5jznc83ny02r39-default-builder.sh"],[("NIX_MAIN_PROGRAM","hello"),("__structuredAttrs",""),("buildInputs",""),("builder","/nix/store/p79bgyzmmmddi554ckwzbqlavbkw07zh-bash-5.2p37/bin/bash"),("cmakeFlags",""),("configureFlags",""),("depsBuildBuild",""),("depsBuildBuildPropagated",""),("depsBuildTarget",""),("depsBuildTargetPropagated",""),("depsHostHost",""),("depsHostHostPropagated",""),("depsTargetTarget",""),("depsTargetTargetPropagated",""),("doCheck","1"),("doInstallCheck","1"),("mesonFlags",""),("name","hello-2.12.2"),("nativeBuildInputs","/nix/store/fxzn6kr5anxn5jgh511x56wrg8b3a99a-version-check-hook"),("out","/nix/store/hhg83gh653wjw4ny49xn92f13v2j1za4-hello-2.12.2"),("outputs","out"),("patches",""),("pname","hello"),("postInstallCheck","stat \"${!outputBin}/bin/hello\"\n"),("propagatedBuildInputs",""),("propagatedNativeBuildInputs",""),("src","/nix/store/dw402azxjrgrzrk6j0p66wkqrab5mwgw-hello-2.12.2.tar.gz"),("stdenv","/nix/store/a13rl87yjhzqrbkc4gb0mrwz2mfkivcf-stdenv-linux"),("strictDeps",""),("system","x86_64-linux"),("version","2.12.2")]) \ No newline at end of file diff --git a/src/libstore-tests/derivation-parser-bench.cc b/src/libstore-tests/derivation-parser-bench.cc new file mode 100644 index 000000000..7598758f0 --- /dev/null +++ b/src/libstore-tests/derivation-parser-bench.cc @@ -0,0 +1,45 @@ +#include +#include "nix/store/derivations.hh" +#include "nix/store/store-api.hh" +#include "nix/util/experimental-features.hh" +#include "nix/store/store-open.hh" +#include "nix/store/globals.hh" +#include +#include + +using namespace nix; + +// Benchmark parsing real derivation files +static void BM_ParseRealDerivationFile(benchmark::State & state, const std::string & filename) +{ + // Read the file once + std::ifstream file(filename); + std::stringstream buffer; + buffer << file.rdbuf(); + std::string content = buffer.str(); + + auto store = openStore("dummy://"); + ExperimentalFeatureSettings xpSettings; + + for (auto _ : state) { + auto drv = parseDerivation(*store, std::string(content), "test", xpSettings); + benchmark::DoNotOptimize(drv); + } + state.SetBytesProcessed(state.iterations() * content.size()); +} + +// Register benchmarks for actual test derivation files if they exist +BENCHMARK_CAPTURE(BM_ParseRealDerivationFile, hello, std::string(NIX_UNIT_TEST_DATA) + "/derivation/hello.drv"); +BENCHMARK_CAPTURE(BM_ParseRealDerivationFile, firefox, std::string(NIX_UNIT_TEST_DATA) + "/derivation/firefox.drv"); + +// Custom main to initialize Nix before running benchmarks +int main(int argc, char ** argv) +{ + // Initialize libstore + nix::initLibStore(false); + + // Initialize 
and run benchmarks + ::benchmark::Initialize(&argc, argv); + ::benchmark::RunSpecifiedBenchmarks(); + return 0; +} diff --git a/src/libstore-tests/meson.build b/src/libstore-tests/meson.build index 79f21620e..89189bab9 100644 --- a/src/libstore-tests/meson.build +++ b/src/libstore-tests/meson.build @@ -105,3 +105,19 @@ test( }, protocol : 'gtest', ) + +# Build benchmarks if enabled +if get_option('benchmarks') + gbenchmark = dependency('benchmark', required : true) + + benchmark_exe = executable( + 'nix-store-benchmarks', + 'derivation-parser-bench.cc', + config_priv_h, + dependencies : deps_private_subproject + deps_private + deps_other + [gbenchmark], + include_directories : include_dirs, + link_args: linker_export_flags, + install : false, + cpp_args : ['-DNIX_UNIT_TEST_DATA="' + meson.current_source_dir() + '/data"'], + ) +endif diff --git a/src/libstore-tests/meson.options b/src/libstore-tests/meson.options new file mode 100644 index 000000000..2b3c1af60 --- /dev/null +++ b/src/libstore-tests/meson.options @@ -0,0 +1,9 @@ +# vim: filetype=meson + +option( + 'benchmarks', + type : 'boolean', + value : false, + description : 'Build benchmarks (requires gbenchmark)', + yield : true, +) diff --git a/src/libstore-tests/package.nix b/src/libstore-tests/package.nix index f606604ba..93c71a382 100644 --- a/src/libstore-tests/package.nix +++ b/src/libstore-tests/package.nix @@ -35,7 +35,7 @@ mkMesonExecutable (finalAttrs: { ../../.version ./.version ./meson.build - # ./meson.options + ./meson.options (fileset.fileFilter (file: file.hasExt "cc") ./.) (fileset.fileFilter (file: file.hasExt "hh") ./.) ]; From 8652b6b417749ac6b2362e36e502b42ca7784047 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 25 May 2025 20:01:20 -0400 Subject: [PATCH 094/382] Store `StructuredAttrs` directly in `Derivation` Instead of parsing a structured attrs at some later point, we parsed it right away when parsing the A-Term format, and likewise serialize it to `__json = ` when serializing a derivation to A-Term. The JSON format can directly contain the JSON structured attrs without so encoding it, so we just do that. --- src/libexpr/primops.cc | 43 +++++++----- .../derivation-advanced-attrs.cc | 44 +++++------- src/libstore-tests/derivation.cc | 4 +- .../build/derivation-building-goal.cc | 6 +- src/libstore/build/derivation-goal.cc | 3 +- src/libstore/derivation-options.cc | 6 ++ src/libstore/derivations.cc | 69 ++++++++++++++----- .../store/build/derivation-building-goal.hh | 3 +- .../include/nix/store/derivation-options.hh | 3 + src/libstore/include/nix/store/derivations.hh | 6 ++ .../include/nix/store/parsed-derivations.hh | 25 ++++++- src/libstore/misc.cc | 4 +- src/libstore/parsed-derivations.cc | 44 +++++++++--- src/libstore/unix/build/derivation-builder.cc | 8 +-- .../nix/store/build/derivation-builder.hh | 11 --- src/nix-build/nix-build.cc | 7 +- 16 files changed, 177 insertions(+), 109 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index a645f546d..6af179e4e 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1363,7 +1363,7 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName /* Check whether attributes should be passed as a JSON file. 
*/ using nlohmann::json; - std::optional jsonObject; + std::optional jsonObject; auto pos = v.determinePos(noPos); auto attr = attrs->find(state.sStructuredAttrs); if (attr != attrs->end() @@ -1372,7 +1372,7 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName pos, "while evaluating the `__structuredAttrs` " "attribute passed to builtins.derivationStrict")) - jsonObject = json::object(); + jsonObject = StructuredAttrs{.structuredAttrs = json::object()}; /* Check whether null attributes should be ignored. */ bool ignoreNulls = false; @@ -1484,7 +1484,7 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName if (i->name == state.sStructuredAttrs) continue; - jsonObject->emplace(key, printValueAsJSON(state, true, *i->value, pos, context)); + jsonObject->structuredAttrs.emplace(key, printValueAsJSON(state, true, *i->value, pos, context)); if (i->name == state.sBuilder) drv.builder = state.forceString(*i->value, context, pos, context_below); @@ -1532,23 +1532,26 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName } else { auto s = state.coerceToString(pos, *i->value, context, context_below, true).toOwned(); - drv.env.emplace(key, s); - if (i->name == state.sBuilder) - drv.builder = std::move(s); - else if (i->name == state.sSystem) - drv.platform = std::move(s); - else if (i->name == state.sOutputHash) - outputHash = std::move(s); - else if (i->name == state.sOutputHashAlgo) - outputHashAlgo = parseHashAlgoOpt(s); - else if (i->name == state.sOutputHashMode) - handleHashMode(s); - else if (i->name == state.sOutputs) - handleOutputs(tokenizeString(s)); - else if (i->name == state.sJson) + if (i->name == state.sJson) { warn( "In derivation '%s': setting structured attributes via '__json' is deprecated, and may be disallowed in future versions of Nix. Set '__structuredAttrs = true' instead.", drvName); + drv.structuredAttrs = StructuredAttrs::parse(s); + } else { + drv.env.emplace(key, s); + if (i->name == state.sBuilder) + drv.builder = std::move(s); + else if (i->name == state.sSystem) + drv.platform = std::move(s); + else if (i->name == state.sOutputHash) + outputHash = std::move(s); + else if (i->name == state.sOutputHashAlgo) + outputHashAlgo = parseHashAlgoOpt(s); + else if (i->name == state.sOutputHashMode) + handleHashMode(s); + else if (i->name == state.sOutputs) + handleOutputs(tokenizeString(s)); + } } } @@ -1560,8 +1563,10 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName } if (jsonObject) { - drv.env.emplace("__json", jsonObject->dump()); - jsonObject.reset(); + /* The only other way `drv.structuredAttrs` can be set is when + `jsonObject` is not set. */ + assert(!drv.structuredAttrs); + drv.structuredAttrs = std::move(*jsonObject); } /* Everything in the context of the strings in the derivation diff --git a/src/libstore-tests/derivation-advanced-attrs.cc b/src/libstore-tests/derivation-advanced-attrs.cc index fbdf8ed29..37b422421 100644 --- a/src/libstore-tests/derivation-advanced-attrs.cc +++ b/src/libstore-tests/derivation-advanced-attrs.cc @@ -108,10 +108,9 @@ TYPED_TEST(DerivationAdvancedAttrsBothTest, advancedAttributes_defaults) auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - auto parsedDrv = StructuredAttrs::tryParse(got.env); - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, parsedDrv ? 
&*parsedDrv : nullptr); + DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); - EXPECT_TRUE(!parsedDrv); + EXPECT_TRUE(!got.structuredAttrs); EXPECT_EQ(options.additionalSandboxProfile, ""); EXPECT_EQ(options.noChroot, false); @@ -143,8 +142,7 @@ TEST_F(DerivationAdvancedAttrsTest, advancedAttributes_defaults) auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - auto parsedDrv = StructuredAttrs::tryParse(got.env); - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, parsedDrv ? &*parsedDrv : nullptr); + DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); EXPECT_EQ(options.getRequiredSystemFeatures(got), StringSet{}); }); @@ -157,8 +155,7 @@ TEST_F(CaDerivationAdvancedAttrsTest, advancedAttributes_defaults) auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - auto parsedDrv = StructuredAttrs::tryParse(got.env); - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, parsedDrv ? &*parsedDrv : nullptr); + DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); EXPECT_EQ(options.getRequiredSystemFeatures(got), StringSet{"ca-derivations"}); }); @@ -171,10 +168,9 @@ TYPED_TEST(DerivationAdvancedAttrsBothTest, advancedAttributes) auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - auto parsedDrv = StructuredAttrs::tryParse(got.env); - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, parsedDrv ? &*parsedDrv : nullptr); + DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); - EXPECT_TRUE(!parsedDrv); + EXPECT_TRUE(!got.structuredAttrs); EXPECT_EQ(options.additionalSandboxProfile, "sandcastle"); EXPECT_EQ(options.noChroot, true); @@ -195,8 +191,7 @@ TEST_F(DerivationAdvancedAttrsTest, advancedAttributes) auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - auto parsedDrv = StructuredAttrs::tryParse(got.env); - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, parsedDrv ? &*parsedDrv : nullptr); + DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); EXPECT_EQ( options.exportReferencesGraph, @@ -245,8 +240,7 @@ TEST_F(CaDerivationAdvancedAttrsTest, advancedAttributes) auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - auto parsedDrv = StructuredAttrs::tryParse(got.env); - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, parsedDrv ? &*parsedDrv : nullptr); + DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); EXPECT_EQ( options.exportReferencesGraph, @@ -298,10 +292,9 @@ TYPED_TEST(DerivationAdvancedAttrsBothTest, advancedAttributes_structuredAttrs_d auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - auto parsedDrv = StructuredAttrs::tryParse(got.env); - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, parsedDrv ? 
&*parsedDrv : nullptr); + DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); - EXPECT_TRUE(parsedDrv); + EXPECT_TRUE(got.structuredAttrs); EXPECT_EQ(options.additionalSandboxProfile, ""); EXPECT_EQ(options.noChroot, false); @@ -332,8 +325,7 @@ TEST_F(DerivationAdvancedAttrsTest, advancedAttributes_structuredAttrs_defaults) auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - auto parsedDrv = StructuredAttrs::tryParse(got.env); - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, parsedDrv ? &*parsedDrv : nullptr); + DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); EXPECT_EQ(options.getRequiredSystemFeatures(got), StringSet{}); }); @@ -346,8 +338,7 @@ TEST_F(CaDerivationAdvancedAttrsTest, advancedAttributes_structuredAttrs_default auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - auto parsedDrv = StructuredAttrs::tryParse(got.env); - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, parsedDrv ? &*parsedDrv : nullptr); + DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); EXPECT_EQ(options.getRequiredSystemFeatures(got), StringSet{"ca-derivations"}); }); @@ -360,10 +351,9 @@ TYPED_TEST(DerivationAdvancedAttrsBothTest, advancedAttributes_structuredAttrs) auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - auto parsedDrv = StructuredAttrs::tryParse(got.env); - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, parsedDrv ? &*parsedDrv : nullptr); + DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); - EXPECT_TRUE(parsedDrv); + EXPECT_TRUE(got.structuredAttrs); EXPECT_EQ(options.additionalSandboxProfile, "sandcastle"); EXPECT_EQ(options.noChroot, true); @@ -394,8 +384,7 @@ TEST_F(DerivationAdvancedAttrsTest, advancedAttributes_structuredAttrs) auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - auto parsedDrv = StructuredAttrs::tryParse(got.env); - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, parsedDrv ? &*parsedDrv : nullptr); + DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); EXPECT_EQ( options.exportReferencesGraph, @@ -448,8 +437,7 @@ TEST_F(CaDerivationAdvancedAttrsTest, advancedAttributes_structuredAttrs) auto drvPath = writeDerivation(*this->store, got, NoRepair, true); - auto parsedDrv = StructuredAttrs::tryParse(got.env); - DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, parsedDrv ? 
&*parsedDrv : nullptr); + DerivationOptions options = DerivationOptions::fromStructuredAttrs(got.env, got.structuredAttrs); EXPECT_EQ( options.exportReferencesGraph, diff --git a/src/libstore-tests/derivation.cc b/src/libstore-tests/derivation.cc index 7d0507a7a..812e1d01b 100644 --- a/src/libstore-tests/derivation.cc +++ b/src/libstore-tests/derivation.cc @@ -222,7 +222,7 @@ Derivation makeSimpleDrv(const Store & store) "bar", "baz", }; - drv.env = { + drv.env = StringPairs{ { "BIG_BAD", "WOLF", @@ -284,7 +284,7 @@ Derivation makeDynDepDerivation(const Store & store) "bar", "baz", }; - drv.env = { + drv.env = StringPairs{ { "BIG_BAD", "WOLF", diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index e68e60250..bc8b35462 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -32,12 +32,9 @@ DerivationBuildingGoal::DerivationBuildingGoal( { drv = std::make_unique(drv_); - if (auto parsedOpt = StructuredAttrs::tryParse(drv->env)) { - parsedDrv = std::make_unique(*parsedOpt); - } try { drvOptions = - std::make_unique(DerivationOptions::fromStructuredAttrs(drv->env, parsedDrv.get())); + std::make_unique(DerivationOptions::fromStructuredAttrs(drv->env, drv->structuredAttrs)); } catch (Error & e) { e.addTrace({}, "while parsing derivation '%s'", worker.store.printStorePath(drvPath)); throw; @@ -661,7 +658,6 @@ Goal::Co DerivationBuildingGoal::tryToBuild() buildMode, buildResult, *drv, - parsedDrv.get(), *drvOptions, inputPaths, initialOutputs, diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 55111e378..b2faa7a6e 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -63,9 +63,8 @@ Goal::Co DerivationGoal::haveDerivation() trace("have derivation"); auto drvOptions = [&]() -> DerivationOptions { - auto parsedOpt = StructuredAttrs::tryParse(drv->env); try { - return DerivationOptions::fromStructuredAttrs(drv->env, parsedOpt ? &*parsedOpt : nullptr); + return DerivationOptions::fromStructuredAttrs(drv->env, drv->structuredAttrs); } catch (Error & e) { e.addTrace({}, "while parsing derivation '%s'", worker.store.printStorePath(drvPath)); throw; diff --git a/src/libstore/derivation-options.cc b/src/libstore/derivation-options.cc index 07212289e..f1515c308 100644 --- a/src/libstore/derivation-options.cc +++ b/src/libstore/derivation-options.cc @@ -92,6 +92,12 @@ using OutputChecks = DerivationOptions::OutputChecks; using OutputChecksVariant = std::variant>; +DerivationOptions DerivationOptions::fromStructuredAttrs( + const StringMap & env, const std::optional & parsed, bool shouldWarn) +{ + return fromStructuredAttrs(env, parsed ? 
&*parsed : nullptr); +} + DerivationOptions DerivationOptions::fromStructuredAttrs(const StringMap & env, const StructuredAttrs * parsed, bool shouldWarn) { diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 279713c71..30048c522 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -452,6 +452,7 @@ Derivation parseDerivation( expect(str, ")"); drv.env.insert_or_assign(std::move(name), std::move(value)); } + drv.structuredAttrs = StructuredAttrs::tryExtract(drv.env); expect(str, ")"); return drv; @@ -685,16 +686,28 @@ std::string Derivation::unparse( s += ",["; first = true; - for (auto & i : env) { - if (first) - first = false; - else + + auto unparseEnv = [&](const StringPairs atermEnv) { + for (auto & i : atermEnv) { + if (first) + first = false; + else + s += ','; + s += '('; + printString(s, i.first); s += ','; - s += '('; - printString(s, i.first); - s += ','; - printString(s, maskOutputs && outputs.count(i.first) ? "" : i.second); - s += ')'; + printString(s, maskOutputs && outputs.count(i.first) ? "" : i.second); + s += ')'; + } + }; + + StructuredAttrs::checkKeyNotInUse(env); + if (structuredAttrs) { + StringPairs scratch = env; + scratch.insert(structuredAttrs->unparse()); + unparseEnv(scratch); + } else { + unparseEnv(env); } s += "])"; @@ -948,6 +961,7 @@ Source & readDerivation(Source & in, const StoreDirConfig & store, BasicDerivati auto value = readString(in); drv.env[key] = value; } + drv.structuredAttrs = StructuredAttrs::tryExtract(drv.env); return in; } @@ -983,9 +997,21 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva } CommonProto::write(store, CommonProto::WriteConn{.to = out}, drv.inputSrcs); out << drv.platform << drv.builder << drv.args; - out << drv.env.size(); - for (auto & i : drv.env) - out << i.first << i.second; + + auto writeEnv = [&](const StringPairs atermEnv) { + out << atermEnv.size(); + for (auto & [k, v] : atermEnv) + out << k << v; + }; + + StructuredAttrs::checkKeyNotInUse(drv.env); + if (drv.structuredAttrs) { + StringPairs scratch = drv.env; + scratch.insert(drv.structuredAttrs->unparse()); + writeEnv(scratch); + } else { + writeEnv(drv.env); + } } std::string hashPlaceholder(const OutputNameView outputName) @@ -1017,6 +1043,17 @@ void BasicDerivation::applyRewrites(const StringMap & rewrites) newEnv.emplace(envName, envValue); } env = std::move(newEnv); + + if (structuredAttrs) { + // TODO rewrite the JSON AST properly, rather than dump parse round trip. 
+ auto [k, jsonS] = structuredAttrs->unparse(); + jsonS = rewriteStrings(jsonS, rewrites); + StringPairs newEnv; + newEnv.insert(std::pair{k, std::move(jsonS)}); + auto newStructuredAttrs = StructuredAttrs::tryExtract(newEnv); + assert(newStructuredAttrs); + structuredAttrs = std::move(*newStructuredAttrs); + } } static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites) @@ -1338,10 +1375,8 @@ nlohmann::json Derivation::toJSON(const StoreDirConfig & store) const res["args"] = args; res["env"] = env; - if (auto it = env.find("__json"); it != env.end()) { - res["env"].erase("__json"); - res["structuredAttrs"] = nlohmann::json::parse(it->second); - } + if (structuredAttrs) + res["structuredAttrs"] = structuredAttrs->structuredAttrs; return res; } @@ -1411,7 +1446,7 @@ Derivation Derivation::fromJSON( } if (auto structuredAttrs = get(json, "structuredAttrs")) - res.env.insert_or_assign("__json", structuredAttrs->dump()); + res.structuredAttrs = StructuredAttrs{*structuredAttrs}; return res; } diff --git a/src/libstore/include/nix/store/build/derivation-building-goal.hh b/src/libstore/include/nix/store/build/derivation-building-goal.hh index 66a934d49..029288998 100644 --- a/src/libstore/include/nix/store/build/derivation-building-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-building-goal.hh @@ -1,8 +1,8 @@ #pragma once ///@file -#include "nix/store/parsed-derivations.hh" #include "nix/store/derivations.hh" +#include "nix/store/parsed-derivations.hh" #include "nix/store/derivation-options.hh" #include "nix/store/build/derivation-building-misc.hh" #include "nix/store/outputs-spec.hh" @@ -39,7 +39,6 @@ struct DerivationBuildingGoal : public Goal */ std::unique_ptr drv; - std::unique_ptr parsedDrv; std::unique_ptr drvOptions; /** diff --git a/src/libstore/include/nix/store/derivation-options.hh b/src/libstore/include/nix/store/derivation-options.hh index ff3693366..98517e904 100644 --- a/src/libstore/include/nix/store/derivation-options.hh +++ b/src/libstore/include/nix/store/derivation-options.hh @@ -176,6 +176,9 @@ struct DerivationOptions static DerivationOptions fromStructuredAttrs(const StringMap & env, const StructuredAttrs * parsed, bool shouldWarn = true); + static DerivationOptions + fromStructuredAttrs(const StringMap & env, const std::optional & parsed, bool shouldWarn = true); + /** * @param drv Must be the same derivation we parsed this from. In * the future we'll flip things around so a `BasicDerivation` has diff --git a/src/libstore/include/nix/store/derivations.hh b/src/libstore/include/nix/store/derivations.hh index 41cd179f4..18479b425 100644 --- a/src/libstore/include/nix/store/derivations.hh +++ b/src/libstore/include/nix/store/derivations.hh @@ -7,6 +7,7 @@ #include "nix/store/content-address.hh" #include "nix/util/repair-flag.hh" #include "nix/store/derived-path-map.hh" +#include "nix/store/parsed-derivations.hh" #include "nix/util/sync.hh" #include "nix/util/variant-wrapper.hh" @@ -286,7 +287,12 @@ struct BasicDerivation std::string platform; Path builder; Strings args; + /** + * Must not contain the key `__json`, at least in order to serialize to A-Term. 
+ */ StringPairs env; + std::optional structuredAttrs; + std::string name; BasicDerivation() = default; diff --git a/src/libstore/include/nix/store/parsed-derivations.hh b/src/libstore/include/nix/store/parsed-derivations.hh index ecc2f7e61..2638a3500 100644 --- a/src/libstore/include/nix/store/parsed-derivations.hh +++ b/src/libstore/include/nix/store/parsed-derivations.hh @@ -18,7 +18,30 @@ struct StructuredAttrs { nlohmann::json structuredAttrs; - static std::optional tryParse(const StringPairs & env); + bool operator==(const StructuredAttrs &) const = default; + + /** + * Unconditionally parse from a JSON string. Used by `tryExtract`. + */ + static StructuredAttrs parse(const std::string & encoded); + + /** + * Like `tryParse`, but removes the env var which encoded the structured + * attrs from the map if one is found. + */ + static std::optional tryExtract(StringPairs & env); + + /** + * Opposite of `tryParse`, at least if one makes a map from this + * single key-value PR. + */ + std::pair unparse() const; + + /** + * Ensures that the structured attrs "env var" is not in used, so we + * are free to use it instead. + */ + static void checkKeyNotInUse(const StringPairs & env); nlohmann::json prepareStructuredAttrs( Store & store, diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index 7492204ce..c794f8d06 100644 --- a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -224,13 +224,11 @@ MissingPaths Store::queryMissing(const std::vector & targets) return; auto drv = make_ref(derivationFromPath(drvPath)); - auto parsedDrv = StructuredAttrs::tryParse(drv->env); DerivationOptions drvOptions; try { // FIXME: this is a lot of work just to get the value // of `allowSubstitutes`. - drvOptions = - DerivationOptions::fromStructuredAttrs(drv->env, parsedDrv ? &*parsedDrv : nullptr); + drvOptions = DerivationOptions::fromStructuredAttrs(drv->env, drv->structuredAttrs); } catch (Error & e) { e.addTrace({}, "while parsing derivation '%s'", printStorePath(drvPath)); throw; diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc index 5c6deb87a..797230e97 100644 --- a/src/libstore/parsed-derivations.cc +++ b/src/libstore/parsed-derivations.cc @@ -8,20 +8,41 @@ namespace nix { -std::optional StructuredAttrs::tryParse(const StringPairs & env) +static constexpr std::string_view envVarName = "__json"; + +StructuredAttrs StructuredAttrs::parse(const std::string & encoded) +{ + try { + return StructuredAttrs{ + .structuredAttrs = nlohmann::json::parse(encoded), + }; + } catch (std::exception & e) { + throw Error("cannot process __json attribute: %s", e.what()); + } +} + +std::optional StructuredAttrs::tryExtract(StringPairs & env) { /* Parse the __json attribute, if any. */ - auto jsonAttr = env.find("__json"); + auto jsonAttr = env.find(envVarName); if (jsonAttr != env.end()) { - try { - return StructuredAttrs{ - .structuredAttrs = nlohmann::json::parse(jsonAttr->second), - }; - } catch (std::exception & e) { - throw Error("cannot process __json attribute: %s", e.what()); - } - } - return {}; + auto encoded = std::move(jsonAttr->second); + env.erase(jsonAttr); + return parse(encoded); + } else + return {}; +} + +std::pair StructuredAttrs::unparse() const +{ + return {envVarName, structuredAttrs.dump()}; +} + +void StructuredAttrs::checkKeyNotInUse(const StringPairs & env) +{ + if (env.count(envVarName)) + throw Error( + "Cannot have an environment variable named '__json'. 
This key is reserved for encoding structured attrs"); } static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*"); @@ -175,4 +196,5 @@ std::string StructuredAttrs::writeShell(const nlohmann::json & json) return jsonSh; } + } // namespace nix diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index d598e51d9..7bb4f3177 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -809,7 +809,7 @@ void DerivationBuilderImpl::startBuilder() writeStructuredAttrs(); /* Handle exportReferencesGraph(), if set. */ - if (!parsedDrv) { + if (!drv.structuredAttrs) { for (auto & [fileName, ss] : drvOptions.exportReferencesGraph) { StorePathSet storePathSet; for (auto & storePathS : ss) { @@ -1081,7 +1081,7 @@ void DerivationBuilderImpl::initEnv() /* In non-structured mode, set all bindings either directory in the environment or via a file, as specified by `DerivationOptions::passAsFile`. */ - if (!parsedDrv) { + if (!drv.structuredAttrs) { for (auto & i : drv.env) { if (drvOptions.passAsFile.find(i.first) == drvOptions.passAsFile.end()) { env[i.first] = i.second; @@ -1149,8 +1149,8 @@ void DerivationBuilderImpl::initEnv() void DerivationBuilderImpl::writeStructuredAttrs() { - if (parsedDrv) { - auto json = parsedDrv->prepareStructuredAttrs(store, drvOptions, inputPaths, drv.outputs); + if (drv.structuredAttrs) { + auto json = drv.structuredAttrs->prepareStructuredAttrs(store, drvOptions, inputPaths, drv.outputs); nlohmann::json rewritten; for (auto & [i, v] : json["outputs"].get()) { /* The placeholder must have a rewrite, so we use it to cover both the diff --git a/src/libstore/unix/include/nix/store/build/derivation-builder.hh b/src/libstore/unix/include/nix/store/build/derivation-builder.hh index eecad3daa..465b45197 100644 --- a/src/libstore/unix/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/unix/include/nix/store/build/derivation-builder.hh @@ -27,15 +27,6 @@ struct DerivationBuilderParams */ const Derivation & drv; - /** - * The "structured attrs" of `drv`, if it has them. - * - * @todo this should be part of `Derivation`. - * - * @todo this should be renamed from `parsedDrv`. - */ - const StructuredAttrs * parsedDrv; - /** * The derivation options of `drv`. * @@ -63,14 +54,12 @@ struct DerivationBuilderParams const BuildMode & buildMode, BuildResult & buildResult, const Derivation & drv, - const StructuredAttrs * parsedDrv, const DerivationOptions & drvOptions, const StorePathSet & inputPaths, std::map & initialOutputs) : drvPath{drvPath} , buildResult{buildResult} , drv{drv} - , parsedDrv{parsedDrv} , drvOptions{drvOptions} , inputPaths{inputPaths} , initialOutputs{initialOutputs} diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index d61a2f282..d3902f2a6 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -555,10 +555,9 @@ static void main_nix_build(int argc, char ** argv) env["NIX_STORE"] = store->storeDir; env["NIX_BUILD_CORES"] = fmt("%d", settings.buildCores ? settings.buildCores : settings.getDefaultCores()); - auto parsedDrv = StructuredAttrs::tryParse(drv.env); DerivationOptions drvOptions; try { - drvOptions = DerivationOptions::fromStructuredAttrs(drv.env, parsedDrv ? 
&*parsedDrv : nullptr); + drvOptions = DerivationOptions::fromStructuredAttrs(drv.env, drv.structuredAttrs); } catch (Error & e) { e.addTrace({}, "while parsing derivation '%s'", store->printStorePath(packageInfo.requireDrvPath())); throw; @@ -577,7 +576,7 @@ static void main_nix_build(int argc, char ** argv) std::string structuredAttrsRC; - if (parsedDrv) { + if (drv.structuredAttrs) { StorePathSet inputs; std::function::ChildNode &)> accumInputClosure; @@ -596,7 +595,7 @@ static void main_nix_build(int argc, char ** argv) for (const auto & [inputDrv, inputNode] : drv.inputDrvs.map) accumInputClosure(inputDrv, inputNode); - auto json = parsedDrv->prepareStructuredAttrs(*store, drvOptions, inputs, drv.outputs); + auto json = drv.structuredAttrs->prepareStructuredAttrs(*store, drvOptions, inputs, drv.outputs); structuredAttrsRC = StructuredAttrs::writeShell(json); From 6e8f76482ec7db6d9503a4f951ee0df1bc21da4f Mon Sep 17 00:00:00 2001 From: Oleksandr Knyshuk Date: Fri, 25 Jul 2025 16:57:39 +0200 Subject: [PATCH 095/382] Add functional test for no-url-literals experimental feature Closes: #13533 --- tests/functional/meson.build | 1 + tests/functional/no-url-literals.sh | 28 ++++++++++++++++++++++++++++ 2 files changed, 29 insertions(+) create mode 100644 tests/functional/no-url-literals.sh diff --git a/tests/functional/meson.build b/tests/functional/meson.build index 8f2b1ff59..d2989ee84 100644 --- a/tests/functional/meson.build +++ b/tests/functional/meson.build @@ -113,6 +113,7 @@ suites = [ 'pure-eval.sh', 'eval.sh', 'short-path-literals.sh', + 'no-url-literals.sh', 'repl.sh', 'binary-cache-build-remote.sh', 'search.sh', diff --git a/tests/functional/no-url-literals.sh b/tests/functional/no-url-literals.sh new file mode 100644 index 000000000..fbc6e1cec --- /dev/null +++ b/tests/functional/no-url-literals.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash + +source common.sh + +clearStoreIfPossible + +# Test 1: By default, unquoted URLs are accepted +nix eval --expr 'http://example.com' 2>&1 | grepQuietInverse "error: URL literals are disabled" + +# Test 2: With the experimental feature enabled, unquoted URLs are rejected +expect 1 nix eval --extra-experimental-features 'no-url-literals' --expr 'http://example.com' 2>&1 | grepQuiet "error: URL literals are disabled" + +# Test 3: Quoted URLs are always accepted +nix eval --extra-experimental-features 'no-url-literals' --expr '"http://example.com"' 2>&1 | grepQuietInverse "error: URL literals are disabled" + +# Test 4: URLs with parameters (which must be quoted) are accepted +nix eval --extra-experimental-features 'no-url-literals' --expr '"http://example.com?foo=bar"' 2>&1 | grepQuietInverse "error: URL literals are disabled" + +# Test 5: The feature can be enabled via NIX_CONFIG +expect 1 env NIX_CONFIG='extra-experimental-features = no-url-literals' nix eval --expr 'http://example.com' 2>&1 | grepQuiet "error: URL literals are disabled" + +# Test 6: The feature can be enabled via CLI even if not set in config +expect 1 env NIX_CONFIG='' nix eval --extra-experimental-features 'no-url-literals' --expr 'http://example.com' 2>&1 | grepQuiet "error: URL literals are disabled" + +# Test 7: Evaluation still works for quoted URLs +nix eval --raw --extra-experimental-features no-url-literals --expr '"http://example.com"' | grepQuiet "^http://example.com$" + +echo "no-url-literals test passed!" 
From 5db50e3f777888b18336e9dcc065ef0e9057c6cf Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 3 Aug 2025 00:07:03 +0300 Subject: [PATCH 096/382] meson: Disable PCH for GCC GCC doesn't really benefit as much as Clang does from using precompiled headers. Another aspect to consider is that clangd doesn't really like GCC's PCH flags in the compilation database, so GCC based devshells would continue to work with clangd. This also has the slight advantage of ensuring that our includes are in order, since we build with both Clang and GCC. --- nix-meson-build-support/common/meson.build | 3 +++ src/libcmd/meson.build | 2 +- src/libexpr/meson.build | 2 +- src/libstore/meson.build | 2 +- src/libutil/meson.build | 2 +- src/nix/meson.build | 2 +- 6 files changed, 8 insertions(+), 5 deletions(-) diff --git a/nix-meson-build-support/common/meson.build b/nix-meson-build-support/common/meson.build index df8b958c0..bb57ca941 100644 --- a/nix-meson-build-support/common/meson.build +++ b/nix-meson-build-support/common/meson.build @@ -19,6 +19,9 @@ add_project_arguments( language : 'cpp', ) +# GCC doesn't benefit much from precompiled headers. +do_pch = cxx.get_id() == 'clang' + # This is a clang-only option for improving build times. # It forces the instantiation of templates in the PCH itself and # not every translation unit it's included in. diff --git a/src/libcmd/meson.build b/src/libcmd/meson.build index 5c3dd91ee..0cb41b46f 100644 --- a/src/libcmd/meson.build +++ b/src/libcmd/meson.build @@ -92,7 +92,7 @@ this_library = library( link_args: linker_export_flags, prelink : true, # For C++ static initializers install : true, - cpp_pch : ['pch/precompiled-headers.hh'] + cpp_pch : do_pch ? ['pch/precompiled-headers.hh'] : [] ) install_headers(headers, subdir : 'nix/cmd', preserve_path : true) diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index fe795a607..9c8569293 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -178,7 +178,7 @@ this_library = library( link_args: linker_export_flags, prelink : true, # For C++ static initializers install : true, - cpp_pch : ['pch/precompiled-headers.hh'] + cpp_pch : do_pch ? ['pch/precompiled-headers.hh'] : [] ) install_headers(headers, subdir : 'nix/expr', preserve_path : true) diff --git a/src/libstore/meson.build b/src/libstore/meson.build index c243f19c9..708188178 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -350,7 +350,7 @@ this_library = library( link_args: linker_export_flags, prelink : true, # For C++ static initializers install : true, - cpp_pch : ['pch/precompiled-headers.hh'] + cpp_pch : do_pch ? ['pch/precompiled-headers.hh'] : [] ) install_headers(headers, subdir : 'nix/store', preserve_path : true) diff --git a/src/libutil/meson.build b/src/libutil/meson.build index ced9c424d..881a8674c 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -191,7 +191,7 @@ this_library = library( link_args: linker_export_flags, prelink : true, # For C++ static initializers install : true, - cpp_pch : 'pch/precompiled-headers.hh' + cpp_pch : do_pch ? ['pch/precompiled-headers.hh'] : [] ) install_headers(headers, subdir : 'nix/util', preserve_path : true) diff --git a/src/nix/meson.build b/src/nix/meson.build index 586ee15c3..23b876690 100644 --- a/src/nix/meson.build +++ b/src/nix/meson.build @@ -186,7 +186,7 @@ this_exe = executable( include_directories : include_dirs, link_args: linker_export_flags, install : true, - cpp_pch : ['pch/precompiled-headers.hh'] + cpp_pch : do_pch ? 
['pch/precompiled-headers.hh'] : [] ) meson.override_find_program('nix', this_exe) From b8301b2c25eeaf9f579a0413f4cb01017e8f636d Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 3 Aug 2025 00:21:04 +0300 Subject: [PATCH 097/382] libstore-tests: Add nix-store-benchmarks as a meson benchmark --- src/libstore-tests/meson.build | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/libstore-tests/meson.build b/src/libstore-tests/meson.build index 89189bab9..87f268828 100644 --- a/src/libstore-tests/meson.build +++ b/src/libstore-tests/meson.build @@ -120,4 +120,6 @@ if get_option('benchmarks') install : false, cpp_args : ['-DNIX_UNIT_TEST_DATA="' + meson.current_source_dir() + '/data"'], ) + + benchmark('nix-store-benchmarks', benchmark_exe) endif From b2b2f2dc53aaaca024b21811fbaef06df4507a85 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 3 Aug 2025 00:53:24 +0300 Subject: [PATCH 098/382] libstore-tests: Split bench-main into a separate file This makes it easier to add new benchmarks. --- src/libstore-tests/bench-main.cc | 14 ++++++++++++++ src/libstore-tests/derivation-parser-bench.cc | 13 ------------- src/libstore-tests/meson.build | 7 ++++++- 3 files changed, 20 insertions(+), 14 deletions(-) create mode 100644 src/libstore-tests/bench-main.cc diff --git a/src/libstore-tests/bench-main.cc b/src/libstore-tests/bench-main.cc new file mode 100644 index 000000000..37a72d8ab --- /dev/null +++ b/src/libstore-tests/bench-main.cc @@ -0,0 +1,14 @@ +#include +#include "nix/store/globals.hh" + +// Custom main to initialize Nix before running benchmarks +int main(int argc, char ** argv) +{ + // Initialize libstore + nix::initLibStore(false); + + // Initialize and run benchmarks + ::benchmark::Initialize(&argc, argv); + ::benchmark::RunSpecifiedBenchmarks(); + return 0; +} diff --git a/src/libstore-tests/derivation-parser-bench.cc b/src/libstore-tests/derivation-parser-bench.cc index 7598758f0..d6c1d4551 100644 --- a/src/libstore-tests/derivation-parser-bench.cc +++ b/src/libstore-tests/derivation-parser-bench.cc @@ -3,7 +3,6 @@ #include "nix/store/store-api.hh" #include "nix/util/experimental-features.hh" #include "nix/store/store-open.hh" -#include "nix/store/globals.hh" #include #include @@ -31,15 +30,3 @@ static void BM_ParseRealDerivationFile(benchmark::State & state, const std::stri // Register benchmarks for actual test derivation files if they exist BENCHMARK_CAPTURE(BM_ParseRealDerivationFile, hello, std::string(NIX_UNIT_TEST_DATA) + "/derivation/hello.drv"); BENCHMARK_CAPTURE(BM_ParseRealDerivationFile, firefox, std::string(NIX_UNIT_TEST_DATA) + "/derivation/firefox.drv"); - -// Custom main to initialize Nix before running benchmarks -int main(int argc, char ** argv) -{ - // Initialize libstore - nix::initLibStore(false); - - // Initialize and run benchmarks - ::benchmark::Initialize(&argc, argv); - ::benchmark::RunSpecifiedBenchmarks(); - return 0; -} diff --git a/src/libstore-tests/meson.build b/src/libstore-tests/meson.build index 87f268828..606b8f694 100644 --- a/src/libstore-tests/meson.build +++ b/src/libstore-tests/meson.build @@ -110,9 +110,14 @@ test( if get_option('benchmarks') gbenchmark = dependency('benchmark', required : true) + benchmark_sources = files( + 'bench-main.cc', + 'derivation-parser-bench.cc', + ) + benchmark_exe = executable( 'nix-store-benchmarks', - 'derivation-parser-bench.cc', + benchmark_sources, config_priv_h, dependencies : deps_private_subproject + deps_private + deps_other + [gbenchmark], include_directories : include_dirs, From 
ea1f67393d26bdb02053fd914815caefa2e9e96a Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 3 Aug 2025 01:01:26 +0300 Subject: [PATCH 099/382] libstore-tests: Build and run benchmarks in CI This changes our GHA CI and nix-store-tests packaging to build and run the benchmarks. This does not affect the default packaging - the overrides apply only for the GHA CI. --- ci/gha/tests/default.nix | 4 ++- src/libstore-tests/derivation-parser-bench.cc | 11 ++++-- src/libstore-tests/meson.build | 2 +- src/libstore-tests/package.nix | 36 +++++++++++++------ 4 files changed, 38 insertions(+), 15 deletions(-) diff --git a/ci/gha/tests/default.nix b/ci/gha/tests/default.nix index 39d1502df..ce44d7cf7 100644 --- a/ci/gha/tests/default.nix +++ b/ci/gha/tests/default.nix @@ -59,7 +59,9 @@ in inherit getStdenv; }).overrideScope ( - _: _: { + final: prev: { + nix-store-tests = prev.nix-store-tests.override { withBenchmarks = true; }; + mesonComponentOverrides = finalAttrs: prevAttrs: { mesonFlags = (prevAttrs.mesonFlags or [ ]) diff --git a/src/libstore-tests/derivation-parser-bench.cc b/src/libstore-tests/derivation-parser-bench.cc index d6c1d4551..ef698b205 100644 --- a/src/libstore-tests/derivation-parser-bench.cc +++ b/src/libstore-tests/derivation-parser-bench.cc @@ -2,6 +2,7 @@ #include "nix/store/derivations.hh" #include "nix/store/store-api.hh" #include "nix/util/experimental-features.hh" +#include "nix/util/environment-variables.hh" #include "nix/store/store-open.hh" #include #include @@ -28,5 +29,11 @@ static void BM_ParseRealDerivationFile(benchmark::State & state, const std::stri } // Register benchmarks for actual test derivation files if they exist -BENCHMARK_CAPTURE(BM_ParseRealDerivationFile, hello, std::string(NIX_UNIT_TEST_DATA) + "/derivation/hello.drv"); -BENCHMARK_CAPTURE(BM_ParseRealDerivationFile, firefox, std::string(NIX_UNIT_TEST_DATA) + "/derivation/firefox.drv"); +BENCHMARK_CAPTURE( + BM_ParseRealDerivationFile, + hello, + getEnvNonEmpty("_NIX_TEST_UNIT_DATA").value_or(NIX_UNIT_TEST_DATA) + "/derivation/hello.drv"); +BENCHMARK_CAPTURE( + BM_ParseRealDerivationFile, + firefox, + getEnvNonEmpty("_NIX_TEST_UNIT_DATA").value_or(NIX_UNIT_TEST_DATA) + "/derivation/firefox.drv"); diff --git a/src/libstore-tests/meson.build b/src/libstore-tests/meson.build index 606b8f694..dfb936fef 100644 --- a/src/libstore-tests/meson.build +++ b/src/libstore-tests/meson.build @@ -122,7 +122,7 @@ if get_option('benchmarks') dependencies : deps_private_subproject + deps_private + deps_other + [gbenchmark], include_directories : include_dirs, link_args: linker_export_flags, - install : false, + install : true, cpp_args : ['-DNIX_UNIT_TEST_DATA="' + meson.current_source_dir() + '/data"'], ) diff --git a/src/libstore-tests/package.nix b/src/libstore-tests/package.nix index 93c71a382..00d40365e 100644 --- a/src/libstore-tests/package.nix +++ b/src/libstore-tests/package.nix @@ -12,12 +12,14 @@ rapidcheck, gtest, + gbenchmark, runCommand, # Configuration Options version, filesetToSource, + withBenchmarks ? 
false, }: let @@ -41,11 +43,15 @@ mkMesonExecutable (finalAttrs: { ]; # Hack for sake of the dev shell - passthru.externalBuildInputs = [ - sqlite - rapidcheck - gtest - ]; + passthru.externalBuildInputs = + [ + sqlite + rapidcheck + gtest + ] + ++ lib.optionals withBenchmarks [ + gbenchmark + ]; buildInputs = finalAttrs.passthru.externalBuildInputs ++ [ nix-store @@ -54,6 +60,7 @@ mkMesonExecutable (finalAttrs: { ]; mesonFlags = [ + (lib.mesonBool "benchmarks" withBenchmarks) ]; passthru = { @@ -75,12 +82,19 @@ mkMesonExecutable (finalAttrs: { meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; buildInputs = [ writableTmpDirAsHomeHook ]; } - ('' - export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"} - export NIX_REMOTE=$HOME/store - ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} - touch $out - ''); + ( + '' + export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"} + export NIX_REMOTE=$HOME/store + ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} + '' + + lib.optionalString withBenchmarks '' + ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe' finalAttrs.finalPackage "nix-store-benchmarks"} + '' + + '' + touch $out + '' + ); }; }; From 4bfc007a7b2bdf5b1f85f0692c50d840e925809e Mon Sep 17 00:00:00 2001 From: "Alexander V. Nikolaev" Date: Sun, 3 Aug 2025 18:03:47 +0300 Subject: [PATCH 100/382] Improve base-32 hash decoding performance with reverse map The changes include: * Defining nix32Chars as a constexpr char[]. * Adding a constexpr std::array (reverseNix32Map) to map characters to their base-32 digit values at compile time. * Replacing the slow character search loop with a direct lookup using reverseNix32Map. * Removing std::once_flag/isBase32 logic in references.cc in favor of reverseNix32Map Signed-off-by: Alexander V. Nikolaev --- src/libutil/hash.cc | 25 ++++++++++++++++++------- src/libutil/include/nix/util/hash.hh | 2 +- src/libutil/references.cc | 11 +---------- 3 files changed, 20 insertions(+), 18 deletions(-) diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 38ef5dd90..a9603b6fc 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -72,7 +72,19 @@ static std::string printHash16(const Hash & hash) } // omitted: E O U T -const std::string nix32Chars = "0123456789abcdfghijklmnpqrsvwxyz"; +constexpr char nix32Chars[] = "0123456789abcdfghijklmnpqrsvwxyz"; + +constexpr const std::array reverseNix32Map = [] { + std::array map{}; + + for (size_t i = 0; i < map.size(); ++i) + map[i] = 0xFF; // invalid + + for (unsigned char i = 0; i < 32; ++i) + map[static_cast(nix32Chars[i])] = i; + + return map; +}(); static std::string printHash32(const Hash & hash) { @@ -217,12 +229,11 @@ Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI) for (unsigned int n = 0; n < rest.size(); ++n) { char c = rest[rest.size() - n - 1]; - unsigned char digit; - for (digit = 0; digit < nix32Chars.size(); ++digit) /* !!! 
slow */ - if (nix32Chars[digit] == c) - break; - if (digit >= 32) - throw BadHash("invalid base-32 hash '%s'", rest); + unsigned char digit = reverseNix32Map[static_cast(c)]; + + if (digit == 0xFF) + throw BadHash("invalid base-32 hash: '%s'", rest); + unsigned int b = n * 5; unsigned int i = b / 8; unsigned int j = b % 8; diff --git a/src/libutil/include/nix/util/hash.hh b/src/libutil/include/nix/util/hash.hh index daacd7adf..9661fd489 100644 --- a/src/libutil/include/nix/util/hash.hh +++ b/src/libutil/include/nix/util/hash.hh @@ -35,7 +35,7 @@ constexpr inline size_t regularHashSize(HashAlgorithm type) extern const StringSet hashAlgorithms; -extern const std::string nix32Chars; +extern const std::array reverseNix32Map; /** * @brief Enumeration representing the hash formats. diff --git a/src/libutil/references.cc b/src/libutil/references.cc index cd8a46754..6b88da0c6 100644 --- a/src/libutil/references.cc +++ b/src/libutil/references.cc @@ -13,20 +13,11 @@ static size_t refLength = 32; /* characters */ static void search(std::string_view s, StringSet & hashes, StringSet & seen) { - static std::once_flag initialised; - static bool isBase32[256]; - std::call_once(initialised, []() { - for (unsigned int i = 0; i < 256; ++i) - isBase32[i] = false; - for (unsigned int i = 0; i < nix32Chars.size(); ++i) - isBase32[(unsigned char) nix32Chars[i]] = true; - }); - for (size_t i = 0; i + refLength <= s.size();) { int j; bool match = true; for (j = refLength - 1; j >= 0; --j) - if (!isBase32[(unsigned char) s[i + j]]) { + if (reverseNix32Map[(unsigned char) s[i + j]] == 0xFF) { i += j + 1; match = false; break; From 20df0e5fa7820bfc6cafaac39ca29f33542fe146 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 4 Aug 2025 02:11:14 +0300 Subject: [PATCH 101/382] libstore-tests: Add reference scanning benchmark This benchmark should provide a relatively realistic scenario for reference scanning. 
As shown by the following results, reference scanning code is already plenty fast and is definitely not a bottleneck: ``` BM_RefScanSinkRandom/10000 1672 ns 1682 ns 413354 bytes_per_second=5.53691Gi/s BM_RefScanSinkRandom/100000 11217 ns 11124 ns 64341 bytes_per_second=8.37231Gi/s BM_RefScanSinkRandom/1000000 205745 ns 204421 ns 3360 bytes_per_second=4.55591Gi/s BM_RefScanSinkRandom/5000000 1208407 ns 1201046 ns 597 bytes_per_second=3.87713Gi/s BM_RefScanSinkRandom/10000000 2534397 ns 2523344 ns 273 bytes_per_second=3.69083Gi/s ``` (Measurements on Ryzen 5900X via `nix build --file ci/gha/tests componentTests.nix-store-tests-run -L`) --- src/libstore-tests/meson.build | 1 + src/libstore-tests/ref-scan-bench.cc | 92 ++++++++++++++++++++++++++++ 2 files changed, 93 insertions(+) create mode 100644 src/libstore-tests/ref-scan-bench.cc diff --git a/src/libstore-tests/meson.build b/src/libstore-tests/meson.build index dfb936fef..bba991388 100644 --- a/src/libstore-tests/meson.build +++ b/src/libstore-tests/meson.build @@ -113,6 +113,7 @@ if get_option('benchmarks') benchmark_sources = files( 'bench-main.cc', 'derivation-parser-bench.cc', + 'ref-scan-bench.cc', ) benchmark_exe = executable( diff --git a/src/libstore-tests/ref-scan-bench.cc b/src/libstore-tests/ref-scan-bench.cc new file mode 100644 index 000000000..8219c05bf --- /dev/null +++ b/src/libstore-tests/ref-scan-bench.cc @@ -0,0 +1,92 @@ +#include "nix/util/references.hh" +#include "nix/store/path.hh" + +#include + +#include + +using namespace nix; + +template +static void randomReference(std::mt19937 & urng, OIt outIter) +{ + auto dist = std::uniform_int_distribution(0, nix32Chars.size() - 1); + dist(urng); + std::generate_n(outIter, StorePath::HashLen, [&]() { return nix32Chars[dist(urng)]; }); +} + +/** + * Generate a random byte sequence with interleaved + * + * @param charWeight relative frequency of a byte not belonging to a reference (hash part of the store path) + */ +static std::string +randomBytesWithReferences(std::mt19937 & urng, std::size_t size, double charWeight, StringSet & hashes) +{ + std::string res; + res.reserve(size); + + /* NOTE: std::uniform_int_distribution isn't guaranteed to be implemented for char. 
*/ + auto charGen = [&, + charDist = std::uniform_int_distribution{ + std::numeric_limits::min(), + std::numeric_limits::max(), + }]() mutable { res.push_back(charDist(urng)); }; + + auto refGen = [&]() { + std::string ref; + randomReference(urng, std::back_inserter(ref)); + hashes.insert(ref); + res += ref; + }; + + std::discrete_distribution genDist{1.0, StorePath::HashLen * charWeight}; + + while (res.size() < size) { + auto c = genDist(urng); + if (c == 0) + refGen(); + else + charGen(); + } + + res.resize(size); + return res; +} + +// Benchmark reference scanning +static void BM_RefScanSinkRandom(benchmark::State & state) +{ + auto size = state.range(); + auto chunkSize = 4199; + + std::mt19937 urng(0); + StringSet hashes; + auto bytes = randomBytesWithReferences(urng, size, /*charWeight=*/100.0, hashes); + assert(hashes.size() > 0); + + std::size_t processed = 0; + + for (auto _ : state) { + state.PauseTiming(); + RefScanSink Sink{StringSet(hashes)}; + state.ResumeTiming(); + + auto data = std::string_view(bytes); + while (!data.empty()) { + auto chunk = data.substr(0, std::min(chunkSize, data.size())); + data = data.substr(chunk.size()); + Sink(chunk); + processed += chunk.size(); + } + + benchmark::DoNotOptimize(Sink.getResult()); + state.PauseTiming(); + assert(Sink.getResult() == hashes); + state.ResumeTiming(); + } + + state.SetBytesProcessed(processed); +} + +BENCHMARK(BM_RefScanSinkRandom)->Arg(10'000)->Arg(100'000)->Arg(1'000'000)->Arg(5'000'000)->Arg(10'000'000); From 0a96b7e62713a53eb9f4bd4c67a78c52bd3b42c7 Mon Sep 17 00:00:00 2001 From: WxNzEMof <143541718+WxNzEMof@users.noreply.github.com> Date: Mon, 4 Aug 2025 17:00:38 +0000 Subject: [PATCH 102/382] Recognize "identity" compression method Some binary caches (incorrectly) use this header to indicate lack of compression, inspired by the valid "identity" token in the "Accept-Encoding" header. --- src/libutil/compression.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 0e38620d4..78219b7bc 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -215,7 +215,7 @@ std::string decompress(const std::string & method, std::string_view in) std::unique_ptr makeDecompressionSink(const std::string & method, Sink & nextSink) { - if (method == "none" || method == "") + if (method == "none" || method == "" || method == "identity") return std::make_unique(nextSink); else if (method == "br") return std::make_unique(nextSink); From 23c87d8a210ec3ffe29a14d7ceb3f4e87ab4fa90 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 4 Aug 2025 14:31:14 -0400 Subject: [PATCH 103/382] Encapsulate `invalidBase32`, avoid 0xFF magic number This keeps things fast by making the function inline, but also prevents people from having to know about the `0xFF` implementation detail directly, instead making one go through a `std::optional` (which could be fused away with a sufficiently smart compiler). Additionally, the base "nix32" implementation is moved to its own header file pair, as it is logically distinct and prior to the `Hash` data type. It would probably be nice to do this with all the hash format implementations. 
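For illustration, a minimal sketch of the intended calling convention (the helper name `decodeNix32Digit` is made up for this message and is not part of the change; `lookupReverse` is the new inline accessor added in `base-nix-32.hh`):

```
#include <cstdint>
#include <optional>
#include <stdexcept>

#include "nix/util/base-nix-32.hh"

// Hypothetical caller: the 0xFF sentinel stays private to BaseNix32, so
// callers only test the returned std::optional<uint8_t> (as hash.cc now does).
static uint8_t decodeNix32Digit(char c)
{
    if (auto digit = nix::BaseNix32::lookupReverse(c))
        return *digit; // a base-32 digit in the range 0..31
    throw std::invalid_argument("not a valid nix base-32 character");
}
```

Since the accessor is inline, this should compile down to the same single table lookup as before, just without exposing the magic number to callers.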
--- src/libstore-tests/ref-scan-bench.cc | 5 ++- src/libutil/base-nix-32.cc | 42 +++++++++++++++++++ src/libutil/hash.cc | 38 +++-------------- src/libutil/include/nix/util/base-nix-32.hh | 45 +++++++++++++++++++++ src/libutil/include/nix/util/hash.hh | 4 +- src/libutil/include/nix/util/meson.build | 1 + src/libutil/meson.build | 1 + src/libutil/references.cc | 3 +- 8 files changed, 101 insertions(+), 38 deletions(-) create mode 100644 src/libutil/base-nix-32.cc create mode 100644 src/libutil/include/nix/util/base-nix-32.hh diff --git a/src/libstore-tests/ref-scan-bench.cc b/src/libstore-tests/ref-scan-bench.cc index 8219c05bf..011d53aec 100644 --- a/src/libstore-tests/ref-scan-bench.cc +++ b/src/libstore-tests/ref-scan-bench.cc @@ -1,5 +1,6 @@ #include "nix/util/references.hh" #include "nix/store/path.hh" +#include "nix/util/base-nix-32.hh" #include @@ -10,9 +11,9 @@ using namespace nix; template static void randomReference(std::mt19937 & urng, OIt outIter) { - auto dist = std::uniform_int_distribution(0, nix32Chars.size() - 1); + auto dist = std::uniform_int_distribution(0, BaseNix32::characters.size() - 1); dist(urng); - std::generate_n(outIter, StorePath::HashLen, [&]() { return nix32Chars[dist(urng)]; }); + std::generate_n(outIter, StorePath::HashLen, [&]() { return BaseNix32::characters[dist(urng)]; }); } /** diff --git a/src/libutil/base-nix-32.cc b/src/libutil/base-nix-32.cc new file mode 100644 index 000000000..dec5cd7d7 --- /dev/null +++ b/src/libutil/base-nix-32.cc @@ -0,0 +1,42 @@ +#include + +#include "nix/util/base-nix-32.hh" + +namespace nix { + +constexpr const std::array BaseNix32::reverseMap = [] { + std::array map{}; + + for (size_t i = 0; i < map.size(); ++i) + map[i] = invalid; // invalid + + for (unsigned char i = 0; i < 32; ++i) + map[static_cast(characters[i])] = i; + + return map; +}(); + +std::string BaseNix32::encode(std::span originalData) +{ + if (originalData.size() == 0) + return {}; + + size_t len = encodedLength(originalData.size()); + assert(len); + + std::string s; + s.reserve(len); + + for (int n = (int) len - 1; n >= 0; n--) { + unsigned int b = n * 5; + unsigned int i = b / 8; + unsigned int j = b % 8; + unsigned char c = + (originalData.data()[i] >> j) | (i >= originalData.size() - 1 ? 0 : originalData.data()[i + 1] << (8 - j)); + s.push_back(characters[c & 0x1f]); + } + + return s; +} + +} // namespace nix diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index a9603b6fc..a5ac9b654 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -11,6 +11,7 @@ #include "nix/util/archive.hh" #include "nix/util/configuration.hh" #include "nix/util/split.hh" +#include "nix/util/base-nix-32.hh" #include #include @@ -71,39 +72,10 @@ static std::string printHash16(const Hash & hash) return buf; } -// omitted: E O U T -constexpr char nix32Chars[] = "0123456789abcdfghijklmnpqrsvwxyz"; - -constexpr const std::array reverseNix32Map = [] { - std::array map{}; - - for (size_t i = 0; i < map.size(); ++i) - map[i] = 0xFF; // invalid - - for (unsigned char i = 0; i < 32; ++i) - map[static_cast(nix32Chars[i])] = i; - - return map; -}(); - static std::string printHash32(const Hash & hash) { assert(hash.hashSize); - size_t len = hash.base32Len(); - assert(len); - - std::string s; - s.reserve(len); - - for (int n = (int) len - 1; n >= 0; n--) { - unsigned int b = n * 5; - unsigned int i = b / 8; - unsigned int j = b % 8; - unsigned char c = (hash.hash[i] >> j) | (i >= hash.hashSize - 1 ? 
0 : hash.hash[i + 1] << (8 - j)); - s.push_back(nix32Chars[c & 0x1f]); - } - - return s; + return BaseNix32::encode({&hash.hash[0], hash.hashSize}); } std::string printHash16or32(const Hash & hash) @@ -229,11 +201,13 @@ Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI) for (unsigned int n = 0; n < rest.size(); ++n) { char c = rest[rest.size() - n - 1]; - unsigned char digit = reverseNix32Map[static_cast(c)]; + auto digit_opt = BaseNix32::lookupReverse(c); - if (digit == 0xFF) + if (!digit_opt) throw BadHash("invalid base-32 hash: '%s'", rest); + uint8_t digit = std::move(*digit_opt); + unsigned int b = n * 5; unsigned int i = b / 8; unsigned int j = b % 8; diff --git a/src/libutil/include/nix/util/base-nix-32.hh b/src/libutil/include/nix/util/base-nix-32.hh new file mode 100644 index 000000000..37b23a2bb --- /dev/null +++ b/src/libutil/include/nix/util/base-nix-32.hh @@ -0,0 +1,45 @@ +#pragma once +///@file + +#include +#include +#include +#include +#include + +namespace nix { + +struct BaseNix32 +{ + /// omitted: E O U T + constexpr static std::array characters = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', + 'b', 'c', 'd', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', + 'n', 'p', 'q', 'r', 's', 'v', 'w', 'x', 'y', 'z'}; + +private: + static const std::array reverseMap; + + const static constexpr uint8_t invalid = 0xFF; + +public: + static inline std::optional lookupReverse(char base32) + { + uint8_t digit = reverseMap[static_cast(base32)]; + if (digit == invalid) + return std::nullopt; + else + return digit; + } + + /** + * Returns the length of a base-32 representation of this hash. + */ + static size_t encodedLength(size_t originalLength) + { + return (originalLength * 8 - 1) / 5 + 1; + } + + static std::string encode(std::span originalData); +}; + +} // namespace nix diff --git a/src/libutil/include/nix/util/hash.hh b/src/libutil/include/nix/util/hash.hh index 9661fd489..48f1b0c3c 100644 --- a/src/libutil/include/nix/util/hash.hh +++ b/src/libutil/include/nix/util/hash.hh @@ -35,8 +35,6 @@ constexpr inline size_t regularHashSize(HashAlgorithm type) extern const StringSet hashAlgorithms; -extern const std::array reverseNix32Map; - /** * @brief Enumeration representing the hash formats. */ @@ -44,7 +42,7 @@ enum struct HashFormat : int { /// @brief Base 64 encoding. /// @see [IETF RFC 4648, section 4](https://datatracker.ietf.org/doc/html/rfc4648#section-4). Base64, - /// @brief Nix-specific base-32 encoding. @see nix32Chars + /// @brief Nix-specific base-32 encoding. @see BaseNix32 Nix32, /// @brief Lowercase hexadecimal encoding. 
@see base16Chars Base16, diff --git a/src/libutil/include/nix/util/meson.build b/src/libutil/include/nix/util/meson.build index 22438c1d0..b7d4d761d 100644 --- a/src/libutil/include/nix/util/meson.build +++ b/src/libutil/include/nix/util/meson.build @@ -8,6 +8,7 @@ headers = files( 'archive.hh', 'args.hh', 'args/root.hh', + 'base-nix-32.hh', 'callback.hh', 'canon-path.hh', 'checked-arithmetic.hh', diff --git a/src/libutil/meson.build b/src/libutil/meson.build index d11d14c33..fb3e98e1d 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -112,6 +112,7 @@ subdir('nix-meson-build-support/common') sources = [config_priv_h] + files( 'archive.cc', 'args.cc', + 'base-nix-32.cc', 'canon-path.cc', 'compression.cc', 'compute-levels.cc', diff --git a/src/libutil/references.cc b/src/libutil/references.cc index 6b88da0c6..0f5164f6b 100644 --- a/src/libutil/references.cc +++ b/src/libutil/references.cc @@ -1,6 +1,7 @@ #include "nix/util/references.hh" #include "nix/util/hash.hh" #include "nix/util/archive.hh" +#include "nix/util/base-nix-32.hh" #include #include @@ -17,7 +18,7 @@ static void search(std::string_view s, StringSet & hashes, StringSet & seen) int j; bool match = true; for (j = refLength - 1; j >= 0; --j) - if (reverseNix32Map[(unsigned char) s[i + j]] == 0xFF) { + if (!BaseNix32::lookupReverse(s[i + j])) { i += j + 1; match = false; break; From 4333a9d5a83e5a90981f2d8272db6b3125aa10b6 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 4 Aug 2025 23:50:02 +0300 Subject: [PATCH 104/382] ci: Collect code coverage in tests This adds the necessary infrastructure to collect code coverage in CI, which could be useful to look at manually or track consistently via something like codecov. Co-authored-by: Jade Lovelace --- .github/workflows/ci.yml | 30 +++- ci/gha/tests/default.nix | 176 +++++++++++++++++---- ci/gha/tests/wrapper.nix | 16 ++ nix-meson-build-support/common/meson.build | 8 + tests/functional/flakes/run.sh | 2 + tests/functional/shell.sh | 2 + 6 files changed, 200 insertions(+), 34 deletions(-) create mode 100644 ci/gha/tests/wrapper.nix diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c7e2782d8..1745988da 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,18 +29,21 @@ jobs: - scenario: on ubuntu runs-on: ubuntu-24.04 os: linux - sanitizers: false + instrumented: false primary: true + stdenv: stdenv - scenario: on macos runs-on: macos-14 os: darwin - sanitizers: false + instrumented: false primary: true - - scenario: on ubuntu (with sanitizers) + stdenv: stdenv + - scenario: on ubuntu (with sanitizers / coverage) runs-on: ubuntu-24.04 os: linux - sanitizers: true + instrumented: true primary: false + stdenv: clangStdenv name: tests ${{ matrix.scenario }} runs-on: ${{ matrix.runs-on }} timeout-minutes: 60 @@ -63,13 +66,28 @@ jobs: if: matrix.os == 'linux' - name: Run component tests run: | - nix build --file ci/gha/tests componentTests -L \ - --arg withSanitizers ${{ matrix.sanitizers }} + nix build --file ci/gha/tests/wrapper.nix componentTests -L \ + --arg withInstrumentation ${{ matrix.instrumented }} \ + --argstr stdenv "${{ matrix.stdenv }}" - name: Run flake checks and prepare the installer tarball run: | ci/gha/tests/build-checks ci/gha/tests/prepare-installer-for-github-actions if: ${{ matrix.primary }} + - name: Collect code coverage + run: | + nix build --file ci/gha/tests/wrapper.nix codeCoverage.coverageReports -L \ + --arg withInstrumentation ${{ matrix.instrumented }} \ + --argstr stdenv "${{ 
matrix.stdenv }}" \ + --out-link coverage-reports + cat coverage-reports/index.txt >> $GITHUB_STEP_SUMMARY + if: ${{ matrix.instrumented }} + - name: Upload coverage reports + uses: actions/upload-artifact@v4 + with: + name: coverage-reports + path: coverage-reports/ + if: ${{ matrix.instrumented }} - name: Upload installer tarball uses: actions/upload-artifact@v4 with: diff --git a/ci/gha/tests/default.nix b/ci/gha/tests/default.nix index ce44d7cf7..d2bee699b 100644 --- a/ci/gha/tests/default.nix +++ b/ci/gha/tests/default.nix @@ -5,15 +5,78 @@ getStdenv ? p: p.stdenv, componentTestsPrefix ? "", withSanitizers ? false, + withCoverage ? false, + ... }: let inherit (pkgs) lib; hydraJobs = nixFlake.hydraJobs; packages' = nixFlake.packages.${system}; + stdenv = (getStdenv pkgs); + + enableSanitizersLayer = finalAttrs: prevAttrs: { + mesonFlags = + (prevAttrs.mesonFlags or [ ]) + ++ [ + # Run all tests with UBSAN enabled. Running both with ubsan and + # without doesn't seem to have much immediate benefit for doubling + # the GHA CI workaround. + # + # TODO: Work toward enabling "address,undefined" if it seems feasible. + # This would maybe require dropping Boost coroutines and ignoring intentional + # memory leaks with detect_leaks=0. + (lib.mesonOption "b_sanitize" "undefined") + ] + ++ (lib.optionals stdenv.cc.isClang [ + # https://www.github.com/mesonbuild/meson/issues/764 + (lib.mesonBool "b_lundef" false) + ]); + }; + + collectCoverageLayer = finalAttrs: prevAttrs: { + env = + let + # https://clang.llvm.org/docs/SourceBasedCodeCoverage.html#the-code-coverage-workflow + coverageFlags = [ + "-fprofile-instr-generate" + "-fcoverage-mapping" + ]; + in + { + CFLAGS = toString coverageFlags; + CXXFLAGS = toString coverageFlags; + }; + + # Done in a pre-configure hook, because $NIX_BUILD_TOP needs to be substituted. + preConfigure = + prevAttrs.preConfigure or "" + + '' + mappingFlag=" -fcoverage-prefix-map=$NIX_BUILD_TOP/${finalAttrs.src.name}=${finalAttrs.src}" + CFLAGS+="$mappingFlag" + CXXFLAGS+="$mappingFlag" + ''; + }; + + componentOverrides = + (lib.optional withSanitizers enableSanitizersLayer) + ++ (lib.optional withCoverage collectCoverageLayer); in -{ +rec { + nixComponents = + (nixFlake.lib.makeComponents { + inherit pkgs; + inherit getStdenv; + }).overrideScope + ( + final: prev: { + nix-store-tests = prev.nix-store-tests.override { withBenchmarks = true; }; + + mesonComponentOverrides = lib.composeManyExtensions componentOverrides; + } + ); + /** Top-level tests for the flake outputs, as they would be built by hydra. These tests generally can't be overridden to run with sanitizers. @@ -52,33 +115,6 @@ in }; componentTests = - let - nixComponents = - (nixFlake.lib.makeComponents { - inherit pkgs; - inherit getStdenv; - }).overrideScope - ( - final: prev: { - nix-store-tests = prev.nix-store-tests.override { withBenchmarks = true; }; - - mesonComponentOverrides = finalAttrs: prevAttrs: { - mesonFlags = - (prevAttrs.mesonFlags or [ ]) - ++ lib.optionals withSanitizers [ - # Run all tests with UBSAN enabled. Running both with ubsan and - # without doesn't seem to have much immediate benefit for doubling - # the GHA CI workaround. - # - # TODO: Work toward enabling "address,undefined" if it seems feasible. - # This would maybe require dropping Boost coroutines and ignoring intentional - # memory leaks with detect_leaks=0. 
- (lib.mesonOption "b_sanitize" "undefined") - ]; - }; - } - ); - in (lib.concatMapAttrs ( pkgName: pkg: lib.concatMapAttrs (testName: test: { @@ -88,4 +124,88 @@ in // lib.optionalAttrs (pkgs.stdenv.hostPlatform == pkgs.stdenv.buildPlatform) { "${componentTestsPrefix}nix-functional-tests" = nixComponents.nix-functional-tests; }; + + codeCoverage = + let + componentsTestsToProfile = + (builtins.mapAttrs (n: v: nixComponents.${n}.tests.run) { + "nix-util-tests" = { }; + "nix-store-tests" = { }; + "nix-fetchers-tests" = { }; + "nix-expr-tests" = { }; + "nix-flake-tests" = { }; + }) + // { + inherit (nixComponents) nix-functional-tests; + }; + + coverageProfileDrvs = lib.mapAttrs ( + n: v: + v.overrideAttrs ( + finalAttrs: prevAttrs: { + outputs = (prevAttrs.outputs or [ "out" ]) ++ [ "profraw" ]; + env = { + LLVM_PROFILE_FILE = "${placeholder "profraw"}/%m"; + }; + } + ) + ) componentsTestsToProfile; + + coverageProfiles = lib.mapAttrsToList (n: v: lib.getOutput "profraw" v) coverageProfileDrvs; + + mergedProfdata = + pkgs.runCommand "merged-profdata" + { + __structuredAttrs = true; + nativeBuildInputs = [ pkgs.llvmPackages.libllvm ]; + inherit coverageProfiles; + } + '' + rawProfiles=() + for dir in "''\${coverageProfiles[@]}"; do + rawProfiles+=($dir/*) + done + llvm-profdata merge -sparse -output $out "''\${rawProfiles[@]}" + ''; + + coverageReports = + let + nixComponentDrvs = lib.filter (lib.isDerivation) (lib.attrValues nixComponents); + in + pkgs.runCommand "code-coverage-report" + { + nativeBuildInputs = [ + pkgs.llvmPackages.libllvm + ]; + __structuredAttrs = true; + nixComponents = nixComponentDrvs; + } + '' + # ${toString (lib.map (v: v.src) nixComponentDrvs)} + + binaryFiles=() + for dir in "''\${nixComponents[@]}"; do + readarray -t filesInDir < <(find "$dir" -type f -executable) + binaryFiles+=("''\${filesInDir[@]}") + done + + arguments=$(concatStringsSep " -object " binaryFiles) + llvm-cov show $arguments -instr-profile ${mergedProfdata} -output-dir $out -format=html + + { + echo "# Code coverage summary (generated via \`llvm-cov\`):" + echo + echo '```' + llvm-cov report $arguments -instr-profile ${mergedProfdata} -format=text -use-color=false + echo '```' + echo + } >> $out/index.txt + + ''; + in + assert withCoverage; + assert stdenv.cc.isClang; + { + inherit coverageProfileDrvs mergedProfdata coverageReports; + }; } diff --git a/ci/gha/tests/wrapper.nix b/ci/gha/tests/wrapper.nix new file mode 100644 index 000000000..dc280ebbb --- /dev/null +++ b/ci/gha/tests/wrapper.nix @@ -0,0 +1,16 @@ +{ + nixFlake ? builtins.getFlake ("git+file://" + toString ../../..), + system ? builtins.currentSystem, + pkgs ? nixFlake.inputs.nixpkgs.legacyPackages.${system}, + stdenv ? "stdenv", + componentTestsPrefix ? "", + withInstrumentation ? false, +}@args: +import ./. 
( + args + // { + getStdenv = p: p.${stdenv}; + withSanitizers = withInstrumentation; + withCoverage = withInstrumentation; + } +) diff --git a/nix-meson-build-support/common/meson.build b/nix-meson-build-support/common/meson.build index bb57ca941..fd686f140 100644 --- a/nix-meson-build-support/common/meson.build +++ b/nix-meson-build-support/common/meson.build @@ -32,3 +32,11 @@ do_pch = cxx.get_id() == 'clang' if cxx.get_id() == 'clang' add_project_arguments('-fpch-instantiate-templates', language : 'cpp') endif + +# Clang gets grumpy about missing libasan symbols if -shared-libasan is not +# passed when building shared libs, at least on Linux +if cxx.get_id() == 'clang' and ('address' in get_option('b_sanitize') or 'undefined' in get_option( + 'b_sanitize', +)) + add_project_link_arguments('-shared-libasan', language : 'cpp') +endif diff --git a/tests/functional/flakes/run.sh b/tests/functional/flakes/run.sh index c92ddca2b..0a2947825 100755 --- a/tests/functional/flakes/run.sh +++ b/tests/functional/flakes/run.sh @@ -41,11 +41,13 @@ nix run -f shell-hello.nix env > $TEST_ROOT/actual-env # - we unset TMPDIR on macOS if it contains /var/folders. bad. https://github.com/NixOS/nix/issues/7731 # - _ is set by bash and is expected to differ because it contains the original command # - __CF_USER_TEXT_ENCODING is set by macOS and is beyond our control +# - __LLVM_PROFILE_RT_INIT_ONCE - implementation detail of LLVM source code coverage collection sed -i \ -e 's/PATH=.*/PATH=.../' \ -e 's/_=.*/_=.../' \ -e '/^TMPDIR=\/var\/folders\/.*/d' \ -e '/^__CF_USER_TEXT_ENCODING=.*$/d' \ + -e '/^__LLVM_PROFILE_RT_INIT_ONCE=.*$/d' \ $TEST_ROOT/expected-env $TEST_ROOT/actual-env sort $TEST_ROOT/expected-env | uniq > $TEST_ROOT/expected-env.sorted # nix run appears to clear _. I don't understand why. Is this ok? diff --git a/tests/functional/shell.sh b/tests/functional/shell.sh index 51032ff1b..9769c90d1 100755 --- a/tests/functional/shell.sh +++ b/tests/functional/shell.sh @@ -34,11 +34,13 @@ nix shell -f shell-hello.nix hello -c env > "$TEST_ROOT/actual-env" # - we unset TMPDIR on macOS if it contains /var/folders # - _ is set by bash and is expectedf to differ because it contains the original command # - __CF_USER_TEXT_ENCODING is set by macOS and is beyond our control +# - __LLVM_PROFILE_RT_INIT_ONCE - implementation detail of LLVM source code coverage collection sed -i \ -e 's/PATH=.*/PATH=.../' \ -e 's/_=.*/_=.../' \ -e '/^TMPDIR=\/var\/folders\/.*/d' \ -e '/^__CF_USER_TEXT_ENCODING=.*$/d' \ + -e '/^__LLVM_PROFILE_RT_INIT_ONCE=.*$/d' \ "$TEST_ROOT/expected-env" "$TEST_ROOT/actual-env" sort "$TEST_ROOT/expected-env" > "$TEST_ROOT/expected-env.sorted" sort "$TEST_ROOT/actual-env" > "$TEST_ROOT/actual-env.sorted" From aea312dae39d8cf2c2409b03b54ca6520e29732c Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 5 Aug 2025 02:10:21 +0300 Subject: [PATCH 105/382] nix-cli: Move nix2 binaries sources properly into nix subproject There have been prior concerns about reorganizing the repo, but this seems like a trivial simplification which will not interfere with either our packaging or the modular builds in nixpkgs. 
--- src/internal-api-docs/doxygen.cfg.in | 4 +-- src/nix/build-remote | 1 - src/{ => nix}/build-remote/build-remote.cc | 0 src/nix/nix-build | 1 - src/{ => nix}/nix-build/nix-build.cc | 0 src/nix/nix-channel | 1 - src/{ => nix}/nix-channel/nix-channel.cc | 0 src/{ => nix}/nix-channel/unpack-channel.nix | 0 src/nix/nix-collect-garbage | 1 - .../nix-collect-garbage.cc | 0 src/nix/nix-copy-closure | 1 - .../nix-copy-closure/nix-copy-closure.cc | 0 src/nix/nix-env | 1 - src/{ => nix}/nix-env/buildenv.nix | 0 src/{ => nix}/nix-env/nix-env.cc | 0 src/{ => nix}/nix-env/user-env.cc | 0 src/{ => nix}/nix-env/user-env.hh | 0 src/nix/nix-instantiate | 1 - .../nix-instantiate/nix-instantiate.cc | 0 src/nix/nix-store | 1 - src/{ => nix}/nix-store/dotgraph.cc | 0 src/{ => nix}/nix-store/dotgraph.hh | 0 src/{ => nix}/nix-store/graphml.cc | 0 src/{ => nix}/nix-store/graphml.hh | 0 src/{ => nix}/nix-store/nix-store.cc | 0 src/nix/package.nix | 35 ++++--------------- 26 files changed, 8 insertions(+), 39 deletions(-) delete mode 120000 src/nix/build-remote rename src/{ => nix}/build-remote/build-remote.cc (100%) delete mode 120000 src/nix/nix-build rename src/{ => nix}/nix-build/nix-build.cc (100%) delete mode 120000 src/nix/nix-channel rename src/{ => nix}/nix-channel/nix-channel.cc (100%) rename src/{ => nix}/nix-channel/unpack-channel.nix (100%) delete mode 120000 src/nix/nix-collect-garbage rename src/{ => nix}/nix-collect-garbage/nix-collect-garbage.cc (100%) delete mode 120000 src/nix/nix-copy-closure rename src/{ => nix}/nix-copy-closure/nix-copy-closure.cc (100%) delete mode 120000 src/nix/nix-env rename src/{ => nix}/nix-env/buildenv.nix (100%) rename src/{ => nix}/nix-env/nix-env.cc (100%) rename src/{ => nix}/nix-env/user-env.cc (100%) rename src/{ => nix}/nix-env/user-env.hh (100%) delete mode 120000 src/nix/nix-instantiate rename src/{ => nix}/nix-instantiate/nix-instantiate.cc (100%) delete mode 120000 src/nix/nix-store rename src/{ => nix}/nix-store/dotgraph.cc (100%) rename src/{ => nix}/nix-store/dotgraph.hh (100%) rename src/{ => nix}/nix-store/graphml.cc (100%) rename src/{ => nix}/nix-store/graphml.hh (100%) rename src/{ => nix}/nix-store/nix-store.cc (100%) diff --git a/src/internal-api-docs/doxygen.cfg.in b/src/internal-api-docs/doxygen.cfg.in index 950497ca3..2769edd9f 100644 --- a/src/internal-api-docs/doxygen.cfg.in +++ b/src/internal-api-docs/doxygen.cfg.in @@ -57,9 +57,7 @@ INPUT = \ @src@/libutil/args \ @src@/libutil-tests \ @src@/libutil-test-support/tests \ - @src@/nix \ - @src@/nix-env \ - @src@/nix-store + @src@/nix # If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names # in the source code. 
If set to NO, only conditional compilation will be diff --git a/src/nix/build-remote b/src/nix/build-remote deleted file mode 120000 index 2cea44d46..000000000 --- a/src/nix/build-remote +++ /dev/null @@ -1 +0,0 @@ -../build-remote \ No newline at end of file diff --git a/src/build-remote/build-remote.cc b/src/nix/build-remote/build-remote.cc similarity index 100% rename from src/build-remote/build-remote.cc rename to src/nix/build-remote/build-remote.cc diff --git a/src/nix/nix-build b/src/nix/nix-build deleted file mode 120000 index 2954d8ac7..000000000 --- a/src/nix/nix-build +++ /dev/null @@ -1 +0,0 @@ -../nix-build \ No newline at end of file diff --git a/src/nix-build/nix-build.cc b/src/nix/nix-build/nix-build.cc similarity index 100% rename from src/nix-build/nix-build.cc rename to src/nix/nix-build/nix-build.cc diff --git a/src/nix/nix-channel b/src/nix/nix-channel deleted file mode 120000 index 29b759473..000000000 --- a/src/nix/nix-channel +++ /dev/null @@ -1 +0,0 @@ -../nix-channel \ No newline at end of file diff --git a/src/nix-channel/nix-channel.cc b/src/nix/nix-channel/nix-channel.cc similarity index 100% rename from src/nix-channel/nix-channel.cc rename to src/nix/nix-channel/nix-channel.cc diff --git a/src/nix-channel/unpack-channel.nix b/src/nix/nix-channel/unpack-channel.nix similarity index 100% rename from src/nix-channel/unpack-channel.nix rename to src/nix/nix-channel/unpack-channel.nix diff --git a/src/nix/nix-collect-garbage b/src/nix/nix-collect-garbage deleted file mode 120000 index b037fc1b0..000000000 --- a/src/nix/nix-collect-garbage +++ /dev/null @@ -1 +0,0 @@ -../nix-collect-garbage \ No newline at end of file diff --git a/src/nix-collect-garbage/nix-collect-garbage.cc b/src/nix/nix-collect-garbage/nix-collect-garbage.cc similarity index 100% rename from src/nix-collect-garbage/nix-collect-garbage.cc rename to src/nix/nix-collect-garbage/nix-collect-garbage.cc diff --git a/src/nix/nix-copy-closure b/src/nix/nix-copy-closure deleted file mode 120000 index 9063c583a..000000000 --- a/src/nix/nix-copy-closure +++ /dev/null @@ -1 +0,0 @@ -../nix-copy-closure \ No newline at end of file diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix/nix-copy-closure/nix-copy-closure.cc similarity index 100% rename from src/nix-copy-closure/nix-copy-closure.cc rename to src/nix/nix-copy-closure/nix-copy-closure.cc diff --git a/src/nix/nix-env b/src/nix/nix-env deleted file mode 120000 index f2f19f580..000000000 --- a/src/nix/nix-env +++ /dev/null @@ -1 +0,0 @@ -../nix-env \ No newline at end of file diff --git a/src/nix-env/buildenv.nix b/src/nix/nix-env/buildenv.nix similarity index 100% rename from src/nix-env/buildenv.nix rename to src/nix/nix-env/buildenv.nix diff --git a/src/nix-env/nix-env.cc b/src/nix/nix-env/nix-env.cc similarity index 100% rename from src/nix-env/nix-env.cc rename to src/nix/nix-env/nix-env.cc diff --git a/src/nix-env/user-env.cc b/src/nix/nix-env/user-env.cc similarity index 100% rename from src/nix-env/user-env.cc rename to src/nix/nix-env/user-env.cc diff --git a/src/nix-env/user-env.hh b/src/nix/nix-env/user-env.hh similarity index 100% rename from src/nix-env/user-env.hh rename to src/nix/nix-env/user-env.hh diff --git a/src/nix/nix-instantiate b/src/nix/nix-instantiate deleted file mode 120000 index 2d7502ffa..000000000 --- a/src/nix/nix-instantiate +++ /dev/null @@ -1 +0,0 @@ -../nix-instantiate \ No newline at end of file diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix/nix-instantiate/nix-instantiate.cc similarity 
index 100% rename from src/nix-instantiate/nix-instantiate.cc rename to src/nix/nix-instantiate/nix-instantiate.cc diff --git a/src/nix/nix-store b/src/nix/nix-store deleted file mode 120000 index e6efcac42..000000000 --- a/src/nix/nix-store +++ /dev/null @@ -1 +0,0 @@ -../nix-store/ \ No newline at end of file diff --git a/src/nix-store/dotgraph.cc b/src/nix/nix-store/dotgraph.cc similarity index 100% rename from src/nix-store/dotgraph.cc rename to src/nix/nix-store/dotgraph.cc diff --git a/src/nix-store/dotgraph.hh b/src/nix/nix-store/dotgraph.hh similarity index 100% rename from src/nix-store/dotgraph.hh rename to src/nix/nix-store/dotgraph.hh diff --git a/src/nix-store/graphml.cc b/src/nix/nix-store/graphml.cc similarity index 100% rename from src/nix-store/graphml.cc rename to src/nix/nix-store/graphml.cc diff --git a/src/nix-store/graphml.hh b/src/nix/nix-store/graphml.hh similarity index 100% rename from src/nix-store/graphml.hh rename to src/nix/nix-store/graphml.hh diff --git a/src/nix-store/nix-store.cc b/src/nix/nix-store/nix-store.cc similarity index 100% rename from src/nix-store/nix-store.cc rename to src/nix/nix-store/nix-store.cc diff --git a/src/nix/package.nix b/src/nix/package.nix index 3d4f6f40b..8195e6c6f 100644 --- a/src/nix/package.nix +++ b/src/nix/package.nix @@ -33,15 +33,7 @@ mkMesonExecutable (finalAttrs: { # Symbolic links to other dirs ## exes - ./build-remote ./doc - ./nix-build - ./nix-channel - ./nix-collect-garbage - ./nix-copy-closure - ./nix-env - ./nix-instantiate - ./nix-store ## dirs ./scripts ../../scripts @@ -55,8 +47,8 @@ mkMesonExecutable (finalAttrs: { ../../doc/manual/generate-store-info.nix # Other files to be included as string literals - ../nix-channel/unpack-channel.nix - ../nix-env/buildenv.nix + ./nix-channel/unpack-channel.nix + ./nix-env/buildenv.nix ./get-env.sh ./help-stores.md ../../doc/manual/source/store/types/index.md.in @@ -65,24 +57,11 @@ mkMesonExecutable (finalAttrs: { # Files ] - ++ - lib.concatMap - (dir: [ - (fileset.fileFilter (file: file.hasExt "cc") dir) - (fileset.fileFilter (file: file.hasExt "hh") dir) - (fileset.fileFilter (file: file.hasExt "md") dir) - ]) - [ - ./. - ../build-remote - ../nix-build - ../nix-channel - ../nix-collect-garbage - ../nix-copy-closure - ../nix-env - ../nix-instantiate - ../nix-store - ] + ++ [ + (fileset.fileFilter (file: file.hasExt "cc") ./.) + (fileset.fileFilter (file: file.hasExt "hh") ./.) + (fileset.fileFilter (file: file.hasExt "md") ./.) + ] ); buildInputs = [ From 5ee0d5669e783438ede3ba5b2a66d4f474426c0f Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 5 Aug 2025 02:15:21 +0300 Subject: [PATCH 106/382] libstore: Move State to an anonymous namespace Having a State class in the nix namespace is asking for ODR trouble. This class is already private to the translation unit, let's move it into an anonymous namespace. 
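An illustrative sketch of the hazard being avoided (not part of the diff below; member types are placeholders): if two translation units each define a different class named `nix::State`, the program silently violates the One Definition Rule. Wrapping the definition in an anonymous namespace gives it internal linkage, so every .cc file gets its own distinct type and the definitions can no longer clash.

    #include <map>
    #include <string>

    namespace nix {
    namespace {              // internal linkage: this State is unique to this .cc file,
    struct State             // so another translation unit may define its own State
    {                        // without creating an ODR violation across the program.
        std::map<std::string, int> priorities; // placeholder member types
        unsigned long symlinks = 0;
    };
    } // namespace
    } // namespace nix

    int main()
    {
        nix::State s; // still reachable via the enclosing namespace within this file
        s.symlinks = 1;
        return static_cast<int>(s.symlinks) - 1;
    }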
--- src/libstore/builtins/buildenv.cc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/libstore/builtins/buildenv.cc b/src/libstore/builtins/buildenv.cc index 0ff0be3aa..22ed8d807 100644 --- a/src/libstore/builtins/buildenv.cc +++ b/src/libstore/builtins/buildenv.cc @@ -10,12 +10,16 @@ namespace nix { +namespace { + struct State { std::map priorities; unsigned long symlinks = 0; }; +} // namespace + /* For each activated package, create symlinks */ static void createLinks(State & state, const Path & srcDir, const Path & dstDir, int priority) { From 0118e5ea5d4f08553f3876976296e2547ed69571 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 5 Aug 2025 02:20:28 +0300 Subject: [PATCH 107/382] libutil: Move Ctx type from the nix namespace to Hash class Same as previous commit. This really should not be a part of the `nix` namespace. Otherwise the doxygen documentation is really confusing. --- src/libutil/hash.cc | 14 +++++++------- src/libutil/include/nix/util/hash.hh | 7 ++++--- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 38ef5dd90..941224d58 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -273,7 +273,7 @@ Hash newHashAllowEmpty(std::string_view hashStr, std::optional ha return Hash::parseAny(hashStr, ha); } -union Ctx +union Hash::Ctx { blake3_hasher blake3; MD5_CTX md5; @@ -282,7 +282,7 @@ union Ctx SHA512_CTX sha512; }; -static void start(HashAlgorithm ha, Ctx & ctx) +static void start(HashAlgorithm ha, Hash::Ctx & ctx) { if (ha == HashAlgorithm::BLAKE3) blake3_hasher_init(&ctx.blake3); @@ -317,7 +317,7 @@ void blake3_hasher_update_with_heuristics(blake3_hasher * blake3, std::string_vi } } -static void update(HashAlgorithm ha, Ctx & ctx, std::string_view data) +static void update(HashAlgorithm ha, Hash::Ctx & ctx, std::string_view data) { if (ha == HashAlgorithm::BLAKE3) blake3_hasher_update_with_heuristics(&ctx.blake3, data); @@ -331,7 +331,7 @@ static void update(HashAlgorithm ha, Ctx & ctx, std::string_view data) SHA512_Update(&ctx.sha512, data.data(), data.size()); } -static void finish(HashAlgorithm ha, Ctx & ctx, unsigned char * hash) +static void finish(HashAlgorithm ha, Hash::Ctx & ctx, unsigned char * hash) { if (ha == HashAlgorithm::BLAKE3) blake3_hasher_finalize(&ctx.blake3, hash, BLAKE3_OUT_LEN); @@ -347,7 +347,7 @@ static void finish(HashAlgorithm ha, Ctx & ctx, unsigned char * hash) Hash hashString(HashAlgorithm ha, std::string_view s, const ExperimentalFeatureSettings & xpSettings) { - Ctx ctx; + Hash::Ctx ctx; Hash hash(ha, xpSettings); start(ha, ctx); update(ha, ctx, s); @@ -365,7 +365,7 @@ Hash hashFile(HashAlgorithm ha, const Path & path) HashSink::HashSink(HashAlgorithm ha) : ha(ha) { - ctx = new Ctx; + ctx = new Hash::Ctx; bytes = 0; start(ha, *ctx); } @@ -393,7 +393,7 @@ HashResult HashSink::finish() HashResult HashSink::currentHash() { flush(); - Ctx ctx2 = *ctx; + Hash::Ctx ctx2 = *ctx; Hash hash(ha); nix::finish(ha, ctx2, hash.hash); return HashResult(hash, bytes); diff --git a/src/libutil/include/nix/util/hash.hh b/src/libutil/include/nix/util/hash.hh index daacd7adf..584ab6899 100644 --- a/src/libutil/include/nix/util/hash.hh +++ b/src/libutil/include/nix/util/hash.hh @@ -57,6 +57,9 @@ extern const StringSet hashFormats; struct Hash { + /** Opaque handle type for the hash calculation state. 
*/ + union Ctx; + constexpr static size_t maxHashSize = 64; size_t hashSize = 0; uint8_t hash[maxHashSize] = {}; @@ -224,8 +227,6 @@ std::optional parseHashAlgoOpt(std::string_view s); */ std::string_view printHashAlgo(HashAlgorithm ha); -union Ctx; - struct AbstractHashSink : virtual Sink { virtual HashResult finish() = 0; @@ -235,7 +236,7 @@ class HashSink : public BufferedSink, public AbstractHashSink { private: HashAlgorithm ha; - Ctx * ctx; + Hash::Ctx * ctx; uint64_t bytes; public: From 866d5e6cf4f7dc213ff56941afe9621a866b2cb1 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 5 Aug 2025 02:25:40 +0300 Subject: [PATCH 108/382] treewide: Sprinkle more anonymous namespace for classes private to TUs This code should be private to the corresponding translation units. --- src/libutil/args.cc | 4 ++++ src/nix/diff-closures.cc | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/src/libutil/args.cc b/src/libutil/args.cc index 2e6d85afd..f4309473b 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -81,6 +81,8 @@ std::optional RootArgs::needsCompletion(std::string_view s) return {}; } +namespace { + /** * Basically this is `typedef std::optional Parser(std::string_view s, Strings & r);` * @@ -246,6 +248,8 @@ void ParseQuoted::operator()(std::shared_ptr & state, Strings & r) assert(false); } +} // namespace + Strings parseShebangContent(std::string_view s) { Strings result; diff --git a/src/nix/diff-closures.cc b/src/nix/diff-closures.cc index 020c3e13b..cbf842e5c 100644 --- a/src/nix/diff-closures.cc +++ b/src/nix/diff-closures.cc @@ -10,11 +10,15 @@ namespace nix { +namespace { + struct Info { std::string outputName; }; +} // namespace + // name -> version -> store paths typedef std::map>> GroupedPaths; From e3d7f20afee537a4a7f1ef9c8c9f1c9ded984271 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 5 Aug 2025 02:46:29 +0300 Subject: [PATCH 109/382] libstore: Remove dead variable --- src/libstore/gc.cc | 1 - 1 file changed, 1 deletion(-) diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 2ab15639a..0366fe0b0 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -825,7 +825,6 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) unreachable. We don't use readDirectory() here so that GCing can start faster. 
*/ auto linksName = baseNameOf(linksDir); - Paths entries; struct dirent * dirent; while (errno = 0, dirent = readdir(dir.get())) { checkInterrupt(); From 5e94fe5693e4846502c79b47168e7ba26aa85dd1 Mon Sep 17 00:00:00 2001 From: Philipp Otterbein Date: Tue, 5 Aug 2025 21:59:59 +0200 Subject: [PATCH 110/382] speedup derivation parsing --- src/libstore/derivations.cc | 213 ++++++++++-------- .../include/nix/store/parsed-derivations.hh | 4 +- src/libstore/parsed-derivations.cc | 6 +- 3 files changed, 124 insertions(+), 99 deletions(-) diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 30048c522..1afc343d7 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -15,6 +15,8 @@ namespace nix { +using namespace std::literals::string_view_literals; + std::optional DerivationOutput::path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const { @@ -183,36 +185,60 @@ static void expect(StringViewStream & str, std::string_view s) str.remaining.remove_prefix(s.size()); } +static void expect(StringViewStream & str, char c) +{ + if (str.remaining.empty() || str.remaining[0] != c) + throw FormatError("expected string '%1%'", c); + str.remaining.remove_prefix(1); +} + /* Read a C-style string from stream `str'. */ static BackedStringView parseString(StringViewStream & str) { - expect(str, "\""); - auto c = str.remaining.begin(), end = str.remaining.end(); - bool escaped = false; - for (; c != end && *c != '"'; c++) { - if (*c == '\\') { - c++; - if (c == end) - throw FormatError("unterminated string in derivation"); - escaped = true; + expect(str, '"'); + size_t start = 0; + size_t end = str.remaining.size(); + const auto data = str.remaining.data(); + while (start < end) { + auto idx = str.remaining.find('"', start); + if (idx == std::string_view::npos) { + break; } + size_t pos = idx; + for (; pos > 0 && data[pos - 1] == '\\'; pos--) + ; + if ((idx - pos) % 2 == 0) { // even number of backslashes + end = idx; + break; + } + start = idx + 1; } - const auto contentLen = c - str.remaining.begin(); - const auto content = str.remaining.substr(0, contentLen); - str.remaining.remove_prefix(contentLen + 1); + start = 0; + const auto content = str.remaining.substr(start, end); + str.remaining.remove_prefix(end + 1); - if (!escaped) + auto nextBackslash = content.find('\\', start); + if (nextBackslash == std::string_view::npos) { return content; + } std::string res; - res.reserve(content.size()); - for (c = content.begin(), end = content.end(); c != end; c++) - if (*c == '\\') { - c++; - res += escapes[*c]; - } else - res += *c; + res.reserve(end); + do { + if (nextBackslash == end - 1) { + throw FormatError("unterminated string in derivation"); + } + if (nextBackslash > start) { + res.append(&data[start], nextBackslash - start); + } + res.push_back(escapes[data[nextBackslash + 1]]); + start = nextBackslash + 2; + nextBackslash = content.find('\\', start); + } while (nextBackslash != std::string_view::npos); + if (end > start) { + res.append(&data[start], end - start); + } return res; } @@ -245,7 +271,7 @@ static bool endOfList(StringViewStream & str) static StringSet parseStrings(StringViewStream & str, bool arePaths) { StringSet res; - expect(str, "["); + expect(str, '['); while (!endOfList(str)) res.insert((arePaths ? 
parsePath(str) : parseString(str)).toOwned()); return res; @@ -258,20 +284,20 @@ static DerivationOutput parseDerivationOutput( std::string_view hashS, const ExperimentalFeatureSettings & xpSettings) { - if (hashAlgoStr != "") { + if (!hashAlgoStr.empty()) { ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgoStr); if (method == ContentAddressMethod::Raw::Text) xpSettings.require(Xp::DynamicDerivations); const auto hashAlgo = parseHashAlgo(hashAlgoStr); - if (hashS == "impure") { + if (hashS == "impure"sv) { xpSettings.require(Xp::ImpureDerivations); - if (pathS != "") + if (!pathS.empty()) throw FormatError("impure derivation output should not specify output path"); return DerivationOutput::Impure{ .method = std::move(method), .hashAlgo = std::move(hashAlgo), }; - } else if (hashS != "") { + } else if (!hashS.empty()) { validatePath(pathS); auto hash = Hash::parseNonSRIUnprefixed(hashS, hashAlgo); return DerivationOutput::CAFixed{ @@ -283,7 +309,7 @@ static DerivationOutput parseDerivationOutput( }; } else { xpSettings.require(Xp::CaDerivations); - if (pathS != "") + if (!pathS.empty()) throw FormatError("content-addressing derivation output should not specify output path"); return DerivationOutput::CAFloating{ .method = std::move(method), @@ -291,7 +317,7 @@ static DerivationOutput parseDerivationOutput( }; } } else { - if (pathS == "") { + if (pathS.empty()) { return DerivationOutput::Deferred{}; } validatePath(pathS); @@ -306,13 +332,13 @@ static DerivationOutput parseDerivationOutput( StringViewStream & str, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings) { - expect(str, ","); + expect(str, ','); const auto pathS = parseString(str); - expect(str, ","); + expect(str, ','); const auto hashAlgo = parseString(str); - expect(str, ","); + expect(str, ','); const auto hash = parseString(str); - expect(str, ")"); + expect(str, ')'); return parseDerivationOutput(store, *pathS, *hashAlgo, *hash, xpSettings); } @@ -353,17 +379,17 @@ parseDerivedPathMapNode(const StoreDirConfig & store, StringViewStream & str, De parseNonDynamic(); break; case '(': - expect(str, "("); + expect(str, '('); node.value = parseStrings(str, false); - expect(str, ",["); + expect(str, ",["sv); while (!endOfList(str)) { - expect(str, "("); + expect(str, '('); auto outputName = parseString(str).toOwned(); - expect(str, ","); + expect(str, ','); node.childMap.insert_or_assign(outputName, parseDerivedPathMapNode(store, str, version)); - expect(str, ")"); + expect(str, ')'); } - expect(str, ")"); + expect(str, ')'); break; default: throw FormatError("invalid inputDrvs entry in derivation"); @@ -386,24 +412,24 @@ Derivation parseDerivation( drv.name = name; StringViewStream str{s}; - expect(str, "D"); + expect(str, 'D'); DerivationATermVersion version; switch (str.peek()) { case 'e': - expect(str, "erive("); + expect(str, "erive("sv); version = DerivationATermVersion::Traditional; break; case 'r': { - expect(str, "rvWithVersion("); + expect(str, "rvWithVersion("sv); auto versionS = parseString(str); - if (*versionS == "xp-dyn-drv") { + if (*versionS == "xp-dyn-drv"sv) { // Only version we have so far version = DerivationATermVersion::DynamicDerivations; xpSettings.require(Xp::DynamicDerivations); } else { throw FormatError("Unknown derivation ATerm format version '%s'", *versionS); } - expect(str, ","); + expect(str, ','); break; } default: @@ -411,50 +437,53 @@ Derivation parseDerivation( } /* Parse the list of outputs. 
*/ - expect(str, "["); + expect(str, '['); while (!endOfList(str)) { - expect(str, "("); + expect(str, '('); std::string id = parseString(str).toOwned(); auto output = parseDerivationOutput(store, str, xpSettings); drv.outputs.emplace(std::move(id), std::move(output)); } /* Parse the list of input derivations. */ - expect(str, ",["); + expect(str, ",["sv); while (!endOfList(str)) { - expect(str, "("); + expect(str, '('); auto drvPath = parsePath(str); - expect(str, ","); + expect(str, ','); drv.inputDrvs.map.insert_or_assign( store.parseStorePath(*drvPath), parseDerivedPathMapNode(store, str, version)); - expect(str, ")"); + expect(str, ')'); } - expect(str, ","); + expect(str, ','); drv.inputSrcs = store.parseStorePathSet(parseStrings(str, true)); - expect(str, ","); + expect(str, ','); drv.platform = parseString(str).toOwned(); - expect(str, ","); + expect(str, ','); drv.builder = parseString(str).toOwned(); /* Parse the builder arguments. */ - expect(str, ",["); + expect(str, ",["sv); while (!endOfList(str)) drv.args.push_back(parseString(str).toOwned()); /* Parse the environment variables. */ - expect(str, ",["); + expect(str, ",["sv); while (!endOfList(str)) { - expect(str, "("); + expect(str, '('); auto name = parseString(str).toOwned(); - expect(str, ","); - auto value = parseString(str).toOwned(); - expect(str, ")"); - drv.env.insert_or_assign(std::move(name), std::move(value)); + expect(str, ','); + auto value = parseString(str); + if (name == StructuredAttrs::envVarName) { + drv.structuredAttrs = StructuredAttrs::parse(*std::move(value)); + } else { + drv.env.insert_or_assign(std::move(name), std::move(value).toOwned()); + } + expect(str, ')'); } - drv.structuredAttrs = StructuredAttrs::tryExtract(drv.env); - expect(str, ")"); + expect(str, ')'); return drv; } @@ -537,9 +566,9 @@ static void unparseDerivedPathMapNode( if (node.childMap.empty()) { printUnquotedStrings(s, node.value.begin(), node.value.end()); } else { - s += "("; + s += '('; printUnquotedStrings(s, node.value.begin(), node.value.end()); - s += ",["; + s += ",["sv; bool first = true; for (auto & [outputName, childNode] : node.childMap) { if (first) @@ -551,7 +580,7 @@ static void unparseDerivedPathMapNode( unparseDerivedPathMapNode(store, s, childNode); s += ')'; } - s += "])"; + s += "])"sv; } } @@ -583,16 +612,16 @@ std::string Derivation::unparse( newer form only if we need it, which we do for `Xp::DynamicDerivations`. */ if (hasDynamicDrvDep(*this)) { - s += "DrvWithVersion("; + s += "DrvWithVersion("sv; // Only version we have so far - printUnquotedString(s, "xp-dyn-drv"); - s += ","; + printUnquotedString(s, "xp-dyn-drv"sv); + s += ','; } else { - s += "Derive("; + s += "Derive("sv; } bool first = true; - s += "["; + s += '['; for (auto & i : outputs) { if (first) first = false; @@ -604,15 +633,15 @@ std::string Derivation::unparse( overloaded{ [&](const DerivationOutput::InputAddressed & doi) { s += ','; - printUnquotedString(s, maskOutputs ? "" : store.printStorePath(doi.path)); + printUnquotedString(s, maskOutputs ? ""sv : store.printStorePath(doi.path)); s += ','; - printUnquotedString(s, ""); + printUnquotedString(s, {}); s += ','; - printUnquotedString(s, ""); + printUnquotedString(s, {}); }, [&](const DerivationOutput::CAFixed & dof) { s += ','; - printUnquotedString(s, maskOutputs ? "" : store.printStorePath(dof.path(store, name, i.first))); + printUnquotedString(s, maskOutputs ? 
""sv : store.printStorePath(dof.path(store, name, i.first))); s += ','; printUnquotedString(s, dof.ca.printMethodAlgo()); s += ','; @@ -620,34 +649,34 @@ std::string Derivation::unparse( }, [&](const DerivationOutput::CAFloating & dof) { s += ','; - printUnquotedString(s, ""); + printUnquotedString(s, {}); s += ','; printUnquotedString(s, std::string{dof.method.renderPrefix()} + printHashAlgo(dof.hashAlgo)); s += ','; - printUnquotedString(s, ""); + printUnquotedString(s, {}); }, [&](const DerivationOutput::Deferred &) { s += ','; - printUnquotedString(s, ""); + printUnquotedString(s, {}); s += ','; - printUnquotedString(s, ""); + printUnquotedString(s, {}); s += ','; - printUnquotedString(s, ""); + printUnquotedString(s, {}); }, [&](const DerivationOutput::Impure & doi) { // FIXME s += ','; - printUnquotedString(s, ""); + printUnquotedString(s, {}); s += ','; printUnquotedString(s, std::string{doi.method.renderPrefix()} + printHashAlgo(doi.hashAlgo)); s += ','; - printUnquotedString(s, "impure"); + printUnquotedString(s, "impure"sv); }}, i.second.raw); s += ')'; } - s += "],["; + s += "],["sv; first = true; if (actualInputs) { for (auto & [drvHashModulo, childMap] : *actualInputs) { @@ -673,7 +702,7 @@ std::string Derivation::unparse( } } - s += "],"; + s += "],"sv; auto paths = store.printStorePathSet(inputSrcs); // FIXME: slow printUnquotedStrings(s, paths.begin(), paths.end()); @@ -684,7 +713,7 @@ std::string Derivation::unparse( s += ','; printStrings(s, args.begin(), args.end()); - s += ",["; + s += ",["sv; first = true; auto unparseEnv = [&](const StringPairs atermEnv) { @@ -696,7 +725,7 @@ std::string Derivation::unparse( s += '('; printString(s, i.first); s += ','; - printString(s, maskOutputs && outputs.count(i.first) ? "" : i.second); + printString(s, maskOutputs && outputs.count(i.first) ? ""sv : i.second); s += ')'; } }; @@ -710,7 +739,7 @@ std::string Derivation::unparse( unparseEnv(env); } - s += "])"; + s += "])"sv; return s; } @@ -724,8 +753,8 @@ bool isDerivation(std::string_view fileName) std::string outputPathName(std::string_view drvName, OutputNameView outputName) { std::string res{drvName}; - if (outputName != "out") { - res += "-"; + if (outputName != "out"sv) { + res += '-'; res += outputName; } return res; @@ -772,7 +801,7 @@ DerivationType BasicDerivation::type() const if (fixedCAOutputs.size() > 1) // FIXME: Experimental feature? throw Error("only one fixed output is allowed for now"); - if (*fixedCAOutputs.begin() != "out") + if (*fixedCAOutputs.begin() != "out"sv) throw Error("single fixed output must be named \"out\""); return DerivationType::ContentAddressed{ .sandboxed = false, @@ -1046,13 +1075,9 @@ void BasicDerivation::applyRewrites(const StringMap & rewrites) if (structuredAttrs) { // TODO rewrite the JSON AST properly, rather than dump parse round trip. 
- auto [k, jsonS] = structuredAttrs->unparse(); - jsonS = rewriteStrings(jsonS, rewrites); - StringPairs newEnv; - newEnv.insert(std::pair{k, std::move(jsonS)}); - auto newStructuredAttrs = StructuredAttrs::tryExtract(newEnv); - assert(newStructuredAttrs); - structuredAttrs = std::move(*newStructuredAttrs); + auto [_, jsonS] = structuredAttrs->unparse(); + jsonS = rewriteStrings(std::move(jsonS), rewrites); + structuredAttrs = StructuredAttrs::parse(jsonS); } } diff --git a/src/libstore/include/nix/store/parsed-derivations.hh b/src/libstore/include/nix/store/parsed-derivations.hh index 2638a3500..edef1b2d2 100644 --- a/src/libstore/include/nix/store/parsed-derivations.hh +++ b/src/libstore/include/nix/store/parsed-derivations.hh @@ -16,6 +16,8 @@ typedef std::map DerivationOutputs; struct StructuredAttrs { + static constexpr std::string_view envVarName{"__json"}; + nlohmann::json structuredAttrs; bool operator==(const StructuredAttrs &) const = default; @@ -23,7 +25,7 @@ struct StructuredAttrs /** * Unconditionally parse from a JSON string. Used by `tryExtract`. */ - static StructuredAttrs parse(const std::string & encoded); + static StructuredAttrs parse(std::string_view encoded); /** * Like `tryParse`, but removes the env var which encoded the structured diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc index 797230e97..1006bbc0a 100644 --- a/src/libstore/parsed-derivations.cc +++ b/src/libstore/parsed-derivations.cc @@ -8,16 +8,14 @@ namespace nix { -static constexpr std::string_view envVarName = "__json"; - -StructuredAttrs StructuredAttrs::parse(const std::string & encoded) +StructuredAttrs StructuredAttrs::parse(std::string_view encoded) { try { return StructuredAttrs{ .structuredAttrs = nlohmann::json::parse(encoded), }; } catch (std::exception & e) { - throw Error("cannot process __json attribute: %s", e.what()); + throw Error("cannot process %s attribute: %s", envVarName, e.what()); } } From 68f61d3fcfbc36357a3fca0a948b815d4e620a36 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Wed, 6 Aug 2025 11:01:34 +0300 Subject: [PATCH 111/382] nix/profile.cc: Fix header include path This wasn't caught by CI for aea312dae39d8cf2c2409b03b54ca6520e29732c due to weird componentized build reasons. --- src/nix/profile.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 1f00d8fa4..df92d888e 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -7,7 +7,7 @@ #include "nix/util/archive.hh" #include "nix/store/builtins/buildenv.hh" #include "nix/flake/flakeref.hh" -#include "../nix-env/user-env.hh" +#include "nix-env/user-env.hh" #include "nix/store/profiles.hh" #include "nix/store/names.hh" #include "nix/util/url.hh" From 991831227e7ce9de0f06c4b6331a455b49e3e168 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 5 Aug 2025 14:12:00 -0400 Subject: [PATCH 112/382] Clean up Base* code Make it separate from Hash, since other things can be base-encoded too. This isn't really needed for Nix, but it makes the code easier to read e.g. for someone reimplementing this stuff in a different language. (Of course, Base16/Base64 should be gotten off-the-shelf, but now the hash code, which is more bespoke, is less cluttered with the parts that would be from some library.) Many reimplementations of "Nix32" and our hash type already exist, so this cleanup is coming years too late, but I say better late than never / it is always good to nudge the code in the direction of being a "living spec". 
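A rough usage sketch of the split-out helpers, based on the headers added below (illustrative only, not part of this commit; the `std::span<const std::byte>` parameter type is inferred from the surrounding code, and linking would of course require libutil):

    #include <cstddef>
    #include <span>
    #include <string>
    #include <string_view>

    #include "nix/util/base-n.hh"      // base16:: / base64:: encode and decode
    #include "nix/util/base-nix-32.hh" // BaseNix32:: (Nix's base-32 alphabet, omitting E O U T)

    int main()
    {
        std::string_view data = "nix";
        auto bytes = std::as_bytes(std::span{data.data(), data.size()});

        // These replace the old free functions base64Encode/base64Decode and the
        // nix32Chars encoding loop that previously lived in hash.cc.
        std::string b64 = nix::base64::encode(bytes);
        std::string n32 = nix::BaseNix32::encode(bytes);

        bool roundTrips = nix::base64::decode(b64) == data && nix::BaseNix32::decode(n32) == data;
        return roundTrips ? 0 : 1;
    }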
Co-authored-by: Sergei Zimmerman --- src/libfetchers/git-utils.cc | 3 +- src/libstore/machines.cc | 3 +- src/libstore/ssh.cc | 3 +- src/libutil-tests/base-n.cc | 68 +++++++++++ src/libutil-tests/meson.build | 1 + src/libutil-tests/util.cc | 55 +-------- src/libutil/base-n.cc | 114 ++++++++++++++++++ src/libutil/base-nix-32.cc | 43 ++++++- src/libutil/hash.cc | 109 ++++++----------- .../nix/util/array-from-string-literal.hh | 27 +++++ src/libutil/include/nix/util/base-n.hh | 53 ++++++++ src/libutil/include/nix/util/base-nix-32.hh | 12 +- src/libutil/include/nix/util/hash.hh | 24 ---- src/libutil/include/nix/util/meson.build | 2 + src/libutil/include/nix/util/util.hh | 10 -- src/libutil/meson.build | 1 + src/libutil/signature/local-keys.cc | 9 +- src/libutil/util.cc | 64 ---------- 18 files changed, 357 insertions(+), 244 deletions(-) create mode 100644 src/libutil-tests/base-n.cc create mode 100644 src/libutil/base-n.cc create mode 100644 src/libutil/include/nix/util/array-from-string-literal.hh create mode 100644 src/libutil/include/nix/util/base-n.hh diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index a758848b2..993d7fb08 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -2,6 +2,7 @@ #include "nix/fetchers/git-lfs-fetch.hh" #include "nix/fetchers/cache.hh" #include "nix/fetchers/fetch-settings.hh" +#include "nix/util/base-n.hh" #include "nix/util/finally.hh" #include "nix/util/processes.hh" #include "nix/util/signals.hh" @@ -608,7 +609,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this // Calculate sha256 fingerprint from public key and escape the regex symbol '+' to match the key literally std::string keyDecoded; try { - keyDecoded = base64Decode(k.key); + keyDecoded = base64::decode(k.key); } catch (Error & e) { e.addTrace({}, "while decoding public key '%s' used for git signature", k.key); } diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc index 4ae5cd206..d61467666 100644 --- a/src/libstore/machines.cc +++ b/src/libstore/machines.cc @@ -1,3 +1,4 @@ +#include "nix/util/base-n.hh" #include "nix/store/machines.hh" #include "nix/store/globals.hh" #include "nix/store/store-open.hh" @@ -158,7 +159,7 @@ static Machine parseBuilderLine(const StringSet & defaultSystems, const std::str auto ensureBase64 = [&](size_t fieldIndex) { const auto & str = tokens[fieldIndex]; try { - base64Decode(str); + base64::decode(str); } catch (FormatError & e) { e.addTrace({}, "while parsing machine specification at a column #%lu in a row: '%s'", fieldIndex, line); throw; diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index e53c4b336..474b3622a 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -4,13 +4,14 @@ #include "nix/util/environment-variables.hh" #include "nix/util/util.hh" #include "nix/util/exec.hh" +#include "nix/util/base-n.hh" namespace nix { static std::string parsePublicHostKey(std::string_view host, std::string_view sshPublicHostKey) { try { - return base64Decode(sshPublicHostKey); + return base64::decode(sshPublicHostKey); } catch (Error & e) { e.addTrace({}, "while decoding ssh public host key for host '%s'", host); throw; diff --git a/src/libutil-tests/base-n.cc b/src/libutil-tests/base-n.cc new file mode 100644 index 000000000..8de78b55d --- /dev/null +++ b/src/libutil-tests/base-n.cc @@ -0,0 +1,68 @@ +#include +#include + +#include "nix/util/base-n.hh" +#include "nix/util/error.hh" + +namespace nix { + +static const std::span stringToByteSpan(const std::string_view s) +{ + return {(const 
std::byte *) s.data(), s.size()}; +} + +/* ---------------------------------------------------------------------------- + * base64::encode + * --------------------------------------------------------------------------*/ + +TEST(base64Encode, emptyString) +{ + ASSERT_EQ(base64::encode(stringToByteSpan("")), ""); +} + +TEST(base64Encode, encodesAString) +{ + ASSERT_EQ(base64::encode(stringToByteSpan("quod erat demonstrandum")), "cXVvZCBlcmF0IGRlbW9uc3RyYW5kdW0="); +} + +TEST(base64Encode, encodeAndDecode) +{ + auto s = "quod erat demonstrandum"; + auto encoded = base64::encode(stringToByteSpan(s)); + auto decoded = base64::decode(encoded); + + ASSERT_EQ(decoded, s); +} + +TEST(base64Encode, encodeAndDecodeNonPrintable) +{ + char s[256]; + std::iota(std::rbegin(s), std::rend(s), 0); + + auto encoded = base64::encode(std::as_bytes(std::span{std::string_view{s}})); + auto decoded = base64::decode(encoded); + + EXPECT_EQ(decoded.length(), 255u); + ASSERT_EQ(decoded, s); +} + +/* ---------------------------------------------------------------------------- + * base64::decode + * --------------------------------------------------------------------------*/ + +TEST(base64Decode, emptyString) +{ + ASSERT_EQ(base64::decode(""), ""); +} + +TEST(base64Decode, decodeAString) +{ + ASSERT_EQ(base64::decode("cXVvZCBlcmF0IGRlbW9uc3RyYW5kdW0="), "quod erat demonstrandum"); +} + +TEST(base64Decode, decodeThrowsOnInvalidChar) +{ + ASSERT_THROW(base64::decode("cXVvZCBlcm_0IGRlbW9uc3RyYW5kdW0="), Error); +} + +} // namespace nix diff --git a/src/libutil-tests/meson.build b/src/libutil-tests/meson.build index b3776e094..0097611c6 100644 --- a/src/libutil-tests/meson.build +++ b/src/libutil-tests/meson.build @@ -44,6 +44,7 @@ subdir('nix-meson-build-support/common') sources = files( 'args.cc', + 'base-n.cc', 'canon-path.cc', 'checked-arithmetic.cc', 'chunked-vector.cc', diff --git a/src/libutil-tests/util.cc b/src/libutil-tests/util.cc index 534731c6c..c48b97e8e 100644 --- a/src/libutil-tests/util.cc +++ b/src/libutil-tests/util.cc @@ -3,6 +3,7 @@ #include "nix/util/file-system.hh" #include "nix/util/terminal.hh" #include "nix/util/strings.hh" +#include "nix/util/base-n.hh" #include #include @@ -48,60 +49,6 @@ TEST(hasSuffix, trivialCase) ASSERT_TRUE(hasSuffix("foobar", "bar")); } -/* ---------------------------------------------------------------------------- - * base64Encode - * --------------------------------------------------------------------------*/ - -TEST(base64Encode, emptyString) -{ - ASSERT_EQ(base64Encode(""), ""); -} - -TEST(base64Encode, encodesAString) -{ - ASSERT_EQ(base64Encode("quod erat demonstrandum"), "cXVvZCBlcmF0IGRlbW9uc3RyYW5kdW0="); -} - -TEST(base64Encode, encodeAndDecode) -{ - auto s = "quod erat demonstrandum"; - auto encoded = base64Encode(s); - auto decoded = base64Decode(encoded); - - ASSERT_EQ(decoded, s); -} - -TEST(base64Encode, encodeAndDecodeNonPrintable) -{ - char s[256]; - std::iota(std::rbegin(s), std::rend(s), 0); - - auto encoded = base64Encode(s); - auto decoded = base64Decode(encoded); - - EXPECT_EQ(decoded.length(), 255u); - ASSERT_EQ(decoded, s); -} - -/* ---------------------------------------------------------------------------- - * base64Decode - * --------------------------------------------------------------------------*/ - -TEST(base64Decode, emptyString) -{ - ASSERT_EQ(base64Decode(""), ""); -} - -TEST(base64Decode, decodeAString) -{ - ASSERT_EQ(base64Decode("cXVvZCBlcmF0IGRlbW9uc3RyYW5kdW0="), "quod erat demonstrandum"); -} - -TEST(base64Decode, 
decodeThrowsOnInvalidChar) -{ - ASSERT_THROW(base64Decode("cXVvZCBlcm_0IGRlbW9uc3RyYW5kdW0="), Error); -} - /* ---------------------------------------------------------------------------- * getLine * --------------------------------------------------------------------------*/ diff --git a/src/libutil/base-n.cc b/src/libutil/base-n.cc new file mode 100644 index 000000000..4c9726ad2 --- /dev/null +++ b/src/libutil/base-n.cc @@ -0,0 +1,114 @@ +#include + +#include "nix/util/array-from-string-literal.hh" +#include "nix/util/util.hh" +#include "nix/util/base-n.hh" + +using namespace std::literals; + +namespace nix { + +constexpr static const std::array base16Chars = "0123456789abcdef"_arrayNoNull; + +std::string base16::encode(std::span b) +{ + std::string buf; + buf.reserve(b.size() * 2); + for (size_t i = 0; i < b.size(); i++) { + buf.push_back(base16Chars[(uint8_t) b.data()[i] >> 4]); + buf.push_back(base16Chars[(uint8_t) b.data()[i] & 0x0f]); + } + return buf; +} + +std::string base16::decode(std::string_view s) +{ + auto parseHexDigit = [&](char c) { + if (c >= '0' && c <= '9') + return c - '0'; + if (c >= 'A' && c <= 'F') + return c - 'A' + 10; + if (c >= 'a' && c <= 'f') + return c - 'a' + 10; + throw FormatError("invalid character in Base16 string: '%c'", c); + }; + + assert(s.size() % 2 == 0); + auto decodedSize = s.size() / 2; + + std::string res; + res.reserve(decodedSize); + + for (unsigned int i = 0; i < decodedSize; i++) { + res.push_back(parseHexDigit(s[i * 2]) << 4 | parseHexDigit(s[i * 2 + 1])); + } + + return res; +} + +constexpr static const std::array base64Chars = + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"_arrayNoNull; + +std::string base64::encode(std::span s) +{ + std::string res; + res.reserve((s.size() + 2) / 3 * 4); + int data = 0, nbits = 0; + + for (std::byte c : s) { + data = data << 8 | (uint8_t) c; + nbits += 8; + while (nbits >= 6) { + nbits -= 6; + res.push_back(base64Chars[data >> nbits & 0x3f]); + } + } + + if (nbits) + res.push_back(base64Chars[data << (6 - nbits) & 0x3f]); + while (res.size() % 4) + res.push_back('='); + + return res; +} + +std::string base64::decode(std::string_view s) +{ + constexpr char npos = -1; + constexpr std::array base64DecodeChars = [&] { + std::array result{}; + for (auto & c : result) + c = npos; + for (int i = 0; i < 64; i++) + result[base64Chars[i]] = i; + return result; + }(); + + std::string res; + // Some sequences are missing the padding consisting of up to two '='. 
+ // vvv + res.reserve((s.size() + 2) / 4 * 3); + unsigned int d = 0, bits = 0; + + for (char c : s) { + if (c == '=') + break; + if (c == '\n') + continue; + + char digit = base64DecodeChars[(unsigned char) c]; + if (digit == npos) + throw FormatError("invalid character in Base64 string: '%c'", c); + + bits += 6; + d = d << 6 | digit; + if (bits >= 8) { + res.push_back(d >> (bits - 8) & 0xff); + bits -= 8; + } + } + + return res; +} + +} // namespace nix diff --git a/src/libutil/base-nix-32.cc b/src/libutil/base-nix-32.cc index dec5cd7d7..4f5af462d 100644 --- a/src/libutil/base-nix-32.cc +++ b/src/libutil/base-nix-32.cc @@ -1,6 +1,7 @@ #include #include "nix/util/base-nix-32.hh" +#include "nix/util/util.hh" namespace nix { @@ -16,12 +17,12 @@ constexpr const std::array BaseNix32::reverseMap = [] { return map; }(); -std::string BaseNix32::encode(std::span originalData) +std::string BaseNix32::encode(std::span bs) { - if (originalData.size() == 0) + if (bs.size() == 0) return {}; - size_t len = encodedLength(originalData.size()); + size_t len = encodedLength(bs.size()); assert(len); std::string s; @@ -31,12 +32,42 @@ std::string BaseNix32::encode(std::span originalData) unsigned int b = n * 5; unsigned int i = b / 8; unsigned int j = b % 8; - unsigned char c = - (originalData.data()[i] >> j) | (i >= originalData.size() - 1 ? 0 : originalData.data()[i + 1] << (8 - j)); - s.push_back(characters[c & 0x1f]); + std::byte c = (bs.data()[i] >> j) | (i >= bs.size() - 1 ? std::byte{0} : bs.data()[i + 1] << (8 - j)); + s.push_back(characters[uint8_t(c & std::byte{0x1f})]); } return s; } +std::string BaseNix32::decode(std::string_view s) +{ + std::string res; + res.reserve((s.size() * 5 + 7) / 8); // ceiling(size * 5/8) + + for (unsigned int n = 0; n < s.size(); ++n) { + char c = s[s.size() - n - 1]; + auto digit_opt = BaseNix32::lookupReverse(c); + + if (!digit_opt) + throw FormatError("invalid character in Nix32 (Nix's Base32 variation) string: '%c'", c); + + uint8_t digit = *digit_opt; + + unsigned int b = n * 5; + unsigned int i = b / 8; + unsigned int j = b % 8; + + // Ensure res has enough space + res.resize(i + 1); + res[i] |= digit << j; + + if (digit >> (8 - j)) { + res.resize(i + 2); + res[i + 1] |= digit >> (8 - j); + } + } + + return res; +} + } // namespace nix diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 03003c689..1319924bf 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -11,6 +11,7 @@ #include "nix/util/archive.hh" #include "nix/util/configuration.hh" #include "nix/util/split.hh" +#include "nix/util/base-n.hh" #include "nix/util/base-nix-32.hh" #include @@ -59,25 +60,6 @@ std::strong_ordering Hash::operator<=>(const Hash & h) const noexcept return std::strong_ordering::equivalent; } -const std::string base16Chars = "0123456789abcdef"; - -static std::string printHash16(const Hash & hash) -{ - std::string buf; - buf.reserve(hash.hashSize * 2); - for (unsigned int i = 0; i < hash.hashSize; i++) { - buf.push_back(base16Chars[hash.hash[i] >> 4]); - buf.push_back(base16Chars[hash.hash[i] & 0x0f]); - } - return buf; -} - -static std::string printHash32(const Hash & hash) -{ - assert(hash.hashSize); - return BaseNix32::encode({&hash.hash[0], hash.hashSize}); -} - std::string printHash16or32(const Hash & hash) { assert(static_cast(hash.algo)); @@ -91,16 +73,20 @@ std::string Hash::to_string(HashFormat hashFormat, bool includeAlgo) const s += printHashAlgo(algo); s += hashFormat == HashFormat::SRI ? 
'-' : ':'; } + const auto bytes = std::as_bytes(std::span{&hash[0], hashSize}); switch (hashFormat) { case HashFormat::Base16: - s += printHash16(*this); + assert(hashSize); + s += base16::encode(bytes); break; case HashFormat::Nix32: - s += printHash32(*this); + assert(hashSize); + s += BaseNix32::encode(bytes); break; case HashFormat::Base64: case HashFormat::SRI: - s += base64Encode(std::string_view((const char *) hash, hashSize)); + assert(hashSize); + s += base64::encode(bytes); break; } return s; @@ -180,63 +166,38 @@ Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo) Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI) : Hash(algo) { - if (!isSRI && rest.size() == base16Len()) { + auto [decode, formatName] = [&]() -> std::pair { + if (isSRI) { + /* In the SRI case, we always are using Base64. If the + length is wrong, get an error later. */ + return {base64::decode, "SRI"}; + } else { + /* Otherwise, decide via the length of the hash (for the + given algorithm) what base encoding it is. */ - auto parseHexDigit = [&](char c) { - if (c >= '0' && c <= '9') - return c - '0'; - if (c >= 'A' && c <= 'F') - return c - 'A' + 10; - if (c >= 'a' && c <= 'f') - return c - 'a' + 10; - throw BadHash("invalid base-16 hash '%s'", rest); - }; + if (rest.size() == base16::encodedLength(hashSize)) + return {base16::decode, "base16"}; - for (unsigned int i = 0; i < hashSize; i++) { - hash[i] = parseHexDigit(rest[i * 2]) << 4 | parseHexDigit(rest[i * 2 + 1]); + if (rest.size() == BaseNix32::encodedLength(hashSize)) + return {BaseNix32::decode, "nix32"}; + + if (rest.size() == base64::encodedLength(hashSize)) + return {base64::decode, "Base64"}; } - } - else if (!isSRI && rest.size() == base32Len()) { - - for (unsigned int n = 0; n < rest.size(); ++n) { - char c = rest[rest.size() - n - 1]; - auto digit_opt = BaseNix32::lookupReverse(c); - - if (!digit_opt) - throw BadHash("invalid base-32 hash: '%s'", rest); - - uint8_t digit = std::move(*digit_opt); - - unsigned int b = n * 5; - unsigned int i = b / 8; - unsigned int j = b % 8; - hash[i] |= digit << j; - - if (i < hashSize - 1) { - hash[i + 1] |= digit >> (8 - j); - } else { - if (digit >> (8 - j)) - throw BadHash("invalid base-32 hash '%s'", rest); - } - } - } - - else if (isSRI || rest.size() == base64Len()) { - std::string d; - try { - d = base64Decode(rest); - } catch (Error & e) { - e.addTrace({}, "While decoding hash '%s'", rest); - } - if (d.size() != hashSize) - throw BadHash("invalid %s hash '%s'", isSRI ? 
"SRI" : "base-64", rest); - assert(hashSize); - memcpy(hash, d.data(), hashSize); - } - - else throw BadHash("hash '%s' has wrong length for hash algorithm '%s'", rest, printHashAlgo(this->algo)); + }(); + + std::string d; + try { + d = decode(rest); + } catch (Error & e) { + e.addTrace({}, "While decoding hash '%s'", rest); + } + if (d.size() != hashSize) + throw BadHash("invalid %s hash '%s' %d %d", formatName, rest); + assert(hashSize); + memcpy(hash, d.data(), hashSize); } Hash Hash::random(HashAlgorithm algo) diff --git a/src/libutil/include/nix/util/array-from-string-literal.hh b/src/libutil/include/nix/util/array-from-string-literal.hh new file mode 100644 index 000000000..a4a137609 --- /dev/null +++ b/src/libutil/include/nix/util/array-from-string-literal.hh @@ -0,0 +1,27 @@ +#pragma once +///@file + +#include +#include + +namespace nix { + +template +struct ArrayNoNullAdaptor +{ + std::array data; + + constexpr ArrayNoNullAdaptor(const char (&init)[sizeWithNull]) + { + static_assert(sizeWithNull > 0); + std::copy_n(init, sizeWithNull - 1, data.data()); + } +}; + +template +constexpr auto operator""_arrayNoNull() +{ + return str.data; +} + +} // namespace nix diff --git a/src/libutil/include/nix/util/base-n.hh b/src/libutil/include/nix/util/base-n.hh new file mode 100644 index 000000000..637a06f3f --- /dev/null +++ b/src/libutil/include/nix/util/base-n.hh @@ -0,0 +1,53 @@ +#pragma once +///@file + +#include +#include + +namespace nix { + +namespace base16 { + +/** + * Returns the length of a base-16 representation of this many bytes. + */ +[[nodiscard]] constexpr static inline size_t encodedLength(size_t origSize) +{ + return origSize * 2; +} + +/** + * Encode arbitrary bytes as Base16. + */ +std::string encode(std::span b); + +/** + * Decode arbitrary Base16 string to bytes. + */ +std::string decode(std::string_view s); + +} // namespace base16 + +namespace base64 { + +/** + * Returns the length of a base-64 representation of this many bytes. + */ +[[nodiscard]] constexpr static inline size_t encodedLength(size_t origSize) +{ + return ((4 * origSize / 3) + 3) & ~3; +} + +/** + * Encode arbitrary bytes as Base64. + */ +std::string encode(std::span b); + +/** + * Decode arbitrary Base64 string to bytes. + */ +std::string decode(std::string_view s); + +} // namespace base64 + +} // namespace nix diff --git a/src/libutil/include/nix/util/base-nix-32.hh b/src/libutil/include/nix/util/base-nix-32.hh index 37b23a2bb..28095e92c 100644 --- a/src/libutil/include/nix/util/base-nix-32.hh +++ b/src/libutil/include/nix/util/base-nix-32.hh @@ -7,14 +7,14 @@ #include #include +#include "nix/util/array-from-string-literal.hh" + namespace nix { struct BaseNix32 { /// omitted: E O U T - constexpr static std::array characters = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', - 'b', 'c', 'd', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', - 'n', 'p', 'q', 'r', 's', 'v', 'w', 'x', 'y', 'z'}; + constexpr static std::array characters = "0123456789abcdfghijklmnpqrsvwxyz"_arrayNoNull; private: static const std::array reverseMap; @@ -34,12 +34,14 @@ public: /** * Returns the length of a base-32 representation of this hash. 
*/ - static size_t encodedLength(size_t originalLength) + [[nodiscard]] constexpr static inline size_t encodedLength(size_t originalLength) { return (originalLength * 8 - 1) / 5 + 1; } - static std::string encode(std::span originalData); + static std::string encode(std::span originalData); + + static std::string decode(std::string_view s); }; } // namespace nix diff --git a/src/libutil/include/nix/util/hash.hh b/src/libutil/include/nix/util/hash.hh index 7b76095cf..fdd4c6fa7 100644 --- a/src/libutil/include/nix/util/hash.hh +++ b/src/libutil/include/nix/util/hash.hh @@ -110,30 +110,6 @@ public: */ std::strong_ordering operator<=>(const Hash & h2) const noexcept; - /** - * Returns the length of a base-16 representation of this hash. - */ - [[nodiscard]] size_t base16Len() const - { - return hashSize * 2; - } - - /** - * Returns the length of a base-32 representation of this hash. - */ - [[nodiscard]] size_t base32Len() const - { - return (hashSize * 8 - 1) / 5 + 1; - } - - /** - * Returns the length of a base-64 representation of this hash. - */ - [[nodiscard]] size_t base64Len() const - { - return ((4 * hashSize / 3) + 3) & ~3; - } - /** * Return a string representation of the hash, in base-16, base-32 * or base-64. By default, this is prefixed by the hash algo diff --git a/src/libutil/include/nix/util/meson.build b/src/libutil/include/nix/util/meson.build index b7d4d761d..bc58b4d5e 100644 --- a/src/libutil/include/nix/util/meson.build +++ b/src/libutil/include/nix/util/meson.build @@ -8,6 +8,8 @@ headers = files( 'archive.hh', 'args.hh', 'args/root.hh', + 'array-from-string-literal.hh', + 'base-n.hh', 'base-nix-32.hh', 'callback.hh', 'canon-path.hh', diff --git a/src/libutil/include/nix/util/util.hh b/src/libutil/include/nix/util/util.hh index 015086d39..56041a112 100644 --- a/src/libutil/include/nix/util/util.hh +++ b/src/libutil/include/nix/util/util.hh @@ -179,16 +179,6 @@ constexpr char treeLast[] = "└───"; constexpr char treeLine[] = "│ "; constexpr char treeNull[] = " "; -/** - * Encode arbitrary bytes as Base64. - */ -std::string base64Encode(std::string_view s); - -/** - * Decode arbitrary bytes to Base64. - */ -std::string base64Decode(std::string_view s); - /** * Remove common leading whitespace from the lines in the string * 's'. 
For example, if every line is indented by at least 3 spaces, diff --git a/src/libutil/meson.build b/src/libutil/meson.build index fb3e98e1d..ea2cb679e 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -112,6 +112,7 @@ subdir('nix-meson-build-support/common') sources = [config_priv_h] + files( 'archive.cc', 'args.cc', + 'base-n.cc', 'base-nix-32.cc', 'canon-path.cc', 'compression.cc', diff --git a/src/libutil/signature/local-keys.cc b/src/libutil/signature/local-keys.cc index 374b5569d..1541aed2f 100644 --- a/src/libutil/signature/local-keys.cc +++ b/src/libutil/signature/local-keys.cc @@ -1,6 +1,7 @@ #include "nix/util/signature/local-keys.hh" #include "nix/util/file-system.hh" +#include "nix/util/base-n.hh" #include "nix/util/util.hh" #include @@ -25,7 +26,7 @@ Key::Key(std::string_view s, bool sensitiveValue) if (name == "" || key == "") throw FormatError("key is corrupt"); - key = base64Decode(key); + key = base64::decode(key); } catch (Error & e) { std::string extra; if (!sensitiveValue) @@ -37,7 +38,7 @@ Key::Key(std::string_view s, bool sensitiveValue) std::string Key::to_string() const { - return name + ":" + base64Encode(key); + return name + ":" + base64::encode(std::as_bytes(std::span{key})); } SecretKey::SecretKey(std::string_view s) @@ -52,7 +53,7 @@ std::string SecretKey::signDetached(std::string_view data) const unsigned char sig[crypto_sign_BYTES]; unsigned long long sigLen; crypto_sign_detached(sig, &sigLen, (unsigned char *) data.data(), data.size(), (unsigned char *) key.data()); - return name + ":" + base64Encode(std::string((char *) sig, sigLen)); + return name + ":" + base64::encode(std::as_bytes(std::span{sig, sigLen})); } PublicKey SecretKey::toPublicKey() const @@ -93,7 +94,7 @@ bool PublicKey::verifyDetachedAnon(std::string_view data, std::string_view sig) { std::string sig2; try { - sig2 = base64Decode(sig); + sig2 = base64::decode(sig); } catch (Error & e) { e.addTrace({}, "while decoding signature '%s'", sig); } diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 5cbbb80ee..383a904ad 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -204,70 +204,6 @@ void ignoreExceptionExceptInterrupt(Verbosity lvl) } } -constexpr char base64Chars[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; - -std::string base64Encode(std::string_view s) -{ - std::string res; - res.reserve((s.size() + 2) / 3 * 4); - int data = 0, nbits = 0; - - for (char c : s) { - data = data << 8 | (unsigned char) c; - nbits += 8; - while (nbits >= 6) { - nbits -= 6; - res.push_back(base64Chars[data >> nbits & 0x3f]); - } - } - - if (nbits) - res.push_back(base64Chars[data << (6 - nbits) & 0x3f]); - while (res.size() % 4) - res.push_back('='); - - return res; -} - -std::string base64Decode(std::string_view s) -{ - constexpr char npos = -1; - constexpr std::array base64DecodeChars = [&] { - std::array result{}; - for (auto & c : result) - c = npos; - for (int i = 0; i < 64; i++) - result[base64Chars[i]] = i; - return result; - }(); - - std::string res; - // Some sequences are missing the padding consisting of up to two '='. 
- // vvv - res.reserve((s.size() + 2) / 4 * 3); - unsigned int d = 0, bits = 0; - - for (char c : s) { - if (c == '=') - break; - if (c == '\n') - continue; - - char digit = base64DecodeChars[(unsigned char) c]; - if (digit == npos) - throw FormatError("invalid character in Base64 string: '%c'", c); - - bits += 6; - d = d << 6 | digit; - if (bits >= 8) { - res.push_back(d >> (bits - 8) & 0xff); - bits -= 8; - } - } - - return res; -} - std::string stripIndentation(std::string_view s) { size_t minIndent = 10000; From ead795404d2f8dd642e5baa6e516564bc84ea408 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 5 Aug 2025 18:09:05 -0400 Subject: [PATCH 113/382] Base64 trailing test (from Snix, thanks) --- src/libutil-tests/base-n.cc | 42 +++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/src/libutil-tests/base-n.cc b/src/libutil-tests/base-n.cc index 8de78b55d..b3a845365 100644 --- a/src/libutil-tests/base-n.cc +++ b/src/libutil-tests/base-n.cc @@ -65,4 +65,46 @@ TEST(base64Decode, decodeThrowsOnInvalidChar) ASSERT_THROW(base64::decode("cXVvZCBlcm_0IGRlbW9uc3RyYW5kdW0="), Error); } +// A SHA-512 hash. Hex encoded to be clearer / distinct from the Base64 test case. +const std::string expectedDecoded = base16::decode( + "ee0f754c1bd8a18428ad14eaa3ead80ff8b96275af5012e7a8384f1f10490da056eec9ae3cc791a7a13a24e16e54df5bccdd109c7d53a14534bbd7360a300b11"); + +struct Base64TrailingParseCase +{ + std::string sri; +}; + +class Base64TrailParseTest : public ::testing::TestWithParam +{}; + +TEST_P(Base64TrailParseTest, AcceptsVariousSha512Paddings) +{ + auto sri = GetParam().sri; + auto decoded = base64::decode(sri); + + EXPECT_EQ(decoded, expectedDecoded); +} + +/* Nix's Base64 implementation has historically accepted trailing + garbage. We may want to warn about this in the future, but we cannot + take it away suddenly. + + Test case taken from Snix: + https://git.snix.dev/snix/snix/src/commit/2a29b90c7f3f3c52b5bdae50260fb0bd903c6b38/snix/nix-compat/src/nixhash/mod.rs#L431 + */ +INSTANTIATE_TEST_SUITE_P( + Sha512Paddings, + Base64TrailParseTest, + ::testing::Values( + Base64TrailingParseCase{ + "7g91TBvYoYQorRTqo+rYD/i5YnWvUBLnqDhPHxBJDaBW7smuPMeRp6E6JOFuVN9bzN0QnH1ToUU0u9c2CjALEQ"}, + Base64TrailingParseCase{ + "7g91TBvYoYQorRTqo+rYD/i5YnWvUBLnqDhPHxBJDaBW7smuPMeRp6E6JOFuVN9bzN0QnH1ToUU0u9c2CjALEQ="}, + Base64TrailingParseCase{ + "7g91TBvYoYQorRTqo+rYD/i5YnWvUBLnqDhPHxBJDaBW7smuPMeRp6E6JOFuVN9bzN0QnH1ToUU0u9c2CjALEQ=="}, + Base64TrailingParseCase{ + "7g91TBvYoYQorRTqo+rYD/i5YnWvUBLnqDhPHxBJDaBW7smuPMeRp6E6JOFuVN9bzN0QnH1ToUU0u9c2CjALEQ==="}, + Base64TrailingParseCase{ + "7g91TBvYoYQorRTqo+rYD/i5YnWvUBLnqDhPHxBJDaBW7smuPMeRp6E6JOFuVN9bzN0QnH1ToUU0u9c2CjALEQ== cheesecake"})); + } // namespace nix From c38987e04a953bcb8161eef31ec20906bffa37fc Mon Sep 17 00:00:00 2001 From: Philip Taron Date: Wed, 23 Jul 2025 10:59:04 -0700 Subject: [PATCH 114/382] libstore: always canonicalize directory permissions MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Prior to this patch, mode 0444 is not updated to 0555 for directories. That means for instance 0554 is canonicalized, but not 0444. We don't believe this has any implications for backwards compatibility, because directories do not have permissions in NAR format and so are always 0555 after deserialization, and store paths with wrong permissions can’t be copied to another host. 
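For illustration, here is a minimal sketch of the canonicalisation rule after this change (it mirrors the hunk below; the function name is made up for the example):

```
#include <sys/stat.h>

// Directories now always canonicalise to 0555; regular files still become
// 0444, or 0555 when a user execute bit was set.
static mode_t canonicalMode(mode_t st_mode)
{
    mode_t mode = st_mode & ~S_IFMT; // mask out file-type bits
    bool isDir = S_ISDIR(st_mode);
    if ((mode != 0444 || isDir) && mode != 0555)
        mode = (st_mode & S_IFMT) | 0444 | (((st_mode & S_IXUSR) || isDir) ? 0111 : 0);
    return mode;
}
```

In other words, a directory stuck at 0444 is now rewritten to 0555, while a plain 0444 file is left untouched as before.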
Co-authored-by: Robert Hensing --- src/libstore/posix-fs-canonicalise.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libstore/posix-fs-canonicalise.cc b/src/libstore/posix-fs-canonicalise.cc index 2484d51a6..a889938c9 100644 --- a/src/libstore/posix-fs-canonicalise.cc +++ b/src/libstore/posix-fs-canonicalise.cc @@ -21,9 +21,9 @@ static void canonicaliseTimestampAndPermissions(const Path & path, const struct /* Mask out all type related bits. */ mode_t mode = st.st_mode & ~S_IFMT; - - if (mode != 0444 && mode != 0555) { - mode = (st.st_mode & S_IFMT) | 0444 | (st.st_mode & S_IXUSR ? 0111 : 0); + bool isDir = S_ISDIR(st.st_mode); + if ((mode != 0444 || isDir) && mode != 0555) { + mode = (st.st_mode & S_IFMT) | 0444 | (st.st_mode & S_IXUSR || isDir ? 0111 : 0); if (chmod(path.c_str(), mode) == -1) throw SysError("changing mode of '%1%' to %2$o", path, mode); } From 02cc215221e21822bdfdeb0291c2ae49a8257ef6 Mon Sep 17 00:00:00 2001 From: h0nIg Date: Wed, 6 Aug 2025 20:33:44 +0200 Subject: [PATCH 115/382] Revert "docker: make sure `nix config check` works" --- docker.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/docker.nix b/docker.nix index 410e4a178..ed62c3a4e 100644 --- a/docker.nix +++ b/docker.nix @@ -311,6 +311,7 @@ let # see doc/manual/source/command-ref/files/profiles.md ln -s ${profile} $out/nix/var/nix/profiles/default-1-link ln -s /nix/var/nix/profiles/default-1-link $out/nix/var/nix/profiles/default + ln -s /nix/var/nix/profiles/default $out${userHome}/.nix-profile # see doc/manual/source/command-ref/files/channels.md ln -s ${channel} $out/nix/var/nix/profiles/per-user/${uname}/channels-1-link From 49ba06175ebc632a4c043e944ac6d9faf6a3ef2a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Kr=C3=BCger?= Date: Sat, 25 Apr 2020 16:07:41 +0200 Subject: [PATCH 116/382] Add user@address:port support This patch allows users to specify the connection port in the store URLS like so: ``` nix store info --store "ssh-ng://localhost:22" --json ``` Previously this failed with: `error: failed to start SSH connection to 'localhost:22'`, because the code did not distinguish the port from the hostname. This patch remedies that problem by introducing a ParsedURL::Authority type for working with parsed authority components of URIs. Now that the URL parsing code is less ad-hoc we can add more long-awaited fixes for specifying SSH connection ports in store URIs. Builds upon the work from bd1d2d1041a321284efcf22e11beb86ede08648d. 
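To make the new parsing concrete, here is a minimal sketch of what the `ParsedURL::Authority` type introduced below produces for a user/host/port authority (the host name is a placeholder, not taken from the patch):

```
#include "nix/util/url.hh"
#include <cassert>

void authorityExample()
{
    // Authority part of e.g. ssh://nix@example.org:2222
    auto auth = nix::ParsedURL::Authority::parse("nix@example.org:2222");
    assert(auth.user == "nix");
    assert(auth.host == "example.org");
    assert(auth.port == 2222);
    assert(auth.to_string() == "nix@example.org:2222"); // round-trips
}
```

The parsed port is then handed to ssh via its `-p` option, instead of being smuggled into the host name as before.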
Co-authored-by: Sergei Zimmerman Co-authored-by: John Ericson --- doc/manual/rl-next/port-in-store-uris.md | 13 +++ src/libfetchers/git-lfs-fetch.cc | 7 +- src/libfetchers/path.cc | 2 +- src/libflake/flakeref.cc | 6 +- src/libstore-tests/machines.cc | 8 ++ src/libstore/common-ssh-store-config.cc | 23 ++--- .../nix/store/common-ssh-store-config.hh | 22 +---- src/libstore/include/nix/store/globals.hh | 2 +- src/libstore/include/nix/store/ssh.hh | 6 +- src/libstore/legacy-ssh-store.cc | 21 +++-- src/libstore/legacy-ssh-store.md | 2 +- src/libstore/ssh-store.md | 2 +- src/libstore/ssh.cc | 69 +++++++++++--- src/libstore/store-reference.cc | 2 +- src/libutil-tests/url.cc | 62 ++++++++----- src/libutil/include/nix/util/url.hh | 68 +++++++++++++- src/libutil/url.cc | 92 ++++++++++++++++++- tests/nixos/remote-builds.nix | 6 +- 18 files changed, 312 insertions(+), 101 deletions(-) create mode 100644 doc/manual/rl-next/port-in-store-uris.md diff --git a/doc/manual/rl-next/port-in-store-uris.md b/doc/manual/rl-next/port-in-store-uris.md new file mode 100644 index 000000000..8291c0fd1 --- /dev/null +++ b/doc/manual/rl-next/port-in-store-uris.md @@ -0,0 +1,13 @@ +--- +synopsis: "Add support for user@address:port syntax in store URIs" +prs: [3425] +issues: [7044] +--- + +It's now possible to specify the port used for the SSH stores directly in the store URL in accordance with [RFC3986](https://datatracker.ietf.org/doc/html/rfc3986). Previously the only way to specify custom ports was via `ssh_config` or `NIX_SSHOPTS` environment variable, because Nix incorrectly passed the port number together with the host name to the SSH executable. This has now been fixed. + +This change affects [store references](@docroot@/store/types/index.md#store-url-format) passed via the `--store` and similar flags in CLI as well as in the configuration for [remote builders](@docroot@/command-ref/conf-file.md#conf-builders). For example, the following store URIs now work: + +- `ssh://127.0.0.1:2222` +- `ssh://[b573:6a48:e224:840b:6007:6275:f8f7:ebf3]:22` +- `ssh-ng://[b573:6a48:e224:840b:6007:6275:f8f7:ebf3]:22` diff --git a/src/libfetchers/git-lfs-fetch.cc b/src/libfetchers/git-lfs-fetch.cc index 97f10f0c6..1337c5b83 100644 --- a/src/libfetchers/git-lfs-fetch.cc +++ b/src/libfetchers/git-lfs-fetch.cc @@ -44,16 +44,19 @@ static void downloadToSink( static std::string getLfsApiToken(const ParsedURL & url) { + assert(url.authority.has_value()); + + // FIXME: Not entirely correct. 
auto [status, output] = runProgram( RunOptions{ .program = "ssh", - .args = {*url.authority, "git-lfs-authenticate", url.path, "download"}, + .args = {url.authority->to_string(), "git-lfs-authenticate", url.path, "download"}, }); if (output.empty()) throw Error( "git-lfs-authenticate: no output (cmd: ssh %s git-lfs-authenticate %s download)", - url.authority.value_or(""), + url.authority.value_or(ParsedURL::Authority{}).to_string(), url.path); auto queryResp = nlohmann::json::parse(output); diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index 9f8344edf..e5635ee75 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -15,7 +15,7 @@ struct PathInputScheme : InputScheme if (url.scheme != "path") return {}; - if (url.authority && *url.authority != "") + if (url.authority && url.authority->host.size()) throw Error("path URL '%s' should not have an authority ('%s')", url, *url.authority); Input input{settings}; diff --git a/src/libflake/flakeref.cc b/src/libflake/flakeref.cc index 9a75a2259..5b1c3e8b2 100644 --- a/src/libflake/flakeref.cc +++ b/src/libflake/flakeref.cc @@ -142,7 +142,7 @@ std::pair parsePathFlakeRefWithFragment( if (pathExists(flakeRoot + "/.git")) { auto parsedURL = ParsedURL{ .scheme = "git+file", - .authority = "", + .authority = ParsedURL::Authority{}, .path = flakeRoot, .query = query, .fragment = fragment, @@ -172,7 +172,7 @@ std::pair parsePathFlakeRefWithFragment( return fromParsedURL( fetchSettings, - {.scheme = "path", .authority = "", .path = path, .query = query, .fragment = fragment}, + {.scheme = "path", .authority = ParsedURL::Authority{}, .path = path, .query = query, .fragment = fragment}, isFlake); } @@ -192,7 +192,7 @@ parseFlakeIdRef(const fetchers::Settings & fetchSettings, const std::string & ur if (std::regex_match(url, match, flakeRegex)) { auto parsedURL = ParsedURL{ .scheme = "flake", - .authority = "", + .authority = ParsedURL::Authority{}, .path = match[1], }; diff --git a/src/libstore-tests/machines.cc b/src/libstore-tests/machines.cc index 72562e6fc..e4186372d 100644 --- a/src/libstore-tests/machines.cc +++ b/src/libstore-tests/machines.cc @@ -39,6 +39,14 @@ TEST(machines, getMachinesUriOnly) EXPECT_THAT(actual[0], Field(&Machine::sshPublicHostKey, SizeIs(0))); } +TEST(machines, getMachinesUriWithPort) +{ + auto actual = Machine::parseConfig({"TEST_ARCH-TEST_OS"}, "nix@scratchy.labs.cs.uu.nl:2222"); + ASSERT_THAT(actual, SizeIs(1)); + EXPECT_THAT( + actual[0], Field(&Machine::storeUri, Eq(StoreReference::parse("ssh://nix@scratchy.labs.cs.uu.nl:2222")))); +} + TEST(machines, getMachinesDefaults) { auto actual = Machine::parseConfig({"TEST_ARCH-TEST_OS"}, "nix@scratchy.labs.cs.uu.nl - - - - - - -"); diff --git a/src/libstore/common-ssh-store-config.cc b/src/libstore/common-ssh-store-config.cc index 0e3a126ec..12f187b4c 100644 --- a/src/libstore/common-ssh-store-config.cc +++ b/src/libstore/common-ssh-store-config.cc @@ -5,33 +5,22 @@ namespace nix { -static std::string extractConnStr(std::string_view scheme, std::string_view _connStr) +CommonSSHStoreConfig::CommonSSHStoreConfig(std::string_view scheme, std::string_view authority, const Params & params) + : CommonSSHStoreConfig(scheme, ParsedURL::Authority::parse(authority), params) { - if (_connStr.empty()) - throw UsageError("`%s` store requires a valid SSH host as the authority part in Store URI", scheme); - - std::string connStr{_connStr}; - - std::smatch result; - static std::regex v6AddrRegex("^((.*)@)?\\[(.*)\\]$"); - - if (std::regex_match(connStr, result, 
v6AddrRegex)) { - connStr = result[1].matched ? result.str(1) + result.str(3) : result.str(3); - } - - return connStr; } -CommonSSHStoreConfig::CommonSSHStoreConfig(std::string_view scheme, std::string_view host, const Params & params) +CommonSSHStoreConfig::CommonSSHStoreConfig( + std::string_view scheme, const ParsedURL::Authority & authority, const Params & params) : StoreConfig(params) - , host(extractConnStr(scheme, host)) + , authority(authority) { } SSHMaster CommonSSHStoreConfig::createSSHMaster(bool useMaster, Descriptor logFD) const { return { - host, + authority, sshKey.get(), sshPublicHostKey.get(), useMaster, diff --git a/src/libstore/include/nix/store/common-ssh-store-config.hh b/src/libstore/include/nix/store/common-ssh-store-config.hh index 9e6a24b74..bbd81835d 100644 --- a/src/libstore/include/nix/store/common-ssh-store-config.hh +++ b/src/libstore/include/nix/store/common-ssh-store-config.hh @@ -2,6 +2,7 @@ ///@file #include "nix/store/store-api.hh" +#include "nix/util/url.hh" namespace nix { @@ -11,7 +12,8 @@ struct CommonSSHStoreConfig : virtual StoreConfig { using StoreConfig::StoreConfig; - CommonSSHStoreConfig(std::string_view scheme, std::string_view host, const Params & params); + CommonSSHStoreConfig(std::string_view scheme, const ParsedURL::Authority & authority, const Params & params); + CommonSSHStoreConfig(std::string_view scheme, std::string_view authority, const Params & params); const Setting sshKey{ this, "", "ssh-key", "Path to the SSH private key used to authenticate to the remote machine."}; @@ -32,23 +34,9 @@ struct CommonSSHStoreConfig : virtual StoreConfig )"}; /** - * The `parseURL` function supports both IPv6 URIs as defined in - * RFC2732, but also pure addresses. The latter one is needed here to - * connect to a remote store via SSH (it's possible to do e.g. `ssh root@::1`). - * - * When initialized, the following adjustments are made: - * - * - If the URL looks like `root@[::1]` (which is allowed by the URL parser and probably - * needed to pass further flags), it - * will be transformed into `root@::1` for SSH (same for `[::1]` -> `::1`). - * - * - If the URL looks like `root@::1` it will be left as-is. - * - * - In any other case, the string will be left as-is. - * - * Will throw an error if `connStr` is empty too. + * Authority representing the SSH host to connect to. */ - std::string host; + ParsedURL::Authority authority; /** * Small wrapper around `SSHMaster::SSHMaster` that gets most diff --git a/src/libstore/include/nix/store/globals.hh b/src/libstore/include/nix/store/globals.hh index 310aca80d..0014a6638 100644 --- a/src/libstore/include/nix/store/globals.hh +++ b/src/libstore/include/nix/store/globals.hh @@ -291,7 +291,7 @@ public: Only the first element is required. To leave a field at its default, set it to `-`. - 1. The URI of the remote store in the format `ssh://[username@]hostname`. + 1. The URI of the remote store in the format `ssh://[username@]hostname[:port]`. 
> **Example** > diff --git a/src/libstore/include/nix/store/ssh.hh b/src/libstore/include/nix/store/ssh.hh index 998312ddf..6eb38acef 100644 --- a/src/libstore/include/nix/store/ssh.hh +++ b/src/libstore/include/nix/store/ssh.hh @@ -2,6 +2,7 @@ ///@file #include "nix/util/sync.hh" +#include "nix/util/url.hh" #include "nix/util/processes.hh" #include "nix/util/file-system.hh" @@ -11,7 +12,8 @@ class SSHMaster { private: - const std::string host; + ParsedURL::Authority authority; + std::string hostnameAndUser; bool fakeSSH; const std::string keyFile; /** @@ -43,7 +45,7 @@ private: public: SSHMaster( - std::string_view host, + const ParsedURL::Authority & authority, std::string_view keyFile, std::string_view sshPublicHostKey, bool useMaster, diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 09bea1ca3..075702f93 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -18,7 +18,7 @@ namespace nix { LegacySSHStoreConfig::LegacySSHStoreConfig(std::string_view scheme, std::string_view authority, const Params & params) : StoreConfig(params) - , CommonSSHStoreConfig(scheme, authority, params) + , CommonSSHStoreConfig(scheme, ParsedURL::Authority::parse(authority), params) { } @@ -71,7 +71,7 @@ ref LegacySSHStore::openConnection() TeeSource tee(conn->from, saved); try { conn->remoteVersion = - ServeProto::BasicClientConnection::handshake(conn->to, tee, SERVE_PROTOCOL_VERSION, config->host); + ServeProto::BasicClientConnection::handshake(conn->to, tee, SERVE_PROTOCOL_VERSION, config->authority.host); } catch (SerialisationError & e) { // in.close(): Don't let the remote block on us not writing. conn->sshConn->in.close(); @@ -79,9 +79,10 @@ ref LegacySSHStore::openConnection() NullSink nullSink; tee.drainInto(nullSink); } - throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'", config->host, chomp(saved.s)); + throw Error( + "'nix-store --serve' protocol mismatch from '%s', got '%s'", config->authority.host, chomp(saved.s)); } catch (EndOfFile & e) { - throw Error("cannot connect to '%1%'", config->host); + throw Error("cannot connect to '%1%'", config->authority.host); } return conn; @@ -89,7 +90,7 @@ ref LegacySSHStore::openConnection() std::string LegacySSHStore::getUri() { - return *Config::uriSchemes().begin() + "://" + config->host; + return *Config::uriSchemes().begin() + "://" + config->authority.to_string(); } std::map LegacySSHStore::queryPathInfosUncached(const StorePathSet & paths) @@ -99,7 +100,10 @@ std::map LegacySSHStore::queryPathInfosUncached /* No longer support missing NAR hash */ assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4); - debug("querying remote host '%s' for info on '%s'", config->host, concatStringsSep(", ", printStorePathSet(paths))); + debug( + "querying remote host '%s' for info on '%s'", + config->authority.host, + concatStringsSep(", ", printStorePathSet(paths))); auto infos = conn->queryPathInfos(*this, paths); @@ -136,7 +140,7 @@ void LegacySSHStore::queryPathInfoUncached( void LegacySSHStore::addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs) { - debug("adding path '%s' to remote host '%s'", printStorePath(info.path), config->host); + debug("adding path '%s' to remote host '%s'", printStorePath(info.path), config->authority.host); auto conn(connections->get()); @@ -157,7 +161,8 @@ void LegacySSHStore::addToStore(const ValidPathInfo & info, Source & source, Rep conn->to.flush(); if (readInt(conn->from) != 1) - throw 
Error("failed to add path '%s' to remote host '%s'", printStorePath(info.path), config->host); + throw Error( + "failed to add path '%s' to remote host '%s'", printStorePath(info.path), config->authority.host); } else { diff --git a/src/libstore/legacy-ssh-store.md b/src/libstore/legacy-ssh-store.md index 043acebd6..c33fc8992 100644 --- a/src/libstore/legacy-ssh-store.md +++ b/src/libstore/legacy-ssh-store.md @@ -1,6 +1,6 @@ R"( -**Store URL format**: `ssh://[username@]hostname` +**Store URL format**: `ssh://[username@]hostname[:port]` This store type allows limited access to a remote store on another machine via SSH. diff --git a/src/libstore/ssh-store.md b/src/libstore/ssh-store.md index 881537e71..26e0d6e39 100644 --- a/src/libstore/ssh-store.md +++ b/src/libstore/ssh-store.md @@ -1,6 +1,6 @@ R"( -**Store URL format**: `ssh-ng://[username@]hostname` +**Store URL format**: `ssh-ng://[username@]hostname[:port]` Experimental store type that allows full access to a Nix store on a remote machine. diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index 474b3622a..8ed72c643 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -18,24 +18,62 @@ static std::string parsePublicHostKey(std::string_view host, std::string_view ss } } +class InvalidSSHAuthority : public Error +{ +public: + InvalidSSHAuthority(const ParsedURL::Authority & authority, std::string_view reason) + : Error("invalid SSH authority: '%s': %s", authority.to_string(), reason) + { + } +}; + +/** + * Checks if the hostname/username are valid for use with ssh. + * + * @todo Enforce this better. Probably this needs to reimplement the same logic as in + * https://github.com/openssh/openssh-portable/blob/6ebd472c391a73574abe02771712d407c48e130d/ssh.c#L648-L681 + */ +static void checkValidAuthority(const ParsedURL::Authority & authority) +{ + if (const auto & user = authority.user) { + if (user->empty()) + throw InvalidSSHAuthority(authority, "user name must not be empty"); + if (user->starts_with("-")) + throw InvalidSSHAuthority(authority, fmt("user name '%s' must not start with '-'", *user)); + } + + { + std::string_view host = authority.host; + if (host.empty()) + throw InvalidSSHAuthority(authority, "host name must not be empty"); + if (host.starts_with("-")) + throw InvalidSSHAuthority(authority, fmt("host name '%s' must not start with '-'", host)); + } +} + SSHMaster::SSHMaster( - std::string_view host, + const ParsedURL::Authority & authority, std::string_view keyFile, std::string_view sshPublicHostKey, bool useMaster, bool compress, Descriptor logFD) - : host(host) - , fakeSSH(host == "localhost") + : authority(authority) + , hostnameAndUser([authority]() { + std::ostringstream oss; + if (authority.user) + oss << *authority.user << "@"; + oss << authority.host; + return std::move(oss).str(); + }()) + , fakeSSH(authority.host == "localhost") , keyFile(keyFile) - , sshPublicHostKey(parsePublicHostKey(host, sshPublicHostKey)) + , sshPublicHostKey(parsePublicHostKey(authority.host, sshPublicHostKey)) , useMaster(useMaster && !fakeSSH) , compress(compress) , logFD(logFD) { - if (host == "" || hasPrefix(host, "-")) - throw Error("invalid SSH host name '%s'", host); - + checkValidAuthority(authority); auto state(state_.lock()); state->tmpDir = std::make_unique(createTempDir("", "nix", 0700)); } @@ -59,14 +97,15 @@ void SSHMaster::addCommonSSHOpts(Strings & args) args.insert(args.end(), {"-i", keyFile}); if (!sshPublicHostKey.empty()) { std::filesystem::path fileName = state->tmpDir->path() / "host-key"; - auto p = 
host.rfind("@"); - std::string thost = p != std::string::npos ? std::string(host, p + 1) : host; - writeFile(fileName.string(), thost + " " + sshPublicHostKey + "\n"); + writeFile(fileName.string(), authority.host + " " + sshPublicHostKey + "\n"); args.insert(args.end(), {"-oUserKnownHostsFile=" + fileName.string()}); } if (compress) args.push_back("-C"); + if (authority.port) + args.push_back(fmt("-p%d", *authority.port)); + // We use this to make ssh signal back to us that the connection is established. // It really does run locally; see createSSHEnv which sets up SHELL to make // it launch more reliably. The local command runs synchronously, so presumably @@ -77,7 +116,7 @@ void SSHMaster::addCommonSSHOpts(Strings & args) bool SSHMaster::isMasterRunning() { - Strings args = {"-O", "check", host}; + Strings args = {"-O", "check", hostnameAndUser}; addCommonSSHOpts(args); auto res = runProgram(RunOptions{.program = "ssh", .args = args, .mergeStderrToStdout = true}); @@ -142,7 +181,7 @@ std::unique_ptr SSHMaster::startCommand(Strings && comman Strings args; if (!fakeSSH) { - args = {"ssh", host.c_str(), "-x"}; + args = {"ssh", hostnameAndUser.c_str(), "-x"}; addCommonSSHOpts(args); if (socketPath != "") args.insert(args.end(), {"-S", socketPath}); @@ -175,7 +214,7 @@ std::unique_ptr SSHMaster::startCommand(Strings && comman if (reply != "started") { printTalkative("SSH stdout first line: %s", reply); - throw Error("failed to start SSH connection to '%s'", host); + throw Error("failed to start SSH connection to '%s'", authority.host); } } @@ -220,7 +259,7 @@ Path SSHMaster::startMaster() if (dup2(out.writeSide.get(), STDOUT_FILENO) == -1) throw SysError("duping over stdout"); - Strings args = {"ssh", host.c_str(), "-M", "-N", "-S", state->socketPath}; + Strings args = {"ssh", hostnameAndUser.c_str(), "-M", "-N", "-S", state->socketPath}; if (verbosity >= lvlChatty) args.push_back("-v"); addCommonSSHOpts(args); @@ -241,7 +280,7 @@ Path SSHMaster::startMaster() if (reply != "started") { printTalkative("SSH master stdout first line: %s", reply); - throw Error("failed to start SSH master connection to '%s'", host); + throw Error("failed to start SSH master connection to '%s'", authority.host); } return state->socketPath; diff --git a/src/libstore/store-reference.cc b/src/libstore/store-reference.cc index 99edefeba..13feeae3e 100644 --- a/src/libstore/store-reference.cc +++ b/src/libstore/store-reference.cc @@ -48,7 +48,7 @@ StoreReference StoreReference::parse(const std::string & uri, const StoreReferen auto parsedUri = parseURL(uri); params.insert(parsedUri.query.begin(), parsedUri.query.end()); - auto baseURI = parsedUri.authority.value_or("") + parsedUri.path; + auto baseURI = parsedUri.authority.value_or(ParsedURL::Authority{}).to_string() + parsedUri.path; return { .variant = diff --git a/src/libutil-tests/url.cc b/src/libutil-tests/url.cc index 5e9b81f46..fb27689de 100644 --- a/src/libutil-tests/url.cc +++ b/src/libutil-tests/url.cc @@ -7,20 +7,8 @@ namespace nix { /* ----------- tests for url.hh --------------------------------------------------*/ -std::string print_map(StringMap m) -{ - StringMap::iterator it; - std::string s = "{ "; - for (it = m.begin(); it != m.end(); ++it) { - s += "{ "; - s += it->first; - s += " = "; - s += it->second; - s += " } "; - } - s += "}"; - return s; -} +using Authority = ParsedURL::Authority; +using HostType = Authority::HostType; TEST(parseURL, parsesSimpleHttpUrl) { @@ -29,13 +17,14 @@ TEST(parseURL, parsesSimpleHttpUrl) ParsedURL expected{ .scheme = 
"http", - .authority = "www.example.org", + .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, .path = "/file.tar.gz", .query = (StringMap) {}, .fragment = "", }; ASSERT_EQ(parsed, expected); + ASSERT_EQ(s, parsed.to_string()); } TEST(parseURL, parsesSimpleHttpsUrl) @@ -45,13 +34,14 @@ TEST(parseURL, parsesSimpleHttpsUrl) ParsedURL expected{ .scheme = "https", - .authority = "www.example.org", + .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, .path = "/file.tar.gz", .query = (StringMap) {}, .fragment = "", }; ASSERT_EQ(parsed, expected); + ASSERT_EQ(s, parsed.to_string()); } TEST(parseURL, parsesSimpleHttpUrlWithQueryAndFragment) @@ -61,13 +51,14 @@ TEST(parseURL, parsesSimpleHttpUrlWithQueryAndFragment) ParsedURL expected{ .scheme = "https", - .authority = "www.example.org", + .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, .path = "/file.tar.gz", .query = (StringMap) {{"download", "fast"}, {"when", "now"}}, .fragment = "hello", }; ASSERT_EQ(parsed, expected); + ASSERT_EQ(s, parsed.to_string()); } TEST(parseURL, parsesSimpleHttpUrlWithComplexFragment) @@ -77,7 +68,7 @@ TEST(parseURL, parsesSimpleHttpUrlWithComplexFragment) ParsedURL expected{ .scheme = "http", - .authority = "www.example.org", + .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, .path = "/file.tar.gz", .query = (StringMap) {{"field", "value"}}, .fragment = "?foo=bar#", @@ -93,13 +84,14 @@ TEST(parseURL, parsesFilePlusHttpsUrl) ParsedURL expected{ .scheme = "file+https", - .authority = "www.example.org", + .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, .path = "/video.mp4", .query = (StringMap) {}, .fragment = "", }; ASSERT_EQ(parsed, expected); + ASSERT_EQ(s, parsed.to_string()); } TEST(parseURL, rejectsAuthorityInUrlsWithFileTransportation) @@ -115,13 +107,14 @@ TEST(parseURL, parseIPv4Address) ParsedURL expected{ .scheme = "http", - .authority = "127.0.0.1:8080", + .authority = Authority{.hostType = HostType::IPv4, .host = "127.0.0.1", .port = 8080}, .path = "/file.tar.gz", .query = (StringMap) {{"download", "fast"}, {"when", "now"}}, .fragment = "hello", }; ASSERT_EQ(parsed, expected); + ASSERT_EQ(s, parsed.to_string()); } TEST(parseURL, parseScopedRFC6874IPv6Address) @@ -131,13 +124,14 @@ TEST(parseURL, parseScopedRFC6874IPv6Address) ParsedURL expected{ .scheme = "http", - .authority = "[fe80::818c:da4d:8975:415c\%enp0s25]:8080", + .authority = Authority{.hostType = HostType::IPv6, .host = "fe80::818c:da4d:8975:415c\%enp0s25", .port = 8080}, .path = "", .query = (StringMap) {}, .fragment = "", }; ASSERT_EQ(parsed, expected); + ASSERT_EQ(s, parsed.to_string()); } TEST(parseURL, parseIPv6Address) @@ -147,13 +141,19 @@ TEST(parseURL, parseIPv6Address) ParsedURL expected{ .scheme = "http", - .authority = "[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080", + .authority = + Authority{ + .hostType = HostType::IPv6, + .host = "2a02:8071:8192:c100:311d:192d:81ac:11ea", + .port = 8080, + }, .path = "", .query = (StringMap) {}, .fragment = "", }; ASSERT_EQ(parsed, expected); + ASSERT_EQ(s, parsed.to_string()); } TEST(parseURL, parseEmptyQueryParams) @@ -170,13 +170,21 @@ TEST(parseURL, parseUserPassword) ParsedURL expected{ .scheme = "http", - .authority = "user:pass@www.example.org:8080", + .authority = + Authority{ + .hostType = HostType::Name, + .host = "www.example.org", + .user = "user", + .password = "pass", + .port = 8080, + }, .path = "/file.tar.gz", .query = (StringMap) {}, 
.fragment = "", }; ASSERT_EQ(parsed, expected); + ASSERT_EQ(s, parsed.to_string()); } TEST(parseURL, parseFileURLWithQueryAndFragment) @@ -186,13 +194,14 @@ TEST(parseURL, parseFileURLWithQueryAndFragment) ParsedURL expected{ .scheme = "file", - .authority = "", + .authority = Authority{}, .path = "/none/of//your/business", .query = (StringMap) {}, .fragment = "", }; ASSERT_EQ(parsed, expected); + ASSERT_EQ(s, parsed.to_string()); } TEST(parseURL, parsedUrlsIsEqualToItself) @@ -210,25 +219,28 @@ TEST(parseURL, parseFTPUrl) ParsedURL expected{ .scheme = "ftp", - .authority = "ftp.nixos.org", + .authority = Authority{.hostType = HostType::Name, .host = "ftp.nixos.org"}, .path = "/downloads/nixos.iso", .query = (StringMap) {}, .fragment = "", }; ASSERT_EQ(parsed, expected); + ASSERT_EQ(s, parsed.to_string()); } TEST(parseURL, parsesAnythingInUriFormat) { auto s = "whatever://github.com/NixOS/nixpkgs.git"; auto parsed = parseURL(s); + ASSERT_EQ(s, parsed.to_string()); } TEST(parseURL, parsesAnythingInUriFormatWithoutDoubleSlash) { auto s = "whatever:github.com/NixOS/nixpkgs.git"; auto parsed = parseURL(s); + ASSERT_EQ(s, parsed.to_string()); } TEST(parseURL, emptyStringIsInvalidURL) diff --git a/src/libutil/include/nix/util/url.hh b/src/libutil/include/nix/util/url.hh index 1c51ab797..0a6194b19 100644 --- a/src/libutil/include/nix/util/url.hh +++ b/src/libutil/include/nix/util/url.hh @@ -5,10 +5,76 @@ namespace nix { +/** + * Represents a parsed RFC3986 URL. + * + * @note All fields are already percent decoded. + */ struct ParsedURL { + /** + * Parsed representation of a URL authority. + * + * It consists of user information, hostname and an optional port number. + * Note that passwords in the userinfo are not yet supported and are ignored. + * + * @todo Maybe support passwords in userinfo part of the url for auth. + */ + struct Authority + { + enum class HostType { + Name, //< Registered name (can be empty) + IPv4, + IPv6, + IPvFuture + }; + + static Authority parse(std::string_view encodedAuthority); + bool operator==(const Authority & other) const = default; + std::string to_string() const; + friend std::ostream & operator<<(std::ostream & os, const Authority & self); + + /** + * Type of the host subcomponent, as specified by rfc3986 3.2.2. Host. + */ + HostType hostType = HostType::Name; + + /** + * Host subcomponent. Either a registered name or IPv{4,6,Future} literal addresses. + * + * IPv6 enclosing brackets are already stripped. Percent encoded characters + * in the hostname are decoded. + */ + std::string host; + + /** Percent-decoded user part of the userinfo. */ + std::optional user; + + /** + * Password subcomponent of the authority (if specified). + * + * @warning As per the rfc3986, the password syntax is deprecated, + * but it's necessary to make the parse -> to_string roundtrip. + * We don't use it anywhere (at least intentionally). + * @todo Warn about unused password subcomponent. + */ + std::optional password; + + /** Port subcomponent (if specified). Default value is determined by the scheme. */ + std::optional port; + }; + std::string scheme; - std::optional authority; + /** + * Optional parsed authority component of the URL. + * + * IMPORTANT: An empty authority (i.e. one with an empty host string) and + * a missing authority (std::nullopt) are drastically different cases. This + * is especially important for "file:///path/to/file" URLs defined by RFC8089. + * The presence of the authority is indicated by `//` following the : + * part of the URL. 
+ */ + std::optional authority; std::string path; StringMap query; std::string fragment; diff --git a/src/libutil/url.cc b/src/libutil/url.cc index 2f9c7736a..134d313ed 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -34,6 +34,81 @@ static std::string percentEncodeSpaces(std::string_view url) return replaceStrings(std::string(url), " ", percentEncode(" ")); } +ParsedURL::Authority ParsedURL::Authority::parse(std::string_view encodedAuthority) +{ + auto parsed = boost::urls::parse_authority(encodedAuthority); + if (!parsed) + throw BadURL("invalid URL authority: '%s': %s", encodedAuthority, parsed.error().message()); + + auto hostType = [&]() { + switch (parsed->host_type()) { + case boost::urls::host_type::ipv4: + return HostType::IPv4; + case boost::urls::host_type::ipv6: + return HostType::IPv6; + case boost::urls::host_type::ipvfuture: + return HostType::IPvFuture; + case boost::urls::host_type::none: + case boost::urls::host_type::name: + return HostType::Name; + } + unreachable(); + }(); + + auto port = [&]() -> std::optional { + if (!parsed->has_port()) + return std::nullopt; + /* If the port number is non-zero and representable. */ + if (auto portNumber = parsed->port_number()) + return portNumber; + throw BadURL("port '%s' is invalid", parsed->port()); + }(); + + return { + .hostType = hostType, + .host = parsed->host_address(), + .user = parsed->has_userinfo() ? parsed->user() : std::optional{}, + .password = parsed->has_password() ? parsed->password() : std::optional{}, + .port = port, + }; +} + +std::ostream & operator<<(std::ostream & os, const ParsedURL::Authority & self) +{ + if (self.user) { + os << percentEncode(*self.user); + if (self.password) + os << ":" << percentEncode(*self.password); + os << "@"; + } + + using HostType = ParsedURL::Authority::HostType; + switch (self.hostType) { + case HostType::Name: + os << percentEncode(self.host); + break; + case HostType::IPv4: + os << self.host; + break; + case HostType::IPv6: + case HostType::IPvFuture: + /* Reencode percent sign for RFC4007 ScopeId literals. */ + os << "[" << percentEncode(self.host, ":") << "]"; + } + + if (self.port) + os << ":" << *self.port; + + return os; +} + +std::string ParsedURL::Authority::to_string() const +{ + std::ostringstream oss; + oss << *this; + return std::move(oss).str(); +} + ParsedURL parseURL(const std::string & url) try { /* Drop the shevron suffix used for the flakerefs. Shevron character is reserved and @@ -47,14 +122,21 @@ try { throw BadURL("'%s' doesn't have a scheme", url); auto scheme = urlView.scheme(); - auto authority = [&]() -> std::optional { + auto authority = [&]() -> std::optional { if (urlView.has_authority()) - return percentDecode(urlView.authority().buffer()); + return ParsedURL::Authority::parse(urlView.authority().buffer()); return std::nullopt; }(); + /* 3.2.2. Host (RFC3986): + * If the URI scheme defines a default for host, then that default + * applies when the host subcomponent is undefined or when the + * registered name is empty (zero length). For example, the "file" URI + * scheme is defined so that no authority, an empty host, and + * "localhost" all mean the end-user's machine, whereas the "http" + * scheme considers a missing authority or empty host invalid. 
*/ auto transportIsFile = parseUrlScheme(scheme).transport == "file"; - if (authority && *authority != "" && transportIsFile) + if (authority && authority->host.size() && transportIsFile) throw BadURL("file:// URL '%s' has unexpected authority '%s'", url, *authority); auto path = urlView.path(); /* Does pct-decoding */ @@ -135,7 +217,7 @@ std::string encodeQuery(const StringMap & ss) std::string ParsedURL::to_string() const { - return scheme + ":" + (authority ? "//" + *authority : "") + percentEncode(path, allowedInPath) + return scheme + ":" + (authority ? "//" + authority->to_string() : "") + percentEncode(path, allowedInPath) + (query.empty() ? "" : "?" + encodeQuery(query)) + (fragment.empty() ? "" : "#" + percentEncode(fragment)); } @@ -177,7 +259,7 @@ std::string fixGitURL(const std::string & url) if (hasPrefix(url, "file:")) return url; if (url.find("://") == std::string::npos) { - return (ParsedURL{.scheme = "file", .authority = "", .path = url}).to_string(); + return (ParsedURL{.scheme = "file", .authority = ParsedURL::Authority{}, .path = url}).to_string(); } return url; } diff --git a/tests/nixos/remote-builds.nix b/tests/nixos/remote-builds.nix index fbfff9a7d..3bfb651bd 100644 --- a/tests/nixos/remote-builds.nix +++ b/tests/nixos/remote-builds.nix @@ -18,6 +18,9 @@ let services.openssh.enable = true; virtualisation.writableStore = true; nix.settings.sandbox = true; + services.openssh.ports = [ + 22 + ] ++ lib.optional supportsCustomPort 2222; # Regression test for use of PID namespaces when /proc has # filesystems mounted on top of it @@ -42,6 +45,7 @@ let supportsBadShell = lib.versionAtLeast config.nodes.client.nix.package.version "2.25pre"; + supportsCustomPort = lib.versionAtLeast config.nodes.client.nix.package.version "2.31.0pre20250806"; in { @@ -74,7 +78,7 @@ in nix.distributedBuilds = true; nix.buildMachines = [ { - hostName = "builder1"; + hostName = "builder1" + (lib.optionalString supportsCustomPort ":2222"); sshUser = "root"; sshKey = "/root/.ssh/id_ed25519"; system = "i686-linux"; From da028aa454743683692bf1b50f9f559583f09b4a Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 7 Aug 2025 02:34:21 +0300 Subject: [PATCH 117/382] maintainers: Fetch patch for multiline formatting in meson --- maintainers/flake-module.nix | 237 ++++++++++++++++++----------------- 1 file changed, 125 insertions(+), 112 deletions(-) diff --git a/maintainers/flake-module.nix b/maintainers/flake-module.nix index b6686f134..1905d6e6b 100644 --- a/maintainers/flake-module.nix +++ b/maintainers/flake-module.nix @@ -37,118 +37,131 @@ fi ''}"; }; - meson-format = { - enable = true; - files = "(meson.build|meson.options)$"; - entry = "${pkgs.writeScript "format-meson" '' - #!${pkgs.runtimeShell} - for file in "$@"; do - ${lib.getExe pkgs.meson} format -ic ${../meson.format} "$file" - done - ''}"; - excludes = [ - # We haven't applied formatting to these files yet - ''^doc/manual/meson.build$'' - ''^doc/manual/source/command-ref/meson.build$'' - ''^doc/manual/source/development/meson.build$'' - ''^doc/manual/source/language/meson.build$'' - ''^doc/manual/source/meson.build$'' - ''^doc/manual/source/release-notes/meson.build$'' - ''^doc/manual/source/store/meson.build$'' - ''^misc/bash/meson.build$'' - ''^misc/fish/meson.build$'' - ''^misc/launchd/meson.build$'' - ''^misc/meson.build$'' - ''^misc/systemd/meson.build$'' - ''^misc/zsh/meson.build$'' - ''^nix-meson-build-support/$'' - ''^nix-meson-build-support/big-objs/meson.build$'' - ''^nix-meson-build-support/common/meson.build$'' - 
''^nix-meson-build-support/deps-lists/meson.build$'' - ''^nix-meson-build-support/export/meson.build$'' - ''^nix-meson-build-support/export-all-symbols/meson.build$'' - ''^nix-meson-build-support/generate-header/meson.build$'' - ''^nix-meson-build-support/libatomic/meson.build$'' - ''^nix-meson-build-support/subprojects/meson.build$'' - ''^scripts/meson.build$'' - ''^src/external-api-docs/meson.build$'' - ''^src/internal-api-docs/meson.build$'' - ''^src/libcmd/include/nix/cmd/meson.build$'' - ''^src/libcmd/meson.build$'' - ''^src/libcmd/nix-meson-build-support$'' - ''^src/libexpr/include/nix/expr/meson.build$'' - ''^src/libexpr/meson.build$'' - ''^src/libexpr/nix-meson-build-support$'' - ''^src/libexpr-c/meson.build$'' - ''^src/libexpr-c/nix-meson-build-support$'' - ''^src/libexpr-test-support/meson.build$'' - ''^src/libexpr-test-support/nix-meson-build-support$'' - ''^src/libexpr-tests/meson.build$'' - ''^src/libexpr-tests/nix-meson-build-support$'' - ''^src/libfetchers/include/nix/fetchers/meson.build$'' - ''^src/libfetchers/meson.build$'' - ''^src/libfetchers/nix-meson-build-support$'' - ''^src/libfetchers-c/meson.build$'' - ''^src/libfetchers-c/nix-meson-build-support$'' - ''^src/libfetchers-tests/meson.build$'' - ''^src/libfetchers-tests/nix-meson-build-support$'' - ''^src/libflake/include/nix/flake/meson.build$'' - ''^src/libflake/meson.build$'' - ''^src/libflake/nix-meson-build-support$'' - ''^src/libflake-c/meson.build$'' - ''^src/libflake-c/nix-meson-build-support$'' - ''^src/libflake-tests/meson.build$'' - ''^src/libflake-tests/nix-meson-build-support$'' - ''^src/libmain/include/nix/main/meson.build$'' - ''^src/libmain/meson.build$'' - ''^src/libmain/nix-meson-build-support$'' - ''^src/libmain-c/meson.build$'' - ''^src/libmain-c/nix-meson-build-support$'' - ''^src/libstore/include/nix/store/meson.build$'' - ''^src/libstore/meson.build$'' - ''^src/libstore/nix-meson-build-support$'' - ''^src/libstore/unix/include/nix/store/meson.build$'' - ''^src/libstore/unix/meson.build$'' - ''^src/libstore/windows/meson.build$'' - ''^src/libstore-c/meson.build$'' - ''^src/libstore-c/nix-meson-build-support$'' - ''^src/libstore-test-support/include/nix/store/tests/meson.build$'' - ''^src/libstore-test-support/meson.build$'' - ''^src/libstore-test-support/nix-meson-build-support$'' - ''^src/libstore-tests/meson.build$'' - ''^src/libstore-tests/nix-meson-build-support$'' - ''^src/libutil/meson.build$'' - ''^src/libutil/nix-meson-build-support$'' - ''^src/libutil/unix/include/nix/util/meson.build$'' - ''^src/libutil/unix/meson.build$'' - ''^src/libutil/windows/meson.build$'' - ''^src/libutil-c/meson.build$'' - ''^src/libutil-c/nix-meson-build-support$'' - ''^src/libutil-test-support/include/nix/util/tests/meson.build$'' - ''^src/libutil-test-support/meson.build$'' - ''^src/libutil-test-support/nix-meson-build-support$'' - ''^src/libutil-tests/meson.build$'' - ''^src/libutil-tests/nix-meson-build-support$'' - ''^src/nix/meson.build$'' - ''^src/nix/nix-meson-build-support$'' - ''^src/perl/lib/Nix/meson.build$'' - ''^src/perl/meson.build$'' - ''^tests/functional/ca/meson.build$'' - ''^tests/functional/common/meson.build$'' - ''^tests/functional/dyn-drv/meson.build$'' - ''^tests/functional/flakes/meson.build$'' - ''^tests/functional/git-hashing/meson.build$'' - ''^tests/functional/local-overlay-store/meson.build$'' - ''^tests/functional/meson.build$'' - ''^src/libcmd/meson.options$'' - ''^src/libexpr/meson.options$'' - ''^src/libstore/meson.options$'' - ''^src/libutil/meson.options$'' - 
''^src/libutil-c/meson.options$'' - ''^src/nix/meson.options$'' - ''^src/perl/meson.options$'' - ]; - }; + meson-format = + let + meson = pkgs.meson.overrideAttrs { + doCheck = false; + doInstallCheck = false; + patches = [ + (pkgs.fetchpatch { + url = "https://github.com/mesonbuild/meson/commit/38d29b4dd19698d5cad7b599add2a69b243fd88a.patch"; + hash = "sha256-PgPBvGtCISKn1qQQhzBW5XfknUe91i5XGGBcaUK4yeE="; + }) + ]; + }; + in + { + enable = true; + files = "(meson.build|meson.options)$"; + entry = "${pkgs.writeScript "format-meson" '' + #!${pkgs.runtimeShell} + for file in "$@"; do + ${lib.getExe meson} format -ic ${../meson.format} "$file" + done + ''}"; + excludes = [ + # We haven't applied formatting to these files yet + ''^doc/manual/meson.build$'' + ''^doc/manual/source/command-ref/meson.build$'' + ''^doc/manual/source/development/meson.build$'' + ''^doc/manual/source/language/meson.build$'' + ''^doc/manual/source/meson.build$'' + ''^doc/manual/source/release-notes/meson.build$'' + ''^doc/manual/source/store/meson.build$'' + ''^misc/bash/meson.build$'' + ''^misc/fish/meson.build$'' + ''^misc/launchd/meson.build$'' + ''^misc/meson.build$'' + ''^misc/systemd/meson.build$'' + ''^misc/zsh/meson.build$'' + ''^nix-meson-build-support/$'' + ''^nix-meson-build-support/big-objs/meson.build$'' + ''^nix-meson-build-support/common/meson.build$'' + ''^nix-meson-build-support/deps-lists/meson.build$'' + ''^nix-meson-build-support/export/meson.build$'' + ''^nix-meson-build-support/export-all-symbols/meson.build$'' + ''^nix-meson-build-support/generate-header/meson.build$'' + ''^nix-meson-build-support/libatomic/meson.build$'' + ''^nix-meson-build-support/subprojects/meson.build$'' + ''^scripts/meson.build$'' + ''^src/external-api-docs/meson.build$'' + ''^src/internal-api-docs/meson.build$'' + ''^src/libcmd/include/nix/cmd/meson.build$'' + ''^src/libcmd/meson.build$'' + ''^src/libcmd/nix-meson-build-support$'' + ''^src/libexpr/include/nix/expr/meson.build$'' + ''^src/libexpr/meson.build$'' + ''^src/libexpr/nix-meson-build-support$'' + ''^src/libexpr-c/meson.build$'' + ''^src/libexpr-c/nix-meson-build-support$'' + ''^src/libexpr-test-support/meson.build$'' + ''^src/libexpr-test-support/nix-meson-build-support$'' + ''^src/libexpr-tests/meson.build$'' + ''^src/libexpr-tests/nix-meson-build-support$'' + ''^src/libfetchers/include/nix/fetchers/meson.build$'' + ''^src/libfetchers/meson.build$'' + ''^src/libfetchers/nix-meson-build-support$'' + ''^src/libfetchers-c/meson.build$'' + ''^src/libfetchers-c/nix-meson-build-support$'' + ''^src/libfetchers-tests/meson.build$'' + ''^src/libfetchers-tests/nix-meson-build-support$'' + ''^src/libflake/include/nix/flake/meson.build$'' + ''^src/libflake/meson.build$'' + ''^src/libflake/nix-meson-build-support$'' + ''^src/libflake-c/meson.build$'' + ''^src/libflake-c/nix-meson-build-support$'' + ''^src/libflake-tests/meson.build$'' + ''^src/libflake-tests/nix-meson-build-support$'' + ''^src/libmain/include/nix/main/meson.build$'' + ''^src/libmain/meson.build$'' + ''^src/libmain/nix-meson-build-support$'' + ''^src/libmain-c/meson.build$'' + ''^src/libmain-c/nix-meson-build-support$'' + ''^src/libstore/include/nix/store/meson.build$'' + ''^src/libstore/meson.build$'' + ''^src/libstore/nix-meson-build-support$'' + ''^src/libstore/unix/include/nix/store/meson.build$'' + ''^src/libstore/unix/meson.build$'' + ''^src/libstore/windows/meson.build$'' + ''^src/libstore-c/meson.build$'' + ''^src/libstore-c/nix-meson-build-support$'' + 
''^src/libstore-test-support/include/nix/store/tests/meson.build$'' + ''^src/libstore-test-support/meson.build$'' + ''^src/libstore-test-support/nix-meson-build-support$'' + ''^src/libstore-tests/meson.build$'' + ''^src/libstore-tests/nix-meson-build-support$'' + ''^src/libutil/meson.build$'' + ''^src/libutil/nix-meson-build-support$'' + ''^src/libutil/unix/include/nix/util/meson.build$'' + ''^src/libutil/unix/meson.build$'' + ''^src/libutil/windows/meson.build$'' + ''^src/libutil-c/meson.build$'' + ''^src/libutil-c/nix-meson-build-support$'' + ''^src/libutil-test-support/include/nix/util/tests/meson.build$'' + ''^src/libutil-test-support/meson.build$'' + ''^src/libutil-test-support/nix-meson-build-support$'' + ''^src/libutil-tests/meson.build$'' + ''^src/libutil-tests/nix-meson-build-support$'' + ''^src/nix/meson.build$'' + ''^src/nix/nix-meson-build-support$'' + ''^src/perl/lib/Nix/meson.build$'' + ''^src/perl/meson.build$'' + ''^tests/functional/ca/meson.build$'' + ''^tests/functional/common/meson.build$'' + ''^tests/functional/dyn-drv/meson.build$'' + ''^tests/functional/flakes/meson.build$'' + ''^tests/functional/git-hashing/meson.build$'' + ''^tests/functional/local-overlay-store/meson.build$'' + ''^tests/functional/meson.build$'' + ''^src/libcmd/meson.options$'' + ''^src/libexpr/meson.options$'' + ''^src/libstore/meson.options$'' + ''^src/libutil/meson.options$'' + ''^src/libutil-c/meson.options$'' + ''^src/nix/meson.options$'' + ''^src/perl/meson.options$'' + ]; + }; nixfmt-rfc-style = { enable = true; excludes = [ From bf80696ed976ff94ba22f8ada556c57cc71d3ef1 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 7 Aug 2025 02:41:05 +0300 Subject: [PATCH 118/382] meson: Get rid of multiline array formatting hack --- meson.build | 1 - src/libexpr-test-support/include/nix/expr/tests/meson.build | 1 - src/libstore/linux/include/nix/store/meson.build | 1 - src/libstore/linux/meson.build | 1 - src/libutil/freebsd/include/nix/util/meson.build | 1 - src/libutil/freebsd/meson.build | 1 - src/libutil/linux/include/nix/util/meson.build | 1 - src/libutil/linux/meson.build | 1 - src/libutil/windows/include/nix/util/meson.build | 1 - src/perl/t/meson.build | 1 - tests/functional/plugins/meson.build | 1 - tests/functional/test-libstoreconsumer/meson.build | 1 - 12 files changed, 12 deletions(-) diff --git a/meson.build b/meson.build index 28f7ccbbb..5dcf98717 100644 --- a/meson.build +++ b/meson.build @@ -8,7 +8,6 @@ project( subproject_dir : 'src', default_options : [ 'localstatedir=/nix/var', - # hack for trailing newline ], meson_version : '>= 1.1', ) diff --git a/src/libexpr-test-support/include/nix/expr/tests/meson.build b/src/libexpr-test-support/include/nix/expr/tests/meson.build index 84ec401ab..6575d11c8 100644 --- a/src/libexpr-test-support/include/nix/expr/tests/meson.build +++ b/src/libexpr-test-support/include/nix/expr/tests/meson.build @@ -6,5 +6,4 @@ headers = files( 'libexpr.hh', 'nix_api_expr.hh', 'value/context.hh', - # hack for trailing newline ) diff --git a/src/libstore/linux/include/nix/store/meson.build b/src/libstore/linux/include/nix/store/meson.build index c8e6a8268..a664aefa9 100644 --- a/src/libstore/linux/include/nix/store/meson.build +++ b/src/libstore/linux/include/nix/store/meson.build @@ -2,5 +2,4 @@ include_dirs += include_directories('../..') headers += files( 'personality.hh', - # hack for trailing newline ) diff --git a/src/libstore/linux/meson.build b/src/libstore/linux/meson.build index 5771cead5..6fc193cf8 100644 --- a/src/libstore/linux/meson.build 
+++ b/src/libstore/linux/meson.build @@ -1,6 +1,5 @@ sources += files( 'personality.cc', - # hack for trailing newline ) subdir('include/nix/store') diff --git a/src/libutil/freebsd/include/nix/util/meson.build b/src/libutil/freebsd/include/nix/util/meson.build index 4b7d78624..561c8796c 100644 --- a/src/libutil/freebsd/include/nix/util/meson.build +++ b/src/libutil/freebsd/include/nix/util/meson.build @@ -4,5 +4,4 @@ include_dirs += include_directories('../..') headers += files( 'freebsd-jail.hh', - # hack for trailing newline ) diff --git a/src/libutil/freebsd/meson.build b/src/libutil/freebsd/meson.build index d9b91a03d..8ffdc2832 100644 --- a/src/libutil/freebsd/meson.build +++ b/src/libutil/freebsd/meson.build @@ -1,6 +1,5 @@ sources += files( 'freebsd-jail.cc', - # hack for trailing newline ) subdir('include/nix/util') diff --git a/src/libutil/linux/include/nix/util/meson.build b/src/libutil/linux/include/nix/util/meson.build index ec7030c49..e28ad8e05 100644 --- a/src/libutil/linux/include/nix/util/meson.build +++ b/src/libutil/linux/include/nix/util/meson.build @@ -5,5 +5,4 @@ include_dirs += include_directories('../..') headers += files( 'cgroup.hh', 'linux-namespaces.hh', - # hack for trailing newline ) diff --git a/src/libutil/linux/meson.build b/src/libutil/linux/meson.build index 230dd46f3..b8053a5bb 100644 --- a/src/libutil/linux/meson.build +++ b/src/libutil/linux/meson.build @@ -1,7 +1,6 @@ sources += files( 'cgroup.cc', 'linux-namespaces.cc', - # hack for trailing newline ) subdir('include/nix/util') diff --git a/src/libutil/windows/include/nix/util/meson.build b/src/libutil/windows/include/nix/util/meson.build index 5d0ace929..1bd56c4bd 100644 --- a/src/libutil/windows/include/nix/util/meson.build +++ b/src/libutil/windows/include/nix/util/meson.build @@ -6,5 +6,4 @@ headers += files( 'signals-impl.hh', 'windows-async-pipe.hh', 'windows-error.hh', - # hack for trailing newline ) diff --git a/src/perl/t/meson.build b/src/perl/t/meson.build index 5e75920ac..dbd1139f3 100644 --- a/src/perl/t/meson.build +++ b/src/perl/t/meson.build @@ -7,7 +7,6 @@ nix_perl_tests = files( 'init.t', - # hack for trailing newline ) diff --git a/tests/functional/plugins/meson.build b/tests/functional/plugins/meson.build index 41050ffc1..ae66e3036 100644 --- a/tests/functional/plugins/meson.build +++ b/tests/functional/plugins/meson.build @@ -3,7 +3,6 @@ libplugintest = shared_module( 'plugintest.cc', dependencies : [ dependency('nix-expr'), - # hack for trailing newline ], build_by_default : false, ) diff --git a/tests/functional/test-libstoreconsumer/meson.build b/tests/functional/test-libstoreconsumer/meson.build index ce566035f..e5a1cc182 100644 --- a/tests/functional/test-libstoreconsumer/meson.build +++ b/tests/functional/test-libstoreconsumer/meson.build @@ -3,7 +3,6 @@ libstoreconsumer_tester = executable( 'main.cc', dependencies : [ dependency('nix-store'), - # hack for trailing newline ], build_by_default : false, ) From 385e2c3542c707d95e3784f7f6d623f67e77ab61 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 7 Aug 2025 02:52:13 +0300 Subject: [PATCH 119/382] meson: Apply formatting universally Now that we have applied the [1] patch, the diff is much nicer and less noisy. 
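For illustration only, here are two representative hunks from this change (taken from the diff below); spacing inside brackets and around ':' is what `meson format` normalizes, and this is not an exhaustive description of the applied style:

    -  choices : ['editline', 'readline'],
    +  choices : [ 'editline', 'readline' ],

    -  link_args: linker_export_flags,
    +  link_args : linker_export_flags,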
[1]: https://www.github.com/mesonbuild/meson/pull/14861 --- doc/manual/meson.build | 39 ++++--- doc/manual/source/command-ref/meson.build | 18 +--- doc/manual/source/development/meson.build | 3 +- doc/manual/source/language/meson.build | 12 +-- doc/manual/source/meson.build | 5 +- doc/manual/source/store/meson.build | 10 +- maintainers/flake-module.nix | 102 ------------------ misc/launchd/meson.build | 2 +- nix-meson-build-support/big-objs/meson.build | 2 +- .../deps-lists/meson.build | 10 +- .../export-all-symbols/meson.build | 2 +- nix-meson-build-support/export/meson.build | 25 +++-- .../generate-header/meson.build | 9 +- scripts/meson.build | 8 +- src/external-api-docs/meson.build | 7 +- src/internal-api-docs/meson.build | 7 +- src/libcmd/include/nix/cmd/meson.build | 2 +- src/libcmd/meson.build | 24 +++-- src/libcmd/meson.options | 6 +- src/libexpr-c/meson.build | 8 +- src/libexpr-test-support/meson.build | 9 +- src/libexpr-tests/meson.build | 13 +-- src/libexpr/include/nix/expr/meson.build | 4 +- src/libexpr/meson.build | 20 ++-- src/libexpr/meson.options | 4 +- src/libfetchers-c/meson.build | 8 +- src/libfetchers-tests/meson.build | 13 +-- .../include/nix/fetchers/meson.build | 2 +- src/libfetchers/meson.build | 9 +- src/libflake-c/meson.build | 8 +- src/libflake-tests/meson.build | 17 +-- src/libflake/include/nix/flake/meson.build | 2 +- src/libflake/meson.build | 11 +- src/libmain-c/meson.build | 8 +- src/libmain/include/nix/main/meson.build | 2 +- src/libmain/meson.build | 11 +- src/libstore-c/meson.build | 8 +- .../include/nix/store/tests/meson.build | 2 +- src/libstore-test-support/meson.build | 9 +- src/libstore-tests/meson.build | 27 +++-- src/libstore/include/nix/store/meson.build | 6 +- src/libstore/meson.build | 85 ++++++++------- src/libstore/meson.options | 24 ++++- src/libstore/windows/meson.build | 3 +- src/libutil-c/meson.build | 11 +- .../include/nix/util/tests/meson.build | 2 +- src/libutil-test-support/meson.build | 9 +- src/libutil-tests/meson.build | 13 +-- src/libutil/meson.build | 43 ++++---- src/libutil/meson.options | 4 +- src/libutil/unix/meson.build | 4 +- src/nix/meson.build | 39 +++---- src/nix/meson.options | 5 +- src/perl/lib/Nix/meson.build | 6 +- src/perl/meson.build | 53 ++++++--- src/perl/meson.options | 9 +- tests/functional/ca/meson.build | 8 +- tests/functional/dyn-drv/meson.build | 8 +- tests/functional/flakes/meson.build | 8 +- tests/functional/git-hashing/meson.build | 8 +- .../local-overlay-store/meson.build | 8 +- tests/functional/meson.build | 54 +++++----- 62 files changed, 444 insertions(+), 454 deletions(-) diff --git a/doc/manual/meson.build b/doc/manual/meson.build index 0779cd267..2e372dedd 100644 --- a/doc/manual/meson.build +++ b/doc/manual/meson.build @@ -1,4 +1,5 @@ -project('nix-manual', +project( + 'nix-manual', version : files('.version'), meson_version : '>= 1.1', license : 'LGPL-2.1-or-later', @@ -8,44 +9,45 @@ nix = find_program('nix', native : true) mdbook = find_program('mdbook', native : true) bash = find_program('bash', native : true) -rsync = find_program('rsync', required: true, native: true) +rsync = find_program('rsync', required : true, native : true) pymod = import('python') python = pymod.find_installation('python3') nix_env_for_docs = { - 'HOME': '/dummy', - 'NIX_CONF_DIR': '/dummy', - 'NIX_SSL_CERT_FILE': '/dummy/no-ca-bundle.crt', - 'NIX_STATE_DIR': '/dummy', - 'NIX_CONFIG': 'cores = 0', + 'HOME' : '/dummy', + 'NIX_CONF_DIR' : '/dummy', + 'NIX_SSL_CERT_FILE' : '/dummy/no-ca-bundle.crt', + 'NIX_STATE_DIR' : 
'/dummy', + 'NIX_CONFIG' : 'cores = 0', } -nix_for_docs = [nix, '--experimental-features', 'nix-command'] +nix_for_docs = [ nix, '--experimental-features', 'nix-command' ] nix_eval_for_docs_common = nix_for_docs + [ 'eval', - '-I', 'nix=' + meson.current_source_dir(), + '-I', + 'nix=' + meson.current_source_dir(), '--store', 'dummy://', '--impure', ] nix_eval_for_docs = nix_eval_for_docs_common + '--raw' conf_file_json = custom_target( - command : nix_for_docs + ['config', 'show', '--json'], + command : nix_for_docs + [ 'config', 'show', '--json' ], capture : true, output : 'conf-file.json', env : nix_env_for_docs, ) language_json = custom_target( - command: [nix, '__dump-language'], + command : [ nix, '__dump-language' ], output : 'language.json', capture : true, env : nix_env_for_docs, ) nix3_cli_json = custom_target( - command : [nix, '__dump-cli'], + command : [ nix, '__dump-cli' ], capture : true, output : 'nix.json', env : nix_env_for_docs, @@ -79,7 +81,8 @@ manual = custom_target( 'manual', command : [ bash, - '-euo', 'pipefail', + '-euo', + 'pipefail', '-c', ''' @0@ @INPUT0@ @CURRENT_SOURCE_DIR@ > @DEPFILE@ @@ -120,8 +123,8 @@ manual = custom_target( ], depfile : 'manual.d', env : { - 'RUST_LOG': 'info', - 'MDBOOK_SUBSTITUTE_SEARCH': meson.current_build_dir() / 'source', + 'RUST_LOG' : 'info', + 'MDBOOK_SUBSTITUTE_SEARCH' : meson.current_build_dir() / 'source', }, ) manual_html = manual[0] @@ -133,7 +136,8 @@ install_subdir( ) nix_nested_manpages = [ - [ 'nix-env', + [ + 'nix-env', [ 'delete-generations', 'install', @@ -148,7 +152,8 @@ nix_nested_manpages = [ 'upgrade', ], ], - [ 'nix-store', + [ + 'nix-store', [ 'add-fixed', 'add', diff --git a/doc/manual/source/command-ref/meson.build b/doc/manual/source/command-ref/meson.build index 2976f69ff..92998dec1 100644 --- a/doc/manual/source/command-ref/meson.build +++ b/doc/manual/source/command-ref/meson.build @@ -1,13 +1,12 @@ xp_features_json = custom_target( - command : [nix, '__dump-xp-features'], + command : [ nix, '__dump-xp-features' ], capture : true, output : 'xp-features.json', ) experimental_features_shortlist_md = custom_target( command : nix_eval_for_docs + [ - '--expr', - 'import @INPUT0@ (builtins.fromJSON (builtins.readFile ./@INPUT1@))', + '--expr', 'import @INPUT0@ (builtins.fromJSON (builtins.readFile ./@INPUT1@))', ], input : [ '../../generate-xp-features-shortlist.nix', @@ -19,14 +18,8 @@ experimental_features_shortlist_md = custom_target( ) nix3_cli_files = custom_target( - command : [ - python.full_path(), - '@INPUT0@', - '@OUTPUT@', - '--' - ] + nix_eval_for_docs + [ - '--expr', - 'import @INPUT1@ true (builtins.readFile ./@INPUT2@)', + command : [ python.full_path(), '@INPUT0@', '@OUTPUT@', '--' ] + nix_eval_for_docs + [ + '--expr', 'import @INPUT1@ true (builtins.readFile ./@INPUT2@)', ], input : [ '../../remove_before_wrapper.py', @@ -40,8 +33,7 @@ nix3_cli_files = custom_target( conf_file_md_body = custom_target( command : [ nix_eval_for_docs, - '--expr', - 'import @INPUT0@ { prefix = "conf"; } (builtins.fromJSON (builtins.readFile ./@INPUT1@))', + '--expr', 'import @INPUT0@ { prefix = "conf"; } (builtins.fromJSON (builtins.readFile ./@INPUT1@))', ], capture : true, input : [ diff --git a/doc/manual/source/development/meson.build b/doc/manual/source/development/meson.build index 5ffbfe394..4831cf8f0 100644 --- a/doc/manual/source/development/meson.build +++ b/doc/manual/source/development/meson.build @@ -1,7 +1,6 @@ experimental_feature_descriptions_md = custom_target( command : nix_eval_for_docs + [ - 
'--expr', - 'import @INPUT0@ (builtins.fromJSON (builtins.readFile @INPUT1@))', + '--expr', 'import @INPUT0@ (builtins.fromJSON (builtins.readFile @INPUT1@))', ], input : [ '../../generate-xp-features.nix', diff --git a/doc/manual/source/language/meson.build b/doc/manual/source/language/meson.build index 97469e2f3..ed4995bfb 100644 --- a/doc/manual/source/language/meson.build +++ b/doc/manual/source/language/meson.build @@ -1,19 +1,13 @@ builtins_md = custom_target( - command : [ - python.full_path(), - '@INPUT0@', - '@OUTPUT@', - '--' - ] + nix_eval_for_docs + [ - '--expr', - '(builtins.readFile @INPUT3@) + import @INPUT1@ (builtins.fromJSON (builtins.readFile ./@INPUT2@)) + (builtins.readFile @INPUT4@)', + command : [ python.full_path(), '@INPUT0@', '@OUTPUT@', '--' ] + nix_eval_for_docs + [ + '--expr', '(builtins.readFile @INPUT3@) + import @INPUT1@ (builtins.fromJSON (builtins.readFile ./@INPUT2@)) + (builtins.readFile @INPUT4@)', ], input : [ '../../remove_before_wrapper.py', '../../generate-builtins.nix', language_json, 'builtins-prefix.md', - 'builtins-suffix.md' + 'builtins-suffix.md', ], output : 'builtins.md', env : nix_env_for_docs, diff --git a/doc/manual/source/meson.build b/doc/manual/source/meson.build index 098a29897..949d26526 100644 --- a/doc/manual/source/meson.build +++ b/doc/manual/source/meson.build @@ -1,7 +1,8 @@ summary_rl_next = custom_target( command : [ bash, - '-euo', 'pipefail', + '-euo', + 'pipefail', '-c', ''' if [ -e "@INPUT@" ]; then @@ -12,6 +13,6 @@ summary_rl_next = custom_target( input : [ rl_next_generated, ], - capture: true, + capture : true, output : 'SUMMARY-rl-next.md', ) diff --git a/doc/manual/source/store/meson.build b/doc/manual/source/store/meson.build index e3006020d..ec4659ebc 100644 --- a/doc/manual/source/store/meson.build +++ b/doc/manual/source/store/meson.build @@ -1,12 +1,6 @@ types_dir = custom_target( - command : [ - python.full_path(), - '@INPUT0@', - '@OUTPUT@', - '--' - ] + nix_eval_for_docs + [ - '--expr', - 'import @INPUT1@ (builtins.fromJSON (builtins.readFile ./@INPUT2@)).stores', + command : [ python.full_path(), '@INPUT0@', '@OUTPUT@', '--' ] + nix_eval_for_docs + [ + '--expr', 'import @INPUT1@ (builtins.fromJSON (builtins.readFile ./@INPUT2@)).stores', ], input : [ '../../remove_before_wrapper.py', diff --git a/maintainers/flake-module.nix b/maintainers/flake-module.nix index 1905d6e6b..4815313dd 100644 --- a/maintainers/flake-module.nix +++ b/maintainers/flake-module.nix @@ -59,108 +59,6 @@ ${lib.getExe meson} format -ic ${../meson.format} "$file" done ''}"; - excludes = [ - # We haven't applied formatting to these files yet - ''^doc/manual/meson.build$'' - ''^doc/manual/source/command-ref/meson.build$'' - ''^doc/manual/source/development/meson.build$'' - ''^doc/manual/source/language/meson.build$'' - ''^doc/manual/source/meson.build$'' - ''^doc/manual/source/release-notes/meson.build$'' - ''^doc/manual/source/store/meson.build$'' - ''^misc/bash/meson.build$'' - ''^misc/fish/meson.build$'' - ''^misc/launchd/meson.build$'' - ''^misc/meson.build$'' - ''^misc/systemd/meson.build$'' - ''^misc/zsh/meson.build$'' - ''^nix-meson-build-support/$'' - ''^nix-meson-build-support/big-objs/meson.build$'' - ''^nix-meson-build-support/common/meson.build$'' - ''^nix-meson-build-support/deps-lists/meson.build$'' - ''^nix-meson-build-support/export/meson.build$'' - ''^nix-meson-build-support/export-all-symbols/meson.build$'' - ''^nix-meson-build-support/generate-header/meson.build$'' - ''^nix-meson-build-support/libatomic/meson.build$'' - 
''^nix-meson-build-support/subprojects/meson.build$'' - ''^scripts/meson.build$'' - ''^src/external-api-docs/meson.build$'' - ''^src/internal-api-docs/meson.build$'' - ''^src/libcmd/include/nix/cmd/meson.build$'' - ''^src/libcmd/meson.build$'' - ''^src/libcmd/nix-meson-build-support$'' - ''^src/libexpr/include/nix/expr/meson.build$'' - ''^src/libexpr/meson.build$'' - ''^src/libexpr/nix-meson-build-support$'' - ''^src/libexpr-c/meson.build$'' - ''^src/libexpr-c/nix-meson-build-support$'' - ''^src/libexpr-test-support/meson.build$'' - ''^src/libexpr-test-support/nix-meson-build-support$'' - ''^src/libexpr-tests/meson.build$'' - ''^src/libexpr-tests/nix-meson-build-support$'' - ''^src/libfetchers/include/nix/fetchers/meson.build$'' - ''^src/libfetchers/meson.build$'' - ''^src/libfetchers/nix-meson-build-support$'' - ''^src/libfetchers-c/meson.build$'' - ''^src/libfetchers-c/nix-meson-build-support$'' - ''^src/libfetchers-tests/meson.build$'' - ''^src/libfetchers-tests/nix-meson-build-support$'' - ''^src/libflake/include/nix/flake/meson.build$'' - ''^src/libflake/meson.build$'' - ''^src/libflake/nix-meson-build-support$'' - ''^src/libflake-c/meson.build$'' - ''^src/libflake-c/nix-meson-build-support$'' - ''^src/libflake-tests/meson.build$'' - ''^src/libflake-tests/nix-meson-build-support$'' - ''^src/libmain/include/nix/main/meson.build$'' - ''^src/libmain/meson.build$'' - ''^src/libmain/nix-meson-build-support$'' - ''^src/libmain-c/meson.build$'' - ''^src/libmain-c/nix-meson-build-support$'' - ''^src/libstore/include/nix/store/meson.build$'' - ''^src/libstore/meson.build$'' - ''^src/libstore/nix-meson-build-support$'' - ''^src/libstore/unix/include/nix/store/meson.build$'' - ''^src/libstore/unix/meson.build$'' - ''^src/libstore/windows/meson.build$'' - ''^src/libstore-c/meson.build$'' - ''^src/libstore-c/nix-meson-build-support$'' - ''^src/libstore-test-support/include/nix/store/tests/meson.build$'' - ''^src/libstore-test-support/meson.build$'' - ''^src/libstore-test-support/nix-meson-build-support$'' - ''^src/libstore-tests/meson.build$'' - ''^src/libstore-tests/nix-meson-build-support$'' - ''^src/libutil/meson.build$'' - ''^src/libutil/nix-meson-build-support$'' - ''^src/libutil/unix/include/nix/util/meson.build$'' - ''^src/libutil/unix/meson.build$'' - ''^src/libutil/windows/meson.build$'' - ''^src/libutil-c/meson.build$'' - ''^src/libutil-c/nix-meson-build-support$'' - ''^src/libutil-test-support/include/nix/util/tests/meson.build$'' - ''^src/libutil-test-support/meson.build$'' - ''^src/libutil-test-support/nix-meson-build-support$'' - ''^src/libutil-tests/meson.build$'' - ''^src/libutil-tests/nix-meson-build-support$'' - ''^src/nix/meson.build$'' - ''^src/nix/nix-meson-build-support$'' - ''^src/perl/lib/Nix/meson.build$'' - ''^src/perl/meson.build$'' - ''^tests/functional/ca/meson.build$'' - ''^tests/functional/common/meson.build$'' - ''^tests/functional/dyn-drv/meson.build$'' - ''^tests/functional/flakes/meson.build$'' - ''^tests/functional/git-hashing/meson.build$'' - ''^tests/functional/local-overlay-store/meson.build$'' - ''^tests/functional/meson.build$'' - ''^src/libcmd/meson.options$'' - ''^src/libexpr/meson.options$'' - ''^src/libstore/meson.options$'' - ''^src/libutil/meson.options$'' - ''^src/libutil-c/meson.options$'' - ''^src/nix/meson.options$'' - ''^src/perl/meson.options$'' - ]; }; nixfmt-rfc-style = { enable = true; diff --git a/misc/launchd/meson.build b/misc/launchd/meson.build index 5168131d1..53a57b65b 100644 --- a/misc/launchd/meson.build +++ 
b/misc/launchd/meson.build @@ -9,5 +9,5 @@ configure_file( # 'storedir' : store_dir, # 'localstatedir' : localstatedir, # 'bindir' : bindir, - }, +}, ) diff --git a/nix-meson-build-support/big-objs/meson.build b/nix-meson-build-support/big-objs/meson.build index 7e422abd8..f8dd8d1a3 100644 --- a/nix-meson-build-support/big-objs/meson.build +++ b/nix-meson-build-support/big-objs/meson.build @@ -2,5 +2,5 @@ if host_machine.system() == 'windows' # libexpr's primops creates a large object # Without the following flag, we'll get errors when cross-compiling to mingw32: # Fatal error: can't write 66 bytes to section .text of src/libexpr/libnixexpr.dll.p/primops.cc.obj: 'file too big' - add_project_arguments([ '-Wa,-mbig-obj' ], language: 'cpp') + add_project_arguments([ '-Wa,-mbig-obj' ], language : 'cpp') endif diff --git a/nix-meson-build-support/deps-lists/meson.build b/nix-meson-build-support/deps-lists/meson.build index 237eac545..b4609c176 100644 --- a/nix-meson-build-support/deps-lists/meson.build +++ b/nix-meson-build-support/deps-lists/meson.build @@ -6,7 +6,7 @@ # *interface*. # # See `man pkg-config` for some details. -deps_private = [ ] +deps_private = [] # These are public dependencies with pkg-config files. Public is the # opposite of private: these dependencies are used in installed header @@ -23,14 +23,14 @@ deps_private = [ ] # N.B. For distributions that care about "ABI" stability and not just # "API" stability, the private dependencies also matter as they can # potentially affect the public ABI. -deps_public = [ ] +deps_public = [] # These are subproject deps (type == "internal"). They are other # packages in `/src` in this repo. The private vs public distinction is # the same as above. -deps_private_subproject = [ ] -deps_public_subproject = [ ] +deps_private_subproject = [] +deps_public_subproject = [] # These are dependencencies without pkg-config files. Ideally they are # just private, but they may also be public (e.g. boost). -deps_other = [ ] +deps_other = [] diff --git a/nix-meson-build-support/export-all-symbols/meson.build b/nix-meson-build-support/export-all-symbols/meson.build index d7c086749..6a562e80d 100644 --- a/nix-meson-build-support/export-all-symbols/meson.build +++ b/nix-meson-build-support/export-all-symbols/meson.build @@ -5,7 +5,7 @@ if host_machine.system() == 'cygwin' or host_machine.system() == 'windows' # and not detail with this yet. # # TODO do not do this, and instead do fine-grained export annotations. 
- linker_export_flags = ['-Wl,--export-all-symbols'] + linker_export_flags = [ '-Wl,--export-all-symbols' ] else linker_export_flags = [] endif diff --git a/nix-meson-build-support/export/meson.build b/nix-meson-build-support/export/meson.build index 950bd9544..26f778324 100644 --- a/nix-meson-build-support/export/meson.build +++ b/nix-meson-build-support/export/meson.build @@ -1,12 +1,12 @@ requires_private = [] foreach dep : deps_private_subproject - requires_private += dep.name() + requires_private += dep.name() endforeach requires_private += deps_private -requires_public = [] +requires_public = [] foreach dep : deps_public_subproject - requires_public += dep.name() + requires_public += dep.name() endforeach requires_public += deps_public @@ -14,7 +14,7 @@ extra_pkg_config_variables = get_variable('extra_pkg_config_variables', {}) extra_cflags = [] if not meson.project_name().endswith('-c') - extra_cflags += ['-std=c++2a'] + extra_cflags += [ '-std=c++2a' ] endif import('pkgconfig').generate( @@ -29,10 +29,13 @@ import('pkgconfig').generate( variables : extra_pkg_config_variables, ) -meson.override_dependency(meson.project_name(), declare_dependency( - include_directories : include_dirs, - link_with : this_library, - compile_args : ['-std=c++2a'], - dependencies : deps_public_subproject + deps_public, - variables : extra_pkg_config_variables, -)) +meson.override_dependency( + meson.project_name(), + declare_dependency( + include_directories : include_dirs, + link_with : this_library, + compile_args : [ '-std=c++2a' ], + dependencies : deps_public_subproject + deps_public, + variables : extra_pkg_config_variables, + ), +) diff --git a/nix-meson-build-support/generate-header/meson.build b/nix-meson-build-support/generate-header/meson.build index dfbe1375f..a6523bc9f 100644 --- a/nix-meson-build-support/generate-header/meson.build +++ b/nix-meson-build-support/generate-header/meson.build @@ -1,7 +1,12 @@ -bash = find_program('bash', native: true) +bash = find_program('bash', native : true) gen_header = generator( bash, - arguments : [ '-c', '{ echo \'R"__NIX_STR(\' && cat @INPUT@ && echo \')__NIX_STR"\'; } > "$1"', '_ignored_argv0', '@OUTPUT@' ], + arguments : [ + '-c', + '{ echo \'R"__NIX_STR(\' && cat @INPUT@ && echo \')__NIX_STR"\'; } > "$1"', + '_ignored_argv0', + '@OUTPUT@', + ], output : '@PLAINNAME@.gen.hh', ) diff --git a/scripts/meson.build b/scripts/meson.build index 777da42b1..bbcf3ef56 100644 --- a/scripts/meson.build +++ b/scripts/meson.build @@ -2,19 +2,19 @@ configure_file( input : 'nix-profile.sh.in', output : 'nix-profile.sh', configuration : { - 'localstatedir': localstatedir, - } + 'localstatedir' : localstatedir, + }, ) foreach rc : [ '.sh', '.fish', '-daemon.sh', '-daemon.fish' ] configure_file( - input : 'nix-profile' + rc + '.in', + input : 'nix-profile' + rc + '.in', output : 'nix' + rc, install : true, install_dir : get_option('profile-dir'), install_mode : 'rw-r--r--', configuration : { - 'localstatedir': localstatedir, + 'localstatedir' : localstatedir, }, ) endforeach diff --git a/src/external-api-docs/meson.build b/src/external-api-docs/meson.build index 62474ffe4..cba6f646b 100644 --- a/src/external-api-docs/meson.build +++ b/src/external-api-docs/meson.build @@ -1,4 +1,5 @@ -project('nix-external-api-docs', +project( + 'nix-external-api-docs', version : files('.version'), meson_version : '>= 1.1', license : 'LGPL-2.1-or-later', @@ -10,7 +11,7 @@ doxygen_cfg = configure_file( input : 'doxygen.cfg.in', output : 'doxygen.cfg', configuration : { - 
'PROJECT_NUMBER': meson.project_version(), + 'PROJECT_NUMBER' : meson.project_version(), 'OUTPUT_DIRECTORY' : meson.current_build_dir(), 'src' : fs.parent(fs.parent(meson.project_source_root())), }, @@ -20,7 +21,7 @@ doxygen = find_program('doxygen', native : true, required : true) custom_target( 'external-api-docs', - command : [ doxygen , doxygen_cfg ], + command : [ doxygen, doxygen_cfg ], input : [ doxygen_cfg, ], diff --git a/src/internal-api-docs/meson.build b/src/internal-api-docs/meson.build index c0426621e..daab4c93c 100644 --- a/src/internal-api-docs/meson.build +++ b/src/internal-api-docs/meson.build @@ -1,4 +1,5 @@ -project('nix-internal-api-docs', +project( + 'nix-internal-api-docs', version : files('.version'), meson_version : '>= 1.1', license : 'LGPL-2.1-or-later', @@ -10,7 +11,7 @@ doxygen_cfg = configure_file( input : 'doxygen.cfg.in', output : 'doxygen.cfg', configuration : { - 'PROJECT_NUMBER': meson.project_version(), + 'PROJECT_NUMBER' : meson.project_version(), 'OUTPUT_DIRECTORY' : meson.current_build_dir(), 'BUILD_ROOT' : meson.build_root(), 'src' : fs.parent(fs.parent(meson.project_source_root())) / 'src', @@ -21,7 +22,7 @@ doxygen = find_program('doxygen', native : true, required : true) custom_target( 'internal-api-docs', - command : [ doxygen , doxygen_cfg ], + command : [ doxygen, doxygen_cfg ], input : [ doxygen_cfg, ], diff --git a/src/libcmd/include/nix/cmd/meson.build b/src/libcmd/include/nix/cmd/meson.build index 368edb28e..119d0814b 100644 --- a/src/libcmd/include/nix/cmd/meson.build +++ b/src/libcmd/include/nix/cmd/meson.build @@ -1,6 +1,6 @@ # Public headers directory -include_dirs = [include_directories('../..')] +include_dirs = [ include_directories('../..') ] headers = files( 'built-path.hh', diff --git a/src/libcmd/meson.build b/src/libcmd/meson.build index 0cb41b46f..2f8079496 100644 --- a/src/libcmd/meson.build +++ b/src/libcmd/meson.build @@ -1,4 +1,6 @@ -project('nix-cmd', 'cpp', +project( + 'nix-cmd', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -16,8 +18,7 @@ subdir('nix-meson-build-support/deps-lists') configdata = configuration_data() -deps_private_maybe_subproject = [ -] +deps_private_maybe_subproject = [] deps_public_maybe_subproject = [ dependency('nix-util'), dependency('nix-store'), @@ -31,11 +32,18 @@ subdir('nix-meson-build-support/subprojects') nlohmann_json = dependency('nlohmann_json', version : '>= 3.9') deps_public += nlohmann_json -lowdown = dependency('lowdown', version : '>= 0.9.0', required : get_option('markdown')) +lowdown = dependency( + 'lowdown', + version : '>= 0.9.0', + required : get_option('markdown'), +) deps_private += lowdown configdata.set('HAVE_LOWDOWN', lowdown.found().to_int()) # The API changed slightly around terminal initialization. 
-configdata.set('HAVE_LOWDOWN_1_4', lowdown.version().version_compare('>= 1.4.0').to_int()) +configdata.set( + 'HAVE_LOWDOWN_1_4', + lowdown.version().version_compare('>= 1.4.0').to_int(), +) readline_flavor = get_option('readline-flavor') if readline_flavor == 'editline' @@ -50,7 +58,7 @@ endif configdata.set( 'USE_READLINE', (readline_flavor == 'readline').to_int(), - description: 'Use readline instead of editline', + description : 'Use readline instead of editline', ) config_priv_h = configure_file( @@ -89,10 +97,10 @@ this_library = library( config_priv_h, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, - link_args: linker_export_flags, + link_args : linker_export_flags, prelink : true, # For C++ static initializers install : true, - cpp_pch : do_pch ? ['pch/precompiled-headers.hh'] : [] + cpp_pch : do_pch ? [ 'pch/precompiled-headers.hh' ] : [], ) install_headers(headers, subdir : 'nix/cmd', preserve_path : true) diff --git a/src/libcmd/meson.options b/src/libcmd/meson.options index 79ae4fa55..31178d82f 100644 --- a/src/libcmd/meson.options +++ b/src/libcmd/meson.options @@ -2,14 +2,14 @@ option( 'markdown', - type: 'feature', - description: 'Enable Markdown rendering in the Nix binary (requires lowdown)', + type : 'feature', + description : 'Enable Markdown rendering in the Nix binary (requires lowdown)', ) option( 'readline-flavor', type : 'combo', - choices : ['editline', 'readline'], + choices : [ 'editline', 'readline' ], value : 'editline', description : 'Which library to use for nice line editing with the Nix language REPL', ) diff --git a/src/libexpr-c/meson.build b/src/libexpr-c/meson.build index ed4582e40..606b93e70 100644 --- a/src/libexpr-c/meson.build +++ b/src/libexpr-c/meson.build @@ -1,4 +1,6 @@ -project('nix-expr-c', 'cpp', +project( + 'nix-expr-c', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -33,7 +35,7 @@ sources = files( 'nix_api_value.cc', ) -include_dirs = [include_directories('.')] +include_dirs = [ include_directories('.') ] headers = files( 'nix_api_expr.h', @@ -50,7 +52,7 @@ this_library = library( sources, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, - link_args: linker_export_flags, + link_args : linker_export_flags, prelink : true, # For C++ static initializers install : true, ) diff --git a/src/libexpr-test-support/meson.build b/src/libexpr-test-support/meson.build index b97f94362..89dc1d20a 100644 --- a/src/libexpr-test-support/meson.build +++ b/src/libexpr-test-support/meson.build @@ -1,4 +1,6 @@ -project('nix-expr-test-support', 'cpp', +project( + 'nix-expr-test-support', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -14,8 +16,7 @@ cxx = meson.get_compiler('cpp') subdir('nix-meson-build-support/deps-lists') -deps_private_maybe_subproject = [ -] +deps_private_maybe_subproject = [] deps_public_maybe_subproject = [ dependency('nix-util'), dependency('nix-util-test-support'), @@ -47,7 +48,7 @@ this_library = library( include_directories : include_dirs, # TODO: Remove `-lrapidcheck` when https://github.com/emil-e/rapidcheck/pull/326 # is available. 
See also ../libutil/build.meson - link_args: linker_export_flags + ['-lrapidcheck'], + link_args : linker_export_flags + [ '-lrapidcheck' ], prelink : true, # For C++ static initializers install : true, ) diff --git a/src/libexpr-tests/meson.build b/src/libexpr-tests/meson.build index 35ae8a9d0..1f3973681 100644 --- a/src/libexpr-tests/meson.build +++ b/src/libexpr-tests/meson.build @@ -1,4 +1,6 @@ -project('nix-expr-tests', 'cpp', +project( + 'nix-expr-tests', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -19,8 +21,7 @@ deps_private_maybe_subproject = [ dependency('nix-expr-c'), dependency('nix-expr-test-support'), ] -deps_public_maybe_subproject = [ -] +deps_public_maybe_subproject = [] subdir('nix-meson-build-support/subprojects') subdir('nix-meson-build-support/export-all-symbols') @@ -62,7 +63,7 @@ sources = files( 'value/value.cc', ) -include_dirs = [include_directories('.')] +include_dirs = [ include_directories('.') ] this_exe = executable( @@ -72,7 +73,7 @@ this_exe = executable( dependencies : deps_private_subproject + deps_private + deps_other, include_directories : include_dirs, # TODO: -lrapidcheck, see ../libutil-support/build.meson - link_args: linker_export_flags + ['-lrapidcheck'], + link_args : linker_export_flags + [ '-lrapidcheck' ], install : true, ) @@ -80,7 +81,7 @@ test( meson.project_name(), this_exe, env : { - '_NIX_TEST_UNIT_DATA': meson.current_source_dir() / 'data', + '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', }, protocol : 'gtest', ) diff --git a/src/libexpr/include/nix/expr/meson.build b/src/libexpr/include/nix/expr/meson.build index 333490ee4..04f8eaf71 100644 --- a/src/libexpr/include/nix/expr/meson.build +++ b/src/libexpr/include/nix/expr/meson.build @@ -1,13 +1,13 @@ # Public headers directory -include_dirs = [include_directories('../..')] +include_dirs = [ include_directories('../..') ] config_pub_h = configure_file( configuration : configdata_pub, output : 'config.hh', ) -headers = [config_pub_h] + files( +headers = [ config_pub_h ] + files( 'attr-path.hh', 'attr-set.hh', 'eval-cache.hh', diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index 9c8569293..adf26008d 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -1,4 +1,6 @@ -project('nix-expr', 'cpp', +project( + 'nix-expr', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -17,8 +19,7 @@ subdir('nix-meson-build-support/deps-lists') configdata_pub = configuration_data() configdata_priv = configuration_data() -deps_private_maybe_subproject = [ -] +deps_private_maybe_subproject = [] deps_public_maybe_subproject = [ dependency('nix-util'), dependency('nix-store'), @@ -39,8 +40,8 @@ endforeach boost = dependency( 'boost', - modules : ['container', 'context'], - include_type: 'system', + modules : [ 'container', 'context' ], + include_type : 'system', ) # boost is a public dependency, but not a pkg-config dependency unfortunately, so we # put in `deps_other`. 
@@ -70,7 +71,7 @@ toml11 = dependency( 'toml11', version : '>=3.7.0', method : 'cmake', - include_type: 'system', + include_type : 'system', ) deps_other += toml11 @@ -113,8 +114,7 @@ lexer_tab = custom_target( command : [ 'flex', '-Cf', # Use full scanner tables - '--outfile', - '@OUTPUT0@', + '--outfile', '@OUTPUT0@', '--header-file=' + '@OUTPUT1@', '@INPUT0@', ], @@ -175,10 +175,10 @@ this_library = library( generated_headers, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, - link_args: linker_export_flags, + link_args : linker_export_flags, prelink : true, # For C++ static initializers install : true, - cpp_pch : do_pch ? ['pch/precompiled-headers.hh'] : [] + cpp_pch : do_pch ? [ 'pch/precompiled-headers.hh' ] : [], ) install_headers(headers, subdir : 'nix/expr', preserve_path : true) diff --git a/src/libexpr/meson.options b/src/libexpr/meson.options index 242d30ea7..847bb211d 100644 --- a/src/libexpr/meson.options +++ b/src/libexpr/meson.options @@ -1,3 +1,5 @@ -option('gc', type : 'feature', +option( + 'gc', + type : 'feature', description : 'enable garbage collection in the Nix expression evaluator (requires Boehm GC)', ) diff --git a/src/libfetchers-c/meson.build b/src/libfetchers-c/meson.build index e34997f09..74ec9c9c2 100644 --- a/src/libfetchers-c/meson.build +++ b/src/libfetchers-c/meson.build @@ -1,4 +1,6 @@ -project('nix-fetchers-c', 'cpp', +project( + 'nix-fetchers-c', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -35,7 +37,7 @@ sources = files( 'nix_api_fetchers.cc', ) -include_dirs = [include_directories('.')] +include_dirs = [ include_directories('.') ] headers = files( 'nix_api_fetchers.h', @@ -53,7 +55,7 @@ this_library = library( sources, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, - link_args: linker_export_flags, + link_args : linker_export_flags, prelink : true, # For C++ static initializers install : true, ) diff --git a/src/libfetchers-tests/meson.build b/src/libfetchers-tests/meson.build index 33bc7f30e..51373aefa 100644 --- a/src/libfetchers-tests/meson.build +++ b/src/libfetchers-tests/meson.build @@ -1,4 +1,6 @@ -project('nix-fetchers-tests', 'cpp', +project( + 'nix-fetchers-tests', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -19,8 +21,7 @@ deps_private_maybe_subproject = [ dependency('nix-fetchers'), dependency('nix-fetchers-c'), ] -deps_public_maybe_subproject = [ -] +deps_public_maybe_subproject = [] subdir('nix-meson-build-support/subprojects') subdir('nix-meson-build-support/export-all-symbols') @@ -44,7 +45,7 @@ sources = files( 'public-key.cc', ) -include_dirs = [include_directories('.')] +include_dirs = [ include_directories('.') ] this_exe = executable( @@ -53,7 +54,7 @@ this_exe = executable( dependencies : deps_private_subproject + deps_private + deps_other, include_directories : include_dirs, # TODO: -lrapidcheck, see ../libutil-support/build.meson - link_args: linker_export_flags + ['-lrapidcheck'], + link_args : linker_export_flags + [ '-lrapidcheck' ], # get main from gtest install : true, ) @@ -62,7 +63,7 @@ test( meson.project_name(), this_exe, env : { - '_NIX_TEST_UNIT_DATA': meson.current_source_dir() / 'data', + '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', }, protocol : 'gtest', ) diff --git a/src/libfetchers/include/nix/fetchers/meson.build b/src/libfetchers/include/nix/fetchers/meson.build index e6ddedd97..fcd446a6d 100644 --- 
a/src/libfetchers/include/nix/fetchers/meson.build +++ b/src/libfetchers/include/nix/fetchers/meson.build @@ -1,4 +1,4 @@ -include_dirs = [include_directories('../..')] +include_dirs = [ include_directories('../..') ] headers = files( 'attrs.hh', diff --git a/src/libfetchers/meson.build b/src/libfetchers/meson.build index b549735e5..0b53ef44d 100644 --- a/src/libfetchers/meson.build +++ b/src/libfetchers/meson.build @@ -1,4 +1,6 @@ -project('nix-fetchers', 'cpp', +project( + 'nix-fetchers', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -16,8 +18,7 @@ subdir('nix-meson-build-support/deps-lists') configuration_data() -deps_private_maybe_subproject = [ -] +deps_private_maybe_subproject = [] deps_public_maybe_subproject = [ dependency('nix-util'), dependency('nix-store'), @@ -62,7 +63,7 @@ this_library = library( sources, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, - link_args: linker_export_flags, + link_args : linker_export_flags, prelink : true, # For C++ static initializers install : true, ) diff --git a/src/libflake-c/meson.build b/src/libflake-c/meson.build index 5a81618c8..8b844371d 100644 --- a/src/libflake-c/meson.build +++ b/src/libflake-c/meson.build @@ -1,4 +1,6 @@ -project('nix-flake-c', 'cpp', +project( + 'nix-flake-c', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -35,7 +37,7 @@ sources = files( 'nix_api_flake.cc', ) -include_dirs = [include_directories('.')] +include_dirs = [ include_directories('.') ] headers = files( 'nix_api_flake.h', @@ -53,7 +55,7 @@ this_library = library( sources, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, - link_args: linker_export_flags, + link_args : linker_export_flags, prelink : true, # For C++ static initializers install : true, ) diff --git a/src/libflake-tests/meson.build b/src/libflake-tests/meson.build index 8c082c7e0..08c48f137 100644 --- a/src/libflake-tests/meson.build +++ b/src/libflake-tests/meson.build @@ -1,4 +1,6 @@ -project('nix-flake-tests', 'cpp', +project( + 'nix-flake-tests', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -19,8 +21,7 @@ deps_private_maybe_subproject = [ dependency('nix-flake'), dependency('nix-flake-c'), ] -deps_public_maybe_subproject = [ -] +deps_public_maybe_subproject = [] subdir('nix-meson-build-support/subprojects') subdir('nix-meson-build-support/export-all-symbols') @@ -40,7 +41,7 @@ sources = files( 'url-name.cc', ) -include_dirs = [include_directories('.')] +include_dirs = [ include_directories('.') ] this_exe = executable( @@ -49,7 +50,7 @@ this_exe = executable( dependencies : deps_private_subproject + deps_private + deps_other, include_directories : include_dirs, # TODO: -lrapidcheck, see ../libutil-support/build.meson - link_args: linker_export_flags + ['-lrapidcheck'], + link_args : linker_export_flags + [ '-lrapidcheck' ], # get main from gtest install : true, ) @@ -58,9 +59,9 @@ test( meson.project_name(), this_exe, env : { - '_NIX_TEST_UNIT_DATA': meson.current_source_dir() / 'data', - 'NIX_CONFIG': 'extra-experimental-features = flakes', - 'HOME': meson.current_build_dir() / 'test-home', + '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', + 'NIX_CONFIG' : 'extra-experimental-features = flakes', + 'HOME' : meson.current_build_dir() / 'test-home', }, protocol : 'gtest', ) diff --git a/src/libflake/include/nix/flake/meson.build b/src/libflake/include/nix/flake/meson.build index ece1ad4ea..fc580164e 
100644 --- a/src/libflake/include/nix/flake/meson.build +++ b/src/libflake/include/nix/flake/meson.build @@ -1,6 +1,6 @@ # Public headers directory -include_dirs = [include_directories('../..')] +include_dirs = [ include_directories('../..') ] headers = files( 'flake.hh', diff --git a/src/libflake/meson.build b/src/libflake/meson.build index bc8533e15..faa12e7a9 100644 --- a/src/libflake/meson.build +++ b/src/libflake/meson.build @@ -1,4 +1,6 @@ -project('nix-flake', 'cpp', +project( + 'nix-flake', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -14,8 +16,7 @@ cxx = meson.get_compiler('cpp') subdir('nix-meson-build-support/deps-lists') -deps_private_maybe_subproject = [ -] +deps_private_maybe_subproject = [] deps_public_maybe_subproject = [ dependency('nix-util'), dependency('nix-store'), @@ -40,10 +41,10 @@ endforeach sources = files( 'config.cc', + 'flake-primops.cc', 'flake.cc', 'flakeref.cc', 'lockfile.cc', - 'flake-primops.cc', 'settings.cc', 'url-name.cc', ) @@ -59,7 +60,7 @@ this_library = library( generated_headers, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, - link_args: linker_export_flags, + link_args : linker_export_flags, prelink : true, # For C++ static initializers install : true, ) diff --git a/src/libmain-c/meson.build b/src/libmain-c/meson.build index e420520e6..2c08cac41 100644 --- a/src/libmain-c/meson.build +++ b/src/libmain-c/meson.build @@ -1,4 +1,6 @@ -project('nix-main-c', 'cpp', +project( + 'nix-main-c', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -31,7 +33,7 @@ sources = files( 'nix_api_main.cc', ) -include_dirs = [include_directories('.')] +include_dirs = [ include_directories('.') ] headers = files( 'nix_api_main.h', @@ -45,7 +47,7 @@ this_library = library( sources, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, - link_args: linker_export_flags, + link_args : linker_export_flags, prelink : true, # For C++ static initializers install : true, ) diff --git a/src/libmain/include/nix/main/meson.build b/src/libmain/include/nix/main/meson.build index 992a5ff0e..338fa8257 100644 --- a/src/libmain/include/nix/main/meson.build +++ b/src/libmain/include/nix/main/meson.build @@ -1,6 +1,6 @@ # Public headers directory -include_dirs = [include_directories('../..')] +include_dirs = [ include_directories('../..') ] headers = files( 'common-args.hh', diff --git a/src/libmain/meson.build b/src/libmain/meson.build index 65fcb6239..252b28169 100644 --- a/src/libmain/meson.build +++ b/src/libmain/meson.build @@ -1,4 +1,6 @@ -project('nix-main', 'cpp', +project( + 'nix-main', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -16,8 +18,7 @@ subdir('nix-meson-build-support/deps-lists') configdata = configuration_data() -deps_private_maybe_subproject = [ -] +deps_private_maybe_subproject = [] deps_public_maybe_subproject = [ dependency('nix-util'), dependency('nix-store'), @@ -43,7 +44,7 @@ int main() { configdata.set( 'HAVE_PUBSETBUF', cxx.compiles(pubsetbuf_test).to_int(), - description: 'Optionally used for buffering on standard error' + description : 'Optionally used for buffering on standard error', ) config_priv_h = configure_file( @@ -78,7 +79,7 @@ this_library = library( config_priv_h, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, - link_args: linker_export_flags, + link_args : linker_export_flags, prelink : true, # For C++ static initializers 
install : true, ) diff --git a/src/libstore-c/meson.build b/src/libstore-c/meson.build index eb5563161..1d01aa3aa 100644 --- a/src/libstore-c/meson.build +++ b/src/libstore-c/meson.build @@ -1,4 +1,6 @@ -project('nix-store-c', 'cpp', +project( + 'nix-store-c', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -29,7 +31,7 @@ sources = files( 'nix_api_store.cc', ) -include_dirs = [include_directories('.')] +include_dirs = [ include_directories('.') ] headers = files( 'nix_api_store.h', @@ -46,7 +48,7 @@ this_library = library( sources, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, - link_args: linker_export_flags, + link_args : linker_export_flags, prelink : true, # For C++ static initializers install : true, ) diff --git a/src/libstore-test-support/include/nix/store/tests/meson.build b/src/libstore-test-support/include/nix/store/tests/meson.build index ae5db049e..f79769d41 100644 --- a/src/libstore-test-support/include/nix/store/tests/meson.build +++ b/src/libstore-test-support/include/nix/store/tests/meson.build @@ -1,6 +1,6 @@ # Public headers directory -include_dirs = [include_directories('../../..')] +include_dirs = [ include_directories('../../..') ] headers = files( 'derived-path.hh', diff --git a/src/libstore-test-support/meson.build b/src/libstore-test-support/meson.build index 779b122fa..26da5d0f2 100644 --- a/src/libstore-test-support/meson.build +++ b/src/libstore-test-support/meson.build @@ -1,4 +1,6 @@ -project('nix-store-test-support', 'cpp', +project( + 'nix-store-test-support', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -14,8 +16,7 @@ cxx = meson.get_compiler('cpp') subdir('nix-meson-build-support/deps-lists') -deps_private_maybe_subproject = [ -] +deps_private_maybe_subproject = [] deps_public_maybe_subproject = [ dependency('nix-util'), dependency('nix-util-test-support'), @@ -47,7 +48,7 @@ this_library = library( include_directories : include_dirs, # TODO: Remove `-lrapidcheck` when https://github.com/emil-e/rapidcheck/pull/326 # is available. 
See also ../libutil/build.meson - link_args: linker_export_flags + ['-lrapidcheck'], + link_args : linker_export_flags + [ '-lrapidcheck' ], prelink : true, # For C++ static initializers install : true, ) diff --git a/src/libstore-tests/meson.build b/src/libstore-tests/meson.build index bba991388..e5995bcb1 100644 --- a/src/libstore-tests/meson.build +++ b/src/libstore-tests/meson.build @@ -1,4 +1,6 @@ -project('nix-store-tests', 'cpp', +project( + 'nix-store-tests', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -21,8 +23,7 @@ deps_private_maybe_subproject = [ dependency('nix-store-c'), dependency('nix-store-test-support'), ] -deps_public_maybe_subproject = [ -] +deps_public_maybe_subproject = [] subdir('nix-meson-build-support/subprojects') subdir('nix-meson-build-support/export-all-symbols') @@ -80,7 +81,7 @@ sources = files( 'worker-protocol.cc', ) -include_dirs = [include_directories('.')] +include_dirs = [ include_directories('.') ] this_exe = executable( @@ -90,7 +91,7 @@ this_exe = executable( dependencies : deps_private_subproject + deps_private + deps_other, include_directories : include_dirs, # TODO: -lrapidcheck, see ../libutil-support/build.meson - link_args: linker_export_flags + ['-lrapidcheck'], + link_args : linker_export_flags + [ '-lrapidcheck' ], # get main from gtest install : true, ) @@ -99,9 +100,9 @@ test( meson.project_name(), this_exe, env : { - '_NIX_TEST_UNIT_DATA': meson.current_source_dir() / 'data', - 'HOME': meson.current_build_dir() / 'test-home', - 'NIX_REMOTE': meson.current_build_dir() / 'test-home' / 'store', + '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', + 'HOME' : meson.current_build_dir() / 'test-home', + 'NIX_REMOTE' : meson.current_build_dir() / 'test-home' / 'store', }, protocol : 'gtest', ) @@ -120,11 +121,15 @@ if get_option('benchmarks') 'nix-store-benchmarks', benchmark_sources, config_priv_h, - dependencies : deps_private_subproject + deps_private + deps_other + [gbenchmark], + dependencies : deps_private_subproject + deps_private + deps_other + [ + gbenchmark, + ], include_directories : include_dirs, - link_args: linker_export_flags, + link_args : linker_export_flags, install : true, - cpp_args : ['-DNIX_UNIT_TEST_DATA="' + meson.current_source_dir() + '/data"'], + cpp_args : [ + '-DNIX_UNIT_TEST_DATA="' + meson.current_source_dir() + '/data"', + ], ) benchmark('nix-store-benchmarks', benchmark_exe) diff --git a/src/libstore/include/nix/store/meson.build b/src/libstore/include/nix/store/meson.build index 44d9815de..e883a89e4 100644 --- a/src/libstore/include/nix/store/meson.build +++ b/src/libstore/include/nix/store/meson.build @@ -9,12 +9,12 @@ config_pub_h = configure_file( output : 'config.hh', ) -headers = [config_pub_h] + files( +headers = [ config_pub_h ] + files( 'binary-cache-store.hh', 'build-result.hh', - 'build/derivation-goal.hh', 'build/derivation-building-goal.hh', 'build/derivation-building-misc.hh', + 'build/derivation-goal.hh', 'build/derivation-trampoline-goal.hh', 'build/drv-output-substitution-goal.hh', 'build/goal.hh', @@ -27,8 +27,8 @@ headers = [config_pub_h] + files( 'common-ssh-store-config.hh', 'content-address.hh', 'daemon.hh', - 'derivations.hh', 'derivation-options.hh', + 'derivations.hh', 'derived-path-map.hh', 'derived-path.hh', 'downstream-placeholder.hh', diff --git a/src/libstore/meson.build b/src/libstore/meson.build index 708188178..0b6471af3 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -1,4 +1,6 @@ -project('nix-store', 'cpp', 
+project( + 'nix-store', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -27,31 +29,34 @@ subdir('nix-meson-build-support/default-system-cpu') configdata_pub.set_quoted( 'NIX_LOCAL_SYSTEM', nix_system_cpu + '-' + host_machine.system(), - description : - 'This is the system name Nix expects for local running instance of Nix.\n\n' + description : 'This is the system name Nix expects for local running instance of Nix.\n\n' + 'See the "system" setting for additional details', ) -deps_private_maybe_subproject = [ -] +deps_private_maybe_subproject = [] deps_public_maybe_subproject = [ dependency('nix-util'), ] subdir('nix-meson-build-support/subprojects') -run_command('ln', '-s', +run_command( + 'ln', + '-s', meson.project_build_root() / '__nothing_link_target', meson.project_build_root() / '__nothing_symlink', # native doesn't allow dangling symlinks, which the tests require - env : { 'MSYS' : 'winsymlinks:lnk' }, + env : {'MSYS' : 'winsymlinks:lnk'}, check : true, ) -can_link_symlink = run_command('ln', +can_link_symlink = run_command( + 'ln', meson.project_build_root() / '__nothing_symlink', meson.project_build_root() / '__nothing_hardlink', check : false, ).returncode() == 0 -run_command('rm', '-f', +run_command( + 'rm', + '-f', meson.project_build_root() / '__nothing_symlink', meson.project_build_root() / '__nothing_hardlink', check : true, @@ -84,20 +89,20 @@ configdata_pub.set( if host_machine.system() == 'darwin' sandbox = cxx.find_library('sandbox') - deps_other += [sandbox] + deps_other += [ sandbox ] endif if host_machine.system() == 'windows' wsock32 = cxx.find_library('wsock32') - deps_other += [wsock32] + deps_other += [ wsock32 ] endif subdir('nix-meson-build-support/libatomic') boost = dependency( 'boost', - modules : ['container', 'regex'], - include_type: 'system', + modules : [ 'container', 'regex' ], + include_type : 'system', ) # boost is a public dependency, but not a pkg-config dependency unfortunately, so we # put in `deps_other`. @@ -112,9 +117,16 @@ seccomp_required = get_option('seccomp-sandboxing') if not is_linux and seccomp_required.enabled() warning('Force-enabling seccomp on non-Linux does not make sense') endif -seccomp = dependency('libseccomp', 'seccomp', required : seccomp_required, version : '>=2.5.5') +seccomp = dependency( + 'libseccomp', + 'seccomp', + required : seccomp_required, + version : '>=2.5.5', +) if is_linux and not seccomp.found() - warning('Sandbox security is reduced because libseccomp has not been found! Please provide libseccomp if it supports your CPU architecture.') + warning( + 'Sandbox security is reduced because libseccomp has not been found! 
Please provide libseccomp if it supports your CPU architecture.', + ) endif configdata_priv.set('HAVE_SECCOMP', seccomp.found().to_int()) deps_private += seccomp @@ -132,8 +144,8 @@ aws_s3 = dependency('aws-cpp-sdk-s3', required : false) configdata_pub.set('NIX_WITH_S3_SUPPORT', aws_s3.found().to_int()) if aws_s3.found() aws_s3 = declare_dependency( - include_directories: include_directories(aws_s3.get_variable('includedir')), - link_args: [ + include_directories : include_directories(aws_s3.get_variable('includedir')), + link_args : [ '-L' + aws_s3.get_variable('libdir'), '-laws-cpp-sdk-transfer', '-laws-cpp-sdk-s3', @@ -159,7 +171,10 @@ endforeach busybox = find_program(get_option('sandbox-shell'), required : false) -configdata_priv.set('HAVE_EMBEDDED_SANDBOX_SHELL', get_option('embedded-sandbox-shell').to_int()) +configdata_priv.set( + 'HAVE_EMBEDDED_SANDBOX_SHELL', + get_option('embedded-sandbox-shell').to_int(), +) if get_option('embedded-sandbox-shell') configdata_priv.set_quoted('SANDBOX_SHELL', '__embedded_sandbox_shell__') @@ -171,12 +186,7 @@ if get_option('embedded-sandbox-shell') hexdump = find_program('hexdump', native : true) embedded_sandbox_shell_gen = custom_target( 'embedded-sandbox-shell.gen.hh', - command : [ - hexdump, - '-v', - '-e', - '1/1 "0x%x," "\n"' - ], + command : [ hexdump, '-v', '-e', '1/1 "0x%x," "\n"' ], input : busybox.full_path(), output : 'embedded-sandbox-shell.gen.hh', capture : true, @@ -228,21 +238,20 @@ endif # Aside from prefix itself, each of these was made into an absolute path # by joining it with prefix, unless it was already an absolute path # (which is the default for store-dir, localstatedir, and log-dir). -configdata_priv.set_quoted('NIX_PREFIX', prefix) +configdata_priv.set_quoted('NIX_PREFIX', prefix) configdata_priv.set_quoted('NIX_STORE_DIR', store_dir) -configdata_priv.set_quoted('NIX_DATA_DIR', datadir) +configdata_priv.set_quoted('NIX_DATA_DIR', datadir) configdata_priv.set_quoted('NIX_STATE_DIR', localstatedir / 'nix') -configdata_priv.set_quoted('NIX_LOG_DIR', log_dir) -configdata_priv.set_quoted('NIX_CONF_DIR', sysconfdir / 'nix') -configdata_priv.set_quoted('NIX_MAN_DIR', mandir) +configdata_priv.set_quoted('NIX_LOG_DIR', log_dir) +configdata_priv.set_quoted('NIX_CONF_DIR', sysconfdir / 'nix') +configdata_priv.set_quoted('NIX_MAN_DIR', mandir) lsof = find_program('lsof', required : false) configdata_priv.set_quoted( 'LSOF', - lsof.found() - ? lsof.full_path() - # Just look up on the PATH - : 'lsof', + lsof.found() ? 
lsof.full_path() + # Just look up on the PATH +: 'lsof', ) config_priv_h = configure_file( @@ -255,8 +264,8 @@ subdir('nix-meson-build-support/common') sources = files( 'binary-cache-store.cc', 'build-result.cc', - 'build/derivation-goal.cc', 'build/derivation-building-goal.cc', + 'build/derivation-goal.cc', 'build/derivation-trampoline-goal.cc', 'build/drv-output-substitution-goal.cc', 'build/entry-points.cc', @@ -270,8 +279,8 @@ sources = files( 'common-ssh-store-config.cc', 'content-address.cc', 'daemon.cc', - 'derivations.cc', 'derivation-options.cc', + 'derivations.cc', 'derived-path-map.cc', 'derived-path.cc', 'downstream-placeholder.cc', @@ -318,8 +327,8 @@ sources = files( 'ssh.cc', 'store-api.cc', 'store-dir-config.cc', - 'store-registration.cc', 'store-reference.cc', + 'store-registration.cc', 'uds-remote-store.cc', 'worker-protocol-connection.cc', 'worker-protocol.cc', @@ -347,10 +356,10 @@ this_library = library( config_priv_h, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, - link_args: linker_export_flags, + link_args : linker_export_flags, prelink : true, # For C++ static initializers install : true, - cpp_pch : do_pch ? ['pch/precompiled-headers.hh'] : [] + cpp_pch : do_pch ? [ 'pch/precompiled-headers.hh' ] : [], ) install_headers(headers, subdir : 'nix/store', preserve_path : true) diff --git a/src/libstore/meson.options b/src/libstore/meson.options index ebad24dc4..b8414068d 100644 --- a/src/libstore/meson.options +++ b/src/libstore/meson.options @@ -1,21 +1,35 @@ # vim: filetype=meson -option('embedded-sandbox-shell', type : 'boolean', value : false, +option( + 'embedded-sandbox-shell', + type : 'boolean', + value : false, description : 'include the sandbox shell in the Nix binary', ) -option('seccomp-sandboxing', type : 'feature', +option( + 'seccomp-sandboxing', + type : 'feature', description : 'build support for seccomp sandboxing (recommended unless your arch doesn\'t support libseccomp, only relevant on Linux)', ) -option('sandbox-shell', type : 'string', value : 'busybox', +option( + 'sandbox-shell', + type : 'string', + value : 'busybox', description : 'path to a statically-linked shell to use as /bin/sh in sandboxes (usually busybox)', ) -option('store-dir', type : 'string', value : '/nix/store', +option( + 'store-dir', + type : 'string', + value : '/nix/store', description : 'path of the Nix store', ) -option('log-dir', type : 'string', value : '/nix/var/log/nix', +option( + 'log-dir', + type : 'string', + value : '/nix/var/log/nix', description : 'path to store logs in for Nix', ) diff --git a/src/libstore/windows/meson.build b/src/libstore/windows/meson.build index b81c5b2af..1cc30602a 100644 --- a/src/libstore/windows/meson.build +++ b/src/libstore/windows/meson.build @@ -7,5 +7,4 @@ include_dirs += include_directories( #'build', ) -headers += files( -) +headers += files() diff --git a/src/libutil-c/meson.build b/src/libutil-c/meson.build index 3414a6d31..3688ddeb6 100644 --- a/src/libutil-c/meson.build +++ b/src/libutil-c/meson.build @@ -1,4 +1,6 @@ -project('nix-util-c', 'cpp', +project( + 'nix-util-c', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -19,8 +21,7 @@ configdata = configuration_data() deps_private_maybe_subproject = [ dependency('nix-util'), ] -deps_public_maybe_subproject = [ -] +deps_public_maybe_subproject = [] subdir('nix-meson-build-support/subprojects') configdata.set_quoted('PACKAGE_VERSION', meson.project_version()) @@ -36,7 +37,7 @@ sources = files( 
'nix_api_util.cc', ) -include_dirs = [include_directories('.')] +include_dirs = [ include_directories('.') ] headers = files( 'nix_api_util.h', @@ -54,7 +55,7 @@ this_library = library( config_priv_h, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, - link_args: linker_export_flags, + link_args : linker_export_flags, prelink : true, # For C++ static initializers install : true, ) diff --git a/src/libutil-test-support/include/nix/util/tests/meson.build b/src/libutil-test-support/include/nix/util/tests/meson.build index e6697b517..ab143757c 100644 --- a/src/libutil-test-support/include/nix/util/tests/meson.build +++ b/src/libutil-test-support/include/nix/util/tests/meson.build @@ -1,6 +1,6 @@ # Public headers directory -include_dirs = [include_directories('../../..')] +include_dirs = [ include_directories('../../..') ] headers = files( 'characterization.hh', diff --git a/src/libutil-test-support/meson.build b/src/libutil-test-support/meson.build index ec6bc15d9..9bee4a1a6 100644 --- a/src/libutil-test-support/meson.build +++ b/src/libutil-test-support/meson.build @@ -1,4 +1,6 @@ -project('nix-util-test-support', 'cpp', +project( + 'nix-util-test-support', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -14,8 +16,7 @@ cxx = meson.get_compiler('cpp') subdir('nix-meson-build-support/deps-lists') -deps_private_maybe_subproject = [ -] +deps_private_maybe_subproject = [] deps_public_maybe_subproject = [ dependency('nix-util'), dependency('nix-util-c'), @@ -44,7 +45,7 @@ this_library = library( include_directories : include_dirs, # TODO: Remove `-lrapidcheck` when https://github.com/emil-e/rapidcheck/pull/326 # is available. See also ../libutil/build.meson - link_args: linker_export_flags + ['-lrapidcheck'], + link_args : linker_export_flags + [ '-lrapidcheck' ], prelink : true, # For C++ static initializers install : true, ) diff --git a/src/libutil-tests/meson.build b/src/libutil-tests/meson.build index 0097611c6..2dbb4d129 100644 --- a/src/libutil-tests/meson.build +++ b/src/libutil-tests/meson.build @@ -1,4 +1,6 @@ -project('nix-util-tests', 'cpp', +project( + 'nix-util-tests', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -19,8 +21,7 @@ deps_private_maybe_subproject = [ dependency('nix-util-c'), dependency('nix-util-test-support'), ] -deps_public_maybe_subproject = [ -] +deps_public_maybe_subproject = [] subdir('nix-meson-build-support/subprojects') subdir('nix-meson-build-support/export-all-symbols') @@ -76,7 +77,7 @@ sources = files( 'xml-writer.cc', ) -include_dirs = [include_directories('.')] +include_dirs = [ include_directories('.') ] this_exe = executable( @@ -86,7 +87,7 @@ this_exe = executable( dependencies : deps_private_subproject + deps_private + deps_other, include_directories : include_dirs, # TODO: -lrapidcheck, see ../libutil-support/build.meson - link_args: linker_export_flags + ['-lrapidcheck'], + link_args : linker_export_flags + [ '-lrapidcheck' ], # get main from gtest install : true, ) @@ -95,7 +96,7 @@ test( meson.project_name(), this_exe, env : { - '_NIX_TEST_UNIT_DATA': meson.current_source_dir() / 'data', + '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', }, protocol : 'gtest', ) diff --git a/src/libutil/meson.build b/src/libutil/meson.build index ea2cb679e..afdddc6b5 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -1,4 +1,6 @@ -project('nix-util', 'cpp', +project( + 'nix-util', + 'cpp', version : files('.version'), default_options 
: [ 'cpp_std=c++2a', @@ -16,10 +18,8 @@ subdir('nix-meson-build-support/deps-lists') configdata = configuration_data() -deps_private_maybe_subproject = [ -] -deps_public_maybe_subproject = [ -] +deps_private_maybe_subproject = [] +deps_public_maybe_subproject = [] subdir('nix-meson-build-support/subprojects') # Check for each of these functions, and create a define like `#define @@ -34,7 +34,7 @@ check_funcs = [ foreach funcspec : check_funcs define_name = 'HAVE_' + funcspec[0].underscorify().to_upper() define_value = cxx.has_function(funcspec[0]).to_int() - configdata.set(define_name, define_value, description: funcspec[1]) + configdata.set(define_name, define_value, description : funcspec[1]) endforeach subdir('nix-meson-build-support/libatomic') @@ -45,21 +45,21 @@ if host_machine.system() == 'windows' elif host_machine.system() == 'sunos' socket = cxx.find_library('socket') network_service_library = cxx.find_library('nsl') - deps_other += [socket, network_service_library] + deps_other += [ socket, network_service_library ] endif blake3 = dependency( 'libblake3', - version: '>= 1.8.2', + version : '>= 1.8.2', method : 'pkg-config', ) deps_private += blake3 boost = dependency( 'boost', - modules : ['context', 'coroutine', 'iostreams', 'url'], - include_type: 'system', - version: '>=1.82.0' + modules : [ 'context', 'coroutine', 'iostreams', 'url' ], + include_type : 'system', + version : '>=1.82.0', ) # boost is a public dependency, but not a pkg-config dependency unfortunately, so we # put in `deps_other`. @@ -93,7 +93,12 @@ cpuid_required = get_option('cpuid') if host_machine.cpu_family() != 'x86_64' and cpuid_required.enabled() warning('Force-enabling seccomp on non-x86_64 does not make sense') endif -cpuid = dependency('libcpuid', 'cpuid', version : '>= 0.7.0', required : cpuid_required) +cpuid = dependency( + 'libcpuid', + 'cpuid', + version : '>= 0.7.0', + required : cpuid_required, +) configdata.set('HAVE_LIBCPUID', cpuid.found().to_int()) deps_private += cpuid @@ -109,7 +114,7 @@ config_priv_h = configure_file( subdir('nix-meson-build-support/common') -sources = [config_priv_h] + files( +sources = [ config_priv_h ] + files( 'archive.cc', 'args.cc', 'base-n.cc', @@ -117,8 +122,8 @@ sources = [config_priv_h] + files( 'canon-path.cc', 'compression.cc', 'compute-levels.cc', - 'configuration.cc', 'config-global.cc', + 'configuration.cc', 'current-process.cc', 'english.cc', 'environment-variables.cc', @@ -137,8 +142,8 @@ sources = [config_priv_h] + files( 'logging.cc', 'memory-source-accessor.cc', 'mounted-source-accessor.cc', - 'position.cc', 'pos-table.cc', + 'position.cc', 'posix-source-accessor.cc', 'references.cc', 'serialise.cc', @@ -146,8 +151,8 @@ sources = [config_priv_h] + files( 'signature/signer.cc', 'source-accessor.cc', 'source-path.cc', - 'subdir-source-accessor.cc', 'strings.cc', + 'subdir-source-accessor.cc', 'suggestions.cc', 'tarfile.cc', 'tee-logger.cc', @@ -190,10 +195,10 @@ this_library = library( sources, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, - link_args: linker_export_flags, + link_args : linker_export_flags, prelink : true, # For C++ static initializers install : true, - cpp_pch : do_pch ? ['pch/precompiled-headers.hh'] : [] + cpp_pch : do_pch ? 
[ 'pch/precompiled-headers.hh' ] : [], ) install_headers(headers, subdir : 'nix/util', preserve_path : true) @@ -202,7 +207,7 @@ libraries_private = [] if host_machine.system() == 'windows' # `libraries_private` cannot contain ad-hoc dependencies (from # `find_library), so we need to do this manually - libraries_private += ['-lws2_32'] + libraries_private += [ '-lws2_32' ] endif subdir('nix-meson-build-support/export') diff --git a/src/libutil/meson.options b/src/libutil/meson.options index 21883af01..a82806162 100644 --- a/src/libutil/meson.options +++ b/src/libutil/meson.options @@ -1,5 +1,7 @@ # vim: filetype=meson -option('cpuid', type : 'feature', +option( + 'cpuid', + type : 'feature', description : 'determine microarchitecture levels with libcpuid (only relevant on x86_64)', ) diff --git a/src/libutil/unix/meson.build b/src/libutil/unix/meson.build index ea2391d05..13bb380b4 100644 --- a/src/libutil/unix/meson.build +++ b/src/libutil/unix/meson.build @@ -5,7 +5,7 @@ configdata_unix = configuration_data() configdata_unix.set( 'HAVE_DECL_AT_SYMLINK_NOFOLLOW', cxx.has_header_symbol('fcntl.h', 'AT_SYMLINK_NOFOLLOW').to_int(), - description : 'Optionally used for changing the files and symlinks.' + description : 'Optionally used for changing the files and symlinks.', ) # Check for each of these functions, and create a define like `#define @@ -39,7 +39,7 @@ check_funcs_unix = [ foreach funcspec : check_funcs_unix define_name = 'HAVE_' + funcspec[0].underscorify().to_upper() define_value = cxx.has_function(funcspec[0]).to_int() - configdata_unix.set(define_name, define_value, description: funcspec[1]) + configdata_unix.set(define_name, define_value, description : funcspec[1]) endforeach config_unix_priv_h = configure_file( diff --git a/src/nix/meson.build b/src/nix/meson.build index 23b876690..c6dc9c530 100644 --- a/src/nix/meson.build +++ b/src/nix/meson.build @@ -1,4 +1,6 @@ -project('nix', 'cpp', +project( + 'nix', + 'cpp', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -26,8 +28,7 @@ deps_private_maybe_subproject = [ dependency('nix-main'), dependency('nix-cmd'), ] -deps_public_maybe_subproject = [ -] +deps_public_maybe_subproject = [] subdir('nix-meson-build-support/subprojects') subdir('nix-meson-build-support/export-all-symbols') @@ -57,10 +58,9 @@ config_priv_h = configure_file( subdir('nix-meson-build-support/common') subdir('nix-meson-build-support/generate-header') -nix_sources = [config_priv_h] + files( +nix_sources = [ config_priv_h ] + files( 'add-to-store.cc', 'app.cc', - 'self-exe.cc', 'build.cc', 'bundle.cc', 'cat.cc', @@ -96,6 +96,7 @@ nix_sources = [config_priv_h] + files( 'repl.cc', 'run.cc', 'search.cc', + 'self-exe.cc', 'sigs.cc', 'store-copy-log.cc', 'store-delete.cc', @@ -177,16 +178,16 @@ if host_machine.system() != 'windows' ] endif -include_dirs = [include_directories('.')] +include_dirs = [ include_directories('.') ] this_exe = executable( meson.project_name(), sources, dependencies : deps_private_subproject + deps_private + deps_other, include_directories : include_dirs, - link_args: linker_export_flags, + link_args : linker_export_flags, install : true, - cpp_pch : do_pch ? ['pch/precompiled-headers.hh'] : [] + cpp_pch : do_pch ? [ 'pch/precompiled-headers.hh' ] : [], ) meson.override_find_program('nix', this_exe) @@ -217,15 +218,15 @@ foreach linkname : nix_symlinks pointing_to : fs.name(this_exe), install_dir : get_option('bindir'), # The 'runtime' tag is what executables default to, which we want to emulate here. 
- install_tag : 'runtime' + install_tag : 'runtime', ) custom_target( - command: ['ln', '-sf', fs.name(this_exe), '@OUTPUT@'], - output: linkname + executable_suffix, + command : [ 'ln', '-sf', fs.name(this_exe), '@OUTPUT@' ], + output : linkname + executable_suffix, # native doesn't allow dangling symlinks, but the target executable often doesn't exist at this time - env : { 'MSYS' : 'winsymlinks:lnk' }, + env : {'MSYS' : 'winsymlinks:lnk'}, # TODO(Ericson2314): Don't do this once we have the `meson.override_find_program` working) - build_by_default: true + build_by_default : true, ) # TODO(Ericson3214): Doesn't yet work #meson.override_find_program(linkname, t) @@ -233,19 +234,19 @@ endforeach install_symlink( 'build-remote', - pointing_to : '..' / '..'/ get_option('bindir') / fs.name(this_exe), + pointing_to : '..' / '..' / get_option('bindir') / fs.name(this_exe), install_dir : get_option('libexecdir') / fs.name(this_exe), # The 'runtime' tag is what executables default to, which we want to emulate here. - install_tag : 'runtime' + install_tag : 'runtime', ) custom_target( - command: ['ln', '-sf', fs.name(this_exe), '@OUTPUT@'], - output: 'build-remote' + executable_suffix, + command : [ 'ln', '-sf', fs.name(this_exe), '@OUTPUT@' ], + output : 'build-remote' + executable_suffix, # native doesn't allow dangling symlinks, but the target executable often doesn't exist at this time - env : { 'MSYS' : 'winsymlinks:lnk' }, + env : {'MSYS' : 'winsymlinks:lnk'}, # TODO(Ericson2314): Don't do this once we have the `meson.override_find_program` working) - build_by_default: true + build_by_default : true, ) # TODO(Ericson3214): Doesn't yet work #meson.override_find_program(linkname, t) diff --git a/src/nix/meson.options b/src/nix/meson.options index 8430dd669..0fc680cfe 100644 --- a/src/nix/meson.options +++ b/src/nix/meson.options @@ -1,6 +1,9 @@ # vim: filetype=meson # A relative path means it gets appended to prefix. 
-option('profile-dir', type : 'string', value : 'etc/profile.d', +option( + 'profile-dir', + type : 'string', + value : 'etc/profile.d', description : 'the path to install shell profile files', ) diff --git a/src/perl/lib/Nix/meson.build b/src/perl/lib/Nix/meson.build index 256e66096..5f8baee69 100644 --- a/src/perl/lib/Nix/meson.build +++ b/src/perl/lib/Nix/meson.build @@ -34,7 +34,7 @@ nix_perl_store_cc = custom_target( 'Store.cc', output : 'Store.cc', input : nix_perl_store_xs, - command : [xsubpp, '@INPUT@', '-output', '@OUTPUT@'], + command : [ xsubpp, '@INPUT@', '-output', '@OUTPUT@' ], ) # Build Nix::Store Library @@ -42,7 +42,7 @@ nix_perl_store_cc = custom_target( nix_perl_store_lib = library( 'Store', sources : nix_perl_store_cc, - name_prefix : '', + name_prefix : '', prelink : true, # For C++ static initializers install : true, install_mode : 'rwxr-xr-x', @@ -56,5 +56,5 @@ nix_perl_store_lib = library( install_data( nix_perl_scripts, install_mode : 'rw-r--r--', - install_dir : join_paths(nix_perl_install_dir,'Nix'), + install_dir : join_paths(nix_perl_install_dir, 'Nix'), ) diff --git a/src/perl/meson.build b/src/perl/meson.build index 599e91710..f33291051 100644 --- a/src/perl/meson.build +++ b/src/perl/meson.build @@ -4,7 +4,7 @@ # init project #============================================================================ -project ( +project( 'nix-perl', 'cpp', version : files('.version'), @@ -65,7 +65,7 @@ yath = find_program('yath', required : false) # Required Libraries #------------------------------------------------- -bzip2_dep = dependency('bzip2', required: false) +bzip2_dep = dependency('bzip2', required : false) if not bzip2_dep.found() bzip2_dep = cpp.find_library('bz2') if not bzip2_dep.found() @@ -87,15 +87,36 @@ nix_store_dep = dependency('nix-store') # the perl bindings does not appear to be possible. 
#------------------------------------------------- perl_archname = run_command( - perl, '-e', 'use Config; print $Config{archname};', check: true).stdout() + perl, + '-e', + 'use Config; print $Config{archname};', + check : true, +).stdout() perl_version = run_command( - perl, '-e', 'use Config; print $Config{version};', check: true).stdout() + perl, + '-e', + 'use Config; print $Config{version};', + check : true, +).stdout() perl_archlibexp = run_command( - perl, '-e', 'use Config; print $Config{archlibexp};', check: true).stdout() + perl, + '-e', + 'use Config; print $Config{archlibexp};', + check : true, +).stdout() perl_site_libdir = run_command( - perl, '-e', 'use Config; print $Config{installsitearch};', check: true).stdout() + perl, + '-e', + 'use Config; print $Config{installsitearch};', + check : true, +).stdout() nix_perl_install_dir = join_paths( - libdir, 'perl5', 'site_perl', perl_version, perl_archname) + libdir, + 'perl5', + 'site_perl', + perl_version, + perl_archname, +) # print perl hints for logs @@ -110,10 +131,11 @@ message('Assumed Nix-Perl install dir: @0@'.format(nix_perl_install_dir)) #------------------------------------------------- perl_check_dbi = run_command( perl, - '-e', 'use DBI; use DBD::SQLite;', + '-e', + 'use DBI; use DBD::SQLite;', '-I@0@'.format(get_option('dbi_path')), '-I@0@'.format(get_option('dbd_sqlite_path')), - check: true + check : true, ) if perl_check_dbi.returncode() == 2 @@ -128,10 +150,11 @@ endif #------------------------------------------------- perl_dep = declare_dependency( dependencies : cpp.find_library( - 'perl', - has_headers : [ - join_paths(perl_archlibexp, 'CORE', 'perl.h'), - join_paths(perl_archlibexp, 'CORE', 'EXTERN.h')], + 'perl', + has_headers : [ + join_paths(perl_archlibexp, 'CORE', 'perl.h'), + join_paths(perl_archlibexp, 'CORE', 'EXTERN.h'), + ], dirs : [ join_paths(perl_archlibexp, 'CORE'), ], @@ -166,8 +189,8 @@ if get_option('tests').enabled() test( 'nix-perl-test', yath, - args : ['test'], + args : [ 'test' ], workdir : meson.current_build_dir(), - depends : [nix_perl_store_lib], + depends : [ nix_perl_store_lib ], ) endif diff --git a/src/perl/meson.options b/src/perl/meson.options index 9b5b6b1d9..03ddf57f1 100644 --- a/src/perl/meson.options +++ b/src/perl/meson.options @@ -9,7 +9,8 @@ option( 'tests', type : 'feature', value : 'disabled', - description : 'run nix-perl tests') + description : 'run nix-perl tests', +) # Location of Perl Modules @@ -18,10 +19,12 @@ option( 'dbi_path', type : 'string', value : '/usr', - description : 'path to perl::dbi') + description : 'path to perl::dbi', +) option( 'dbd_sqlite_path', type : 'string', value : '/usr', - description : 'path to perl::dbd-SQLite') + description : 'path to perl::dbd-SQLite', +) diff --git a/tests/functional/ca/meson.build b/tests/functional/ca/meson.build index a4611ca42..ec34e9644 100644 --- a/tests/functional/ca/meson.build +++ b/tests/functional/ca/meson.build @@ -5,9 +5,9 @@ configure_file( ) suites += { - 'name': 'ca', - 'deps': [], - 'tests': [ + 'name' : 'ca', + 'deps' : [], + 'tests' : [ 'build-cache.sh', 'build-with-garbage-path.sh', 'build.sh', @@ -30,5 +30,5 @@ suites += { 'substitute.sh', 'why-depends.sh', ], - 'workdir': meson.current_source_dir(), + 'workdir' : meson.current_source_dir(), } diff --git a/tests/functional/dyn-drv/meson.build b/tests/functional/dyn-drv/meson.build index 07145000d..78950a954 100644 --- a/tests/functional/dyn-drv/meson.build +++ b/tests/functional/dyn-drv/meson.build @@ -5,9 +5,9 @@ configure_file( ) suites 
+= { - 'name': 'dyn-drv', - 'deps': [], - 'tests': [ + 'name' : 'dyn-drv', + 'deps' : [], + 'tests' : [ 'text-hashed-output.sh', 'recursive-mod-json.sh', 'build-built-drv.sh', @@ -17,5 +17,5 @@ suites += { 'old-daemon-error-hack.sh', 'dep-built-drv-2.sh', ], - 'workdir': meson.current_source_dir(), + 'workdir' : meson.current_source_dir(), } diff --git a/tests/functional/flakes/meson.build b/tests/functional/flakes/meson.build index 801fefc6f..de76a5580 100644 --- a/tests/functional/flakes/meson.build +++ b/tests/functional/flakes/meson.build @@ -1,7 +1,7 @@ suites += { - 'name': 'flakes', - 'deps': [], - 'tests': [ + 'name' : 'flakes', + 'deps' : [], + 'tests' : [ 'flakes.sh', 'develop.sh', 'edit.sh', @@ -35,5 +35,5 @@ suites += { 'old-lockfiles.sh', 'trace-ifd.sh', ], - 'workdir': meson.current_source_dir(), + 'workdir' : meson.current_source_dir(), } diff --git a/tests/functional/git-hashing/meson.build b/tests/functional/git-hashing/meson.build index 342c2799c..8e7f693c1 100644 --- a/tests/functional/git-hashing/meson.build +++ b/tests/functional/git-hashing/meson.build @@ -1,10 +1,10 @@ suites += { - 'name': 'git-hashing', - 'deps': [], - 'tests': [ + 'name' : 'git-hashing', + 'deps' : [], + 'tests' : [ 'simple-sha1.sh', 'simple-sha256.sh', 'fixed.sh', ], - 'workdir': meson.current_source_dir(), + 'workdir' : meson.current_source_dir(), } diff --git a/tests/functional/local-overlay-store/meson.build b/tests/functional/local-overlay-store/meson.build index b7ba5a323..6db2c728d 100644 --- a/tests/functional/local-overlay-store/meson.build +++ b/tests/functional/local-overlay-store/meson.build @@ -1,7 +1,7 @@ suites += { - 'name': 'local-overlay-store', - 'deps': [], - 'tests': [ + 'name' : 'local-overlay-store', + 'deps' : [], + 'tests' : [ 'check-post-init.sh', 'redundant-add.sh', 'build.sh', @@ -14,5 +14,5 @@ suites += { 'optimise.sh', 'stale-file-handle.sh', ], - 'workdir': meson.current_source_dir(), + 'workdir' : meson.current_source_dir(), } diff --git a/tests/functional/meson.build b/tests/functional/meson.build index d2989ee84..85373a70a 100644 --- a/tests/functional/meson.build +++ b/tests/functional/meson.build @@ -1,4 +1,5 @@ -project('nix-functional-tests', +project( + 'nix-functional-tests', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -26,13 +27,13 @@ nix_bin_dir = fs.parent(nix.full_path()) subdir('nix-meson-build-support/default-system-cpu') test_confdata = { - 'bindir': nix_bin_dir, - 'coreutils': fs.parent(coreutils.full_path()), - 'dot': dot.found() ? dot.full_path() : '', - 'bash': bash.full_path(), - 'sandbox_shell': busybox.found() ? busybox.full_path() : '', - 'PACKAGE_VERSION': meson.project_version(), - 'system': nix_system_cpu + '-' + host_machine.system(), + 'bindir' : nix_bin_dir, + 'coreutils' : fs.parent(coreutils.full_path()), + 'dot' : dot.found() ? dot.full_path() : '', + 'bash' : bash.full_path(), + 'sandbox_shell' : busybox.found() ? busybox.full_path() : '', + 'PACKAGE_VERSION' : meson.project_version(), + 'system' : nix_system_cpu + '-' + host_machine.system(), } # Just configures `common/vars-and-functions.sh.in`. 
@@ -48,8 +49,8 @@ configure_file( suites = [ { 'name' : 'main', - 'deps': [], - 'tests': [ + 'deps' : [], + 'tests' : [ 'test-infra.sh', 'gc.sh', 'nix-collect-garbage-d.sh', @@ -174,7 +175,7 @@ suites = [ 'help.sh', 'symlinks.sh', ], - 'workdir': meson.current_source_dir(), + 'workdir' : meson.current_source_dir(), }, ] @@ -183,14 +184,14 @@ if nix_store.found() add_languages('cpp') subdir('test-libstoreconsumer') suites += { - 'name': 'libstoreconsumer', - 'deps': [ + 'name' : 'libstoreconsumer', + 'deps' : [ libstoreconsumer_tester, ], - 'tests': [ + 'tests' : [ 'test-libstoreconsumer.sh', ], - 'workdir': meson.current_source_dir(), + 'workdir' : meson.current_source_dir(), } endif @@ -201,14 +202,14 @@ if nix_expr.found() and get_option('default_library') != 'static' add_languages('cpp') subdir('plugins') suites += { - 'name': 'plugins', - 'deps': [ + 'name' : 'plugins', + 'deps' : [ libplugintest, ], - 'tests': [ + 'tests' : [ 'plugins.sh', ], - 'workdir': meson.current_source_dir(), + 'workdir' : meson.current_source_dir(), } endif @@ -230,20 +231,21 @@ foreach suite : suites test( name, bash, - args: [ + args : [ '-x', '-e', '-u', - '-o', 'pipefail', + '-o', + 'pipefail', script, ], suite : suite_name, env : { - '_NIX_TEST_SOURCE_DIR': meson.current_source_dir(), - '_NIX_TEST_BUILD_DIR': meson.current_build_dir(), - 'TEST_NAME': suite_name / name, - 'NIX_REMOTE': '', - 'PS4': '+(${BASH_SOURCE[0]-$0}:$LINENO) ', + '_NIX_TEST_SOURCE_DIR' : meson.current_source_dir(), + '_NIX_TEST_BUILD_DIR' : meson.current_build_dir(), + 'TEST_NAME' : suite_name / name, + 'NIX_REMOTE' : '', + 'PS4' : '+(${BASH_SOURCE[0]-$0}:$LINENO) ', }, # Some tests take 15+ seconds even on an otherwise idle machine; # on a loaded machine this can easily drive them to failure. Give From cf100859401ef30cc45cd83f94b005d32de55787 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 7 Aug 2025 03:01:40 +0300 Subject: [PATCH 120/382] git-blame-ignore-revs: Add the meson reformatting commit --- .git-blame-ignore-revs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index bda571a5e..0c0d7fde8 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -1,2 +1,4 @@ # bulk initial re-formatting with clang-format e4f62e46088919428a68bd8014201dc8e379fed7 # !autorebase ./maintainers/format.sh --until-stable +# meson re-formatting +385e2c3542c707d95e3784f7f6d623f67e77ab61 # !autorebase ./maintainers/format.sh --until-stable From 9d8d8df729a0a0cd3ba21cc0ca42e8b9a7bf1432 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 6 Aug 2025 19:35:35 -0400 Subject: [PATCH 121/382] Slight clean up the constructors for `Hash` - No more private constructor that is kinda weird - Two new static functions, `baseFromSize` and `baseFromSize`, that do one thing, and one thing only (simple). - Two `Hash::parse*` that previously used the private constructor now can use these two functions directly. - The remaining `Hash::parseAny*` methods, which are inherently more complex, are written in terms of a `parseAnyHelper` static function which is also complex, but keeps the complexity in one spot. 
--- src/libutil/hash.cc | 165 ++++++++++++++++----------- src/libutil/include/nix/util/hash.hh | 7 -- 2 files changed, 99 insertions(+), 73 deletions(-) diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 1319924bf..de2fc5a48 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -94,6 +94,60 @@ std::string Hash::to_string(HashFormat hashFormat, bool includeAlgo) const Hash Hash::dummy(HashAlgorithm::SHA256); +namespace { + +/// Private convenience +struct DecodeNamePair +{ + decltype(base16::decode) * decode; + std::string_view encodingName; +}; + +} // namespace + +/** + * Given the expected size of the message once decoded it, figure out + * which encoding we are using by looking at the size of the encoded + * message. + */ +static DecodeNamePair baseFromSize(std::string_view rest, HashAlgorithm algo) +{ + auto hashSize = regularHashSize(algo); + if (rest.size() == base16::encodedLength(hashSize)) + return {base16::decode, "base16"}; + + if (rest.size() == BaseNix32::encodedLength(hashSize)) + return {BaseNix32::decode, "nix32"}; + + if (rest.size() == base64::encodedLength(hashSize)) + return {base64::decode, "Base64"}; + + throw BadHash("hash '%s' has wrong length for hash algorithm '%s'", rest, printHashAlgo(algo)); +} + +/** + * Given the exact decoding function, and a display name for in error + * messages. + * + * @param rest the string view to parse. Must not include any `(:|-)` prefix. + */ +static Hash parseLowLevel(std::string_view rest, HashAlgorithm algo, DecodeNamePair pair) +{ + Hash res{algo}; + std::string d; + try { + d = pair.decode(rest); + } catch (Error & e) { + e.addTrace({}, "While decoding hash '%s'", rest); + } + if (d.size() != res.hashSize) + throw BadHash("invalid %s hash '%s' %d %d", pair.encodingName, rest); + assert(res.hashSize); + memcpy(res.hash, d.data(), res.hashSize); + + return res; +} + Hash Hash::parseSRI(std::string_view original) { auto rest = original; @@ -104,16 +158,21 @@ Hash Hash::parseSRI(std::string_view original) throw BadHash("hash '%s' is not SRI", original); HashAlgorithm parsedType = parseHashAlgo(*hashRaw); - return Hash(rest, parsedType, true); + return parseLowLevel(rest, parsedType, {base64::decode, "SRI"}); } -// Mutates the string to eliminate the prefixes when found -static std::pair, bool> getParsedTypeAndSRI(std::string_view & rest) +/** + * @param rest is the string to parse + * + * @param resolveAlgo resolves the parsed type (or throws an error when it is not + * possible.) + */ +static Hash parseAnyHelper(std::string_view rest, auto resolveAlgo) { bool isSRI = false; // Parse the hash type before the separator, if there was one. - std::optional optParsedType; + std::optional optParsedAlgo; { auto hashRaw = splitPrefixTo(rest, ':'); @@ -123,50 +182,12 @@ static std::pair, bool> getParsedTypeAndSRI(std::st isSRI = true; } if (hashRaw) - optParsedType = parseHashAlgo(*hashRaw); + optParsedAlgo = parseHashAlgo(*hashRaw); } - return {optParsedType, isSRI}; -} + HashAlgorithm algo = resolveAlgo(std::move(optParsedAlgo)); -Hash Hash::parseAnyPrefixed(std::string_view original) -{ - auto rest = original; - auto [optParsedType, isSRI] = getParsedTypeAndSRI(rest); - - // Either the string or user must provide the type, if they both do they - // must agree. 
- if (!optParsedType) - throw BadHash("hash '%s' does not include a type", rest); - - return Hash(rest, *optParsedType, isSRI); -} - -Hash Hash::parseAny(std::string_view original, std::optional optAlgo) -{ - auto rest = original; - auto [optParsedType, isSRI] = getParsedTypeAndSRI(rest); - - // Either the string or user must provide the type, if they both do they - // must agree. - if (!optParsedType && !optAlgo) - throw BadHash("hash '%s' does not include a type, nor is the type otherwise known from context", rest); - else if (optParsedType && optAlgo && *optParsedType != *optAlgo) - throw BadHash("hash '%s' should have type '%s'", original, printHashAlgo(*optAlgo)); - - HashAlgorithm hashAlgo = optParsedType ? *optParsedType : *optAlgo; - return Hash(rest, hashAlgo, isSRI); -} - -Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo) -{ - return Hash(s, algo, false); -} - -Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI) - : Hash(algo) -{ - auto [decode, formatName] = [&]() -> std::pair { + auto [decode, formatName] = [&]() -> DecodeNamePair { if (isSRI) { /* In the SRI case, we always are using Base64. If the length is wrong, get an error later. */ @@ -174,30 +195,42 @@ Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI) } else { /* Otherwise, decide via the length of the hash (for the given algorithm) what base encoding it is. */ - - if (rest.size() == base16::encodedLength(hashSize)) - return {base16::decode, "base16"}; - - if (rest.size() == BaseNix32::encodedLength(hashSize)) - return {BaseNix32::decode, "nix32"}; - - if (rest.size() == base64::encodedLength(hashSize)) - return {base64::decode, "Base64"}; + return baseFromSize(rest, algo); } - - throw BadHash("hash '%s' has wrong length for hash algorithm '%s'", rest, printHashAlgo(this->algo)); }(); - std::string d; - try { - d = decode(rest); - } catch (Error & e) { - e.addTrace({}, "While decoding hash '%s'", rest); - } - if (d.size() != hashSize) - throw BadHash("invalid %s hash '%s' %d %d", formatName, rest); - assert(hashSize); - memcpy(hash, d.data(), hashSize); + return parseLowLevel(rest, algo, {decode, formatName}); +} + +Hash Hash::parseAnyPrefixed(std::string_view original) +{ + return parseAnyHelper(original, [&](std::optional optParsedAlgo) { + // Either the string or user must provide the type, if they both do they + // must agree. + if (!optParsedAlgo) + throw BadHash("hash '%s' does not include a type", original); + + return *optParsedAlgo; + }); +} + +Hash Hash::parseAny(std::string_view original, std::optional optAlgo) +{ + return parseAnyHelper(original, [&](std::optional optParsedAlgo) { + // Either the string or user must provide the type, if they both do they + // must agree. + if (!optParsedAlgo && !optAlgo) + throw BadHash("hash '%s' does not include a type, nor is the type otherwise known from context", original); + else if (optParsedAlgo && optAlgo && *optParsedAlgo != *optAlgo) + throw BadHash("hash '%s' should have type '%s'", original, printHashAlgo(*optAlgo)); + + return optParsedAlgo ? 
*optParsedAlgo : *optAlgo; + }); +} + +Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo) +{ + return parseLowLevel(s, algo, baseFromSize(s, algo)); } Hash Hash::random(HashAlgorithm algo) diff --git a/src/libutil/include/nix/util/hash.hh b/src/libutil/include/nix/util/hash.hh index fdd4c6fa7..ea9c71ac7 100644 --- a/src/libutil/include/nix/util/hash.hh +++ b/src/libutil/include/nix/util/hash.hh @@ -92,13 +92,6 @@ struct Hash static Hash parseSRI(std::string_view original); -private: - /** - * The type must be provided, the string view must not include - * prefix. `isSRI` helps disambigate the various base-* encodings. - */ - Hash(std::string_view s, HashAlgorithm algo, bool isSRI); - public: /** * Check whether two hashes are equal. From e07440665c80e941b1049b7e94a025853eba4dd2 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 6 Aug 2025 19:54:56 -0400 Subject: [PATCH 122/382] Move some `MixStoreDirMethods` members to the right `.cc` file I had not wanted to cause unncessary churn before, but now that we've bitten the bullet with the Big Reformat, I feel it is the right time. Future readers will appreciate that the declarations and definitions files are one-to-one as they should be, and `store-api.cc` is good to shrink in any event. I don't think there are outstanding PRs changing this code either. (I had some for a while, but they are all merged.) --- src/libstore/path.cc | 54 ---------- src/libstore/store-api.cc | 113 -------------------- src/libstore/store-dir-config.cc | 171 ++++++++++++++++++++++++++++++- 3 files changed, 170 insertions(+), 168 deletions(-) diff --git a/src/libstore/path.cc b/src/libstore/path.cc index 3f7745288..516b01571 100644 --- a/src/libstore/path.cc +++ b/src/libstore/path.cc @@ -74,58 +74,4 @@ StorePath StorePath::random(std::string_view name) return StorePath(Hash::random(HashAlgorithm::SHA1), name); } -StorePath MixStoreDirMethods::parseStorePath(std::string_view path) const -{ - // On Windows, `/nix/store` is not a canonical path. More broadly it - // is unclear whether this function should be using the native - // notion of a canonical path at all. For example, it makes to - // support remote stores whose store dir is a non-native path (e.g. - // Windows <-> Unix ssh-ing). 
- auto p = -#ifdef _WIN32 - path -#else - canonPath(std::string(path)) -#endif - ; - if (dirOf(p) != storeDir) - throw BadStorePath("path '%s' is not in the Nix store", p); - return StorePath(baseNameOf(p)); -} - -std::optional MixStoreDirMethods::maybeParseStorePath(std::string_view path) const -{ - try { - return parseStorePath(path); - } catch (Error &) { - return {}; - } -} - -bool MixStoreDirMethods::isStorePath(std::string_view path) const -{ - return (bool) maybeParseStorePath(path); -} - -StorePathSet MixStoreDirMethods::parseStorePathSet(const PathSet & paths) const -{ - StorePathSet res; - for (auto & i : paths) - res.insert(parseStorePath(i)); - return res; -} - -std::string MixStoreDirMethods::printStorePath(const StorePath & path) const -{ - return (storeDir + "/").append(path.to_string()); -} - -PathSet MixStoreDirMethods::printStorePathSet(const StorePathSet & paths) const -{ - PathSet res; - for (auto & i : paths) - res.insert(printStorePath(i)); - return res; -} - } // namespace nix diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 1465d9b42..468aeecf1 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -64,119 +64,6 @@ StorePath Store::followLinksToStorePath(std::string_view path) const return toStorePath(followLinksToStore(path)).first; } -/* -The exact specification of store paths is in `protocols/store-path.md` -in the Nix manual. These few functions implement that specification. - -If changes to these functions go beyond mere implementation changes i.e. -also update the user-visible behavior, please update the specification -to match. -*/ - -StorePath MixStoreDirMethods::makeStorePath(std::string_view type, std::string_view hash, std::string_view name) const -{ - /* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */ - auto s = std::string(type) + ":" + std::string(hash) + ":" + storeDir + ":" + std::string(name); - auto h = compressHash(hashString(HashAlgorithm::SHA256, s), 20); - return StorePath(h, name); -} - -StorePath MixStoreDirMethods::makeStorePath(std::string_view type, const Hash & hash, std::string_view name) const -{ - return makeStorePath(type, hash.to_string(HashFormat::Base16, true), name); -} - -StorePath MixStoreDirMethods::makeOutputPath(std::string_view id, const Hash & hash, std::string_view name) const -{ - return makeStorePath("output:" + std::string{id}, hash, outputPathName(name, id)); -} - -/* Stuff the references (if any) into the type. This is a bit - hacky, but we can't put them in, say, (per the grammar above) - since that would be ambiguous. 
*/ -static std::string makeType(const MixStoreDirMethods & store, std::string && type, const StoreReferences & references) -{ - for (auto & i : references.others) { - type += ":"; - type += store.printStorePath(i); - } - if (references.self) - type += ":self"; - return std::move(type); -} - -StorePath MixStoreDirMethods::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const -{ - if (info.method == FileIngestionMethod::Git - && !(info.hash.algo == HashAlgorithm::SHA1 || info.hash.algo == HashAlgorithm::SHA256)) { - throw Error( - "Git file ingestion must use SHA-1 or SHA-256 hash, but instead using: %s", printHashAlgo(info.hash.algo)); - } - - if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::NixArchive) { - return makeStorePath(makeType(*this, "source", info.references), info.hash, name); - } else { - if (!info.references.empty()) { - throw Error( - "fixed output derivation '%s' is not allowed to refer to other store paths.\nYou may need to use the 'unsafeDiscardReferences' derivation attribute, see the manual for more details.", - name); - } - // make a unique digest based on the parameters for creating this store object - auto payload = - "fixed:out:" + makeFileIngestionPrefix(info.method) + info.hash.to_string(HashFormat::Base16, true) + ":"; - auto digest = hashString(HashAlgorithm::SHA256, payload); - return makeStorePath("output:out", digest, name); - } -} - -StorePath -MixStoreDirMethods::makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const -{ - // New template - return std::visit( - overloaded{ - [&](const TextInfo & ti) { - assert(ti.hash.algo == HashAlgorithm::SHA256); - return makeStorePath( - makeType( - *this, - "text", - StoreReferences{ - .others = ti.references, - .self = false, - }), - ti.hash, - name); - }, - [&](const FixedOutputInfo & foi) { return makeFixedOutputPath(name, foi); }}, - ca.raw); -} - -std::pair MixStoreDirMethods::computeStorePath( - std::string_view name, - const SourcePath & path, - ContentAddressMethod method, - HashAlgorithm hashAlgo, - const StorePathSet & references, - PathFilter & filter) const -{ - auto [h, size] = hashPath(path, method.getFileIngestionMethod(), hashAlgo, filter); - if (settings.warnLargePathThreshold && size && *size >= settings.warnLargePathThreshold) - warn("hashed large path '%s' (%s)", path, renderSize(*size)); - return { - makeFixedOutputPathFromCA( - name, - ContentAddressWithReferences::fromParts( - method, - h, - { - .others = references, - .self = false, - })), - h, - }; -} - StorePath Store::addToStore( std::string_view name, const SourcePath & path, diff --git a/src/libstore/store-dir-config.cc b/src/libstore/store-dir-config.cc index 069c484ba..62f08d819 100644 --- a/src/libstore/store-dir-config.cc +++ b/src/libstore/store-dir-config.cc @@ -1,9 +1,178 @@ -#include "nix/store/store-dir-config.hh" +#include "nix/util/source-path.hh" #include "nix/util/util.hh" +#include "nix/store/store-dir-config.hh" +#include "nix/store/derivations.hh" #include "nix/store/globals.hh" namespace nix { +StorePath MixStoreDirMethods::parseStorePath(std::string_view path) const +{ + // On Windows, `/nix/store` is not a canonical path. More broadly it + // is unclear whether this function should be using the native + // notion of a canonical path at all. For example, it makes to + // support remote stores whose store dir is a non-native path (e.g. + // Windows <-> Unix ssh-ing). 
+ auto p = +#ifdef _WIN32 + path +#else + canonPath(std::string(path)) +#endif + ; + if (dirOf(p) != storeDir) + throw BadStorePath("path '%s' is not in the Nix store", p); + return StorePath(baseNameOf(p)); +} + +std::optional MixStoreDirMethods::maybeParseStorePath(std::string_view path) const +{ + try { + return parseStorePath(path); + } catch (Error &) { + return {}; + } +} + +bool MixStoreDirMethods::isStorePath(std::string_view path) const +{ + return (bool) maybeParseStorePath(path); +} + +StorePathSet MixStoreDirMethods::parseStorePathSet(const PathSet & paths) const +{ + StorePathSet res; + for (auto & i : paths) + res.insert(parseStorePath(i)); + return res; +} + +std::string MixStoreDirMethods::printStorePath(const StorePath & path) const +{ + return (storeDir + "/").append(path.to_string()); +} + +PathSet MixStoreDirMethods::printStorePathSet(const StorePathSet & paths) const +{ + PathSet res; + for (auto & i : paths) + res.insert(printStorePath(i)); + return res; +} + +/* +The exact specification of store paths is in `protocols/store-path.md` +in the Nix manual. These few functions implement that specification. + +If changes to these functions go beyond mere implementation changes i.e. +also update the user-visible behavior, please update the specification +to match. +*/ + +StorePath MixStoreDirMethods::makeStorePath(std::string_view type, std::string_view hash, std::string_view name) const +{ + /* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */ + auto s = std::string(type) + ":" + std::string(hash) + ":" + storeDir + ":" + std::string(name); + auto h = compressHash(hashString(HashAlgorithm::SHA256, s), 20); + return StorePath(h, name); +} + +StorePath MixStoreDirMethods::makeStorePath(std::string_view type, const Hash & hash, std::string_view name) const +{ + return makeStorePath(type, hash.to_string(HashFormat::Base16, true), name); +} + +StorePath MixStoreDirMethods::makeOutputPath(std::string_view id, const Hash & hash, std::string_view name) const +{ + return makeStorePath("output:" + std::string{id}, hash, outputPathName(name, id)); +} + +/* Stuff the references (if any) into the type. This is a bit + hacky, but we can't put them in, say, (per the grammar above) + since that would be ambiguous. 
*/ +static std::string makeType(const MixStoreDirMethods & store, std::string && type, const StoreReferences & references) +{ + for (auto & i : references.others) { + type += ":"; + type += store.printStorePath(i); + } + if (references.self) + type += ":self"; + return std::move(type); +} + +StorePath MixStoreDirMethods::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const +{ + if (info.method == FileIngestionMethod::Git + && !(info.hash.algo == HashAlgorithm::SHA1 || info.hash.algo == HashAlgorithm::SHA256)) { + throw Error( + "Git file ingestion must use SHA-1 or SHA-256 hash, but instead using: %s", printHashAlgo(info.hash.algo)); + } + + if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::NixArchive) { + return makeStorePath(makeType(*this, "source", info.references), info.hash, name); + } else { + if (!info.references.empty()) { + throw Error( + "fixed output derivation '%s' is not allowed to refer to other store paths.\nYou may need to use the 'unsafeDiscardReferences' derivation attribute, see the manual for more details.", + name); + } + // make a unique digest based on the parameters for creating this store object + auto payload = + "fixed:out:" + makeFileIngestionPrefix(info.method) + info.hash.to_string(HashFormat::Base16, true) + ":"; + auto digest = hashString(HashAlgorithm::SHA256, payload); + return makeStorePath("output:out", digest, name); + } +} + +StorePath +MixStoreDirMethods::makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const +{ + // New template + return std::visit( + overloaded{ + [&](const TextInfo & ti) { + assert(ti.hash.algo == HashAlgorithm::SHA256); + return makeStorePath( + makeType( + *this, + "text", + StoreReferences{ + .others = ti.references, + .self = false, + }), + ti.hash, + name); + }, + [&](const FixedOutputInfo & foi) { return makeFixedOutputPath(name, foi); }}, + ca.raw); +} + +std::pair MixStoreDirMethods::computeStorePath( + std::string_view name, + const SourcePath & path, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + PathFilter & filter) const +{ + auto [h, size] = hashPath(path, method.getFileIngestionMethod(), hashAlgo, filter); + if (settings.warnLargePathThreshold && size && *size >= settings.warnLargePathThreshold) + warn("hashed large path '%s' (%s)", path, renderSize(*size)); + return { + makeFixedOutputPathFromCA( + name, + ContentAddressWithReferences::fromParts( + method, + h, + { + .others = references, + .self = false, + })), + h, + }; +} + StoreDirConfig::StoreDirConfig(const Params & params) : StoreDirConfigBase(params) , MixStoreDirMethods{storeDir_} From a3ff648f90c31cba2cc9e63fa93d61a310d33d95 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 4 Aug 2025 19:01:33 +0200 Subject: [PATCH 123/382] Call GC_allow_register_threads() to enable parallel marking in Boehm GC --- src/libexpr/eval-gc.cc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/libexpr/eval-gc.cc b/src/libexpr/eval-gc.cc index 5a4ecf035..ce2bcae8a 100644 --- a/src/libexpr/eval-gc.cc +++ b/src/libexpr/eval-gc.cc @@ -53,6 +53,9 @@ static inline void initGCReal() GC_INIT(); + /* Enable parallel marking. */ + GC_allow_register_threads(); + /* Register valid displacements in case we are using alignment niches for storing the type information. This way tagged pointers are considered to be valid, even when they are not aligned. 
*/ From 4c1c4f79a3286d76ec5f2e4746b89af009b55e46 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 4 Aug 2025 10:57:24 +0200 Subject: [PATCH 124/382] Move setting GC_THREADS into eval-gc.hh --- src/libexpr/eval-gc.cc | 2 -- src/libexpr/include/nix/expr/eval-gc.hh | 3 ++- src/libexpr/include/nix/expr/eval-inline.hh | 3 --- src/libexpr/meson.build | 2 -- 4 files changed, 2 insertions(+), 8 deletions(-) diff --git a/src/libexpr/eval-gc.cc b/src/libexpr/eval-gc.cc index ce2bcae8a..b17336a90 100644 --- a/src/libexpr/eval-gc.cc +++ b/src/libexpr/eval-gc.cc @@ -15,8 +15,6 @@ # include # endif -# include -# include # include # include diff --git a/src/libexpr/include/nix/expr/eval-gc.hh b/src/libexpr/include/nix/expr/eval-gc.hh index 25144d40c..813c2920d 100644 --- a/src/libexpr/include/nix/expr/eval-gc.hh +++ b/src/libexpr/include/nix/expr/eval-gc.hh @@ -3,12 +3,13 @@ #include -// For `NIX_USE_BOEHMGC`, and if that's set, `GC_THREADS` +// For `NIX_USE_BOEHMGC` #include "nix/expr/config.hh" #if NIX_USE_BOEHMGC # define GC_INCLUDE_NEW +# define GC_THREADS 1 # include # include diff --git a/src/libexpr/include/nix/expr/eval-inline.hh b/src/libexpr/include/nix/expr/eval-inline.hh index a1fd0ae4a..749e51537 100644 --- a/src/libexpr/include/nix/expr/eval-inline.hh +++ b/src/libexpr/include/nix/expr/eval-inline.hh @@ -6,9 +6,6 @@ #include "nix/expr/eval-error.hh" #include "nix/expr/eval-settings.hh" -// For `NIX_USE_BOEHMGC`, and if that's set, `GC_THREADS` -#include "nix/expr/config.hh" - namespace nix { /** diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index adf26008d..e1a12106d 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -61,8 +61,6 @@ if bdw_gc.found() define_value = cxx.has_function(funcspec).to_int() configdata_priv.set(define_name, define_value) endforeach - # Affects ABI, because it changes what bdw_gc itself does! - configdata_pub.set('GC_THREADS', 1) endif # Used in public header. Affects ABI! configdata_pub.set('NIX_USE_BOEHMGC', bdw_gc.found().to_int()) From 0d559f0c1301af9bd2c6751761bff6490962d0ee Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 4 Aug 2025 21:07:26 +0200 Subject: [PATCH 125/382] Increase the initial Boehm GC mark stack size If the mark stack size is too small, it greatly inhibits parallel marking, which is very bad for performance on multi-core systems. --- packaging/dependencies.nix | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix index dda9ef8dc..17ba06b4d 100644 --- a/packaging/dependencies.nix +++ b/packaging/dependencies.nix @@ -50,9 +50,19 @@ scope: { requiredSystemFeatures = [ ]; }; - boehmgc = pkgs.boehmgc.override { - enableLargeConfig = true; - }; + boehmgc = + (pkgs.boehmgc.override { + enableLargeConfig = true; + }).overrideAttrs + (attrs: { + # Increase the initial mark stack size to avoid stack + # overflows, since these inhibit parallel marking (see + # GC_mark_some()). To check whether the mark stack is too + # small, run Nix with GC_PRINT_STATS=1 and look for messages + # such as `Mark stack overflow`, `No room to copy back mark + # stack`, and `Grew mark stack to ... frames`. + NIX_CFLAGS_COMPILE = "-DINITIAL_MARK_STACK_SIZE=1048576"; + }); # TODO Hack until https://github.com/NixOS/nixpkgs/issues/45462 is fixed. 
boost = From 3ba103865d4273c07b4bea702ea594f67fc14e89 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 6 Aug 2025 16:37:11 +0200 Subject: [PATCH 126/382] Restore fixupBoehmStackPointer This was removed in https://github.com/NixOS/nix/pull/11152. However, we need it for the multi-threaded evaluator, because otherwise Boehm GC will crash while scanning the thread stack: #0 GC_push_all_eager (bottom=, top=) at extra/../mark.c:1488 #1 0x00007ffff74691d5 in GC_push_all_stack_sections (lo=, hi=, traced_stack_sect=0x0) at extra/../mark_rts.c:704 #2 GC_push_all_stacks () at extra/../pthread_stop_world.c:876 #3 GC_default_push_other_roots () at extra/../os_dep.c:2893 #4 0x00007ffff746235c in GC_mark_some (cold_gc_frame=0x7ffee8ecaa50 "`\304G\367\377\177") at extra/../mark.c:374 #5 0x00007ffff7465a8d in GC_stopped_mark (stop_func=stop_func@entry=0x7ffff7453c80 ) at extra/../alloc.c:875 #6 0x00007ffff7466724 in GC_try_to_collect_inner (stop_func=0x7ffff7453c80 ) at extra/../alloc.c:624 #7 0x00007ffff7466a22 in GC_collect_or_expand (needed_blocks=needed_blocks@entry=1, ignore_off_page=ignore_off_page@entry=0, retry=retry@entry=0) at extra/../alloc.c:1688 #8 0x00007ffff746878f in GC_allocobj (gran=, kind=) at extra/../alloc.c:1798 #9 GC_generic_malloc_inner (lb=, k=k@entry=1) at extra/../malloc.c:193 #10 0x00007ffff746cd40 in GC_generic_malloc_many (lb=, k=, result=) at extra/../mallocx.c:477 #11 0x00007ffff746cf35 in GC_malloc_kind (bytes=120, kind=1) at extra/../thread_local_alloc.c:187 #12 0x00007ffff796ede5 in nix::allocBytes (n=, n=) at ../src/libexpr/include/nix/expr/eval-inline.hh:19 This is because it will use the stack pointer of the coroutine, so it will scan a region of memory that doesn't exist, e.g. Stack for thread 0x7ffea4ff96c0 is [0x7ffe80197af0w,0x7ffea4ffa000) (where 0x7ffe80197af0w is the sp of the coroutine and 0x7ffea4ffa000 is the base of the thread stack). We don't scan coroutine stacks, because currently they don't have GC roots (there is no evaluation happening in coroutines). So there is currently no need to restore the other parts of the original patch, such as BoehmGCStackAllocator. --- src/libexpr/eval-gc.cc | 64 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/src/libexpr/eval-gc.cc b/src/libexpr/eval-gc.cc index 5a4ecf035..cd37650ec 100644 --- a/src/libexpr/eval-gc.cc +++ b/src/libexpr/eval-gc.cc @@ -35,6 +35,67 @@ static void * oomHandler(size_t requested) throw std::bad_alloc(); } +/** + * When a thread goes into a coroutine, we lose its original sp until + * control flow returns to the thread. This causes Boehm GC to crash + * since it will scan memory between the coroutine's sp and the + * original stack base of the thread. Therefore, we detect when the + * current sp is outside of the original thread stack and push the + * entire thread stack instead, as an approximation. + * + * This is not optimal, because it causes the stack below sp to be + * scanned. However, we usually we don't have active coroutines during + * evaluation, so this is acceptable. + * + * Note that we don't scan coroutine stacks. It's currently assumed + * that we don't have GC roots in coroutines. + */ +void fixupBoehmStackPointer(void ** sp_ptr, void * _pthread_id) +{ + void *& sp = *sp_ptr; + auto pthread_id = reinterpret_cast(_pthread_id); + size_t osStackSize; + // The low address of the stack, which grows down. 
+ void * osStackLimit; + +# ifdef __APPLE__ + osStackSize = pthread_get_stacksize_np(pthread_id); + osStackLimit = pthread_get_stackaddr_np(pthread_id); +# else + pthread_attr_t pattr; + if (pthread_attr_init(&pattr)) { + throw Error("fixupBoehmStackPointer: pthread_attr_init failed"); + } +# ifdef HAVE_PTHREAD_GETATTR_NP + if (pthread_getattr_np(pthread_id, &pattr)) { + throw Error("fixupBoehmStackPointer: pthread_getattr_np failed"); + } +# elif HAVE_PTHREAD_ATTR_GET_NP + if (!pthread_attr_init(&pattr)) { + throw Error("fixupBoehmStackPointer: pthread_attr_init failed"); + } + if (!pthread_attr_get_np(pthread_id, &pattr)) { + throw Error("fixupBoehmStackPointer: pthread_attr_get_np failed"); + } +# else +# error "Need one of `pthread_attr_get_np` or `pthread_getattr_np`" +# endif + if (pthread_attr_getstack(&pattr, &osStackLimit, &osStackSize)) { + throw Error("fixupBoehmStackPointer: pthread_attr_getstack failed"); + } + if (pthread_attr_destroy(&pattr)) { + throw Error("fixupBoehmStackPointer: pthread_attr_destroy failed"); + } +# endif + + void * osStackBase = (char *) osStackLimit + osStackSize; + // NOTE: We assume the stack grows down, as it does on all architectures we support. + // Architectures that grow the stack up are rare. + if (sp >= osStackBase || sp < osStackLimit) { // sp is outside the os stack + sp = osStackLimit; + } +} + static inline void initGCReal() { /* Initialise the Boehm garbage collector. */ @@ -62,6 +123,9 @@ static inline void initGCReal() GC_set_oom_fn(oomHandler); + GC_set_sp_corrector(&fixupBoehmStackPointer); + assert(GC_get_sp_corrector()); + /* Set the initial heap size to something fairly big (25% of physical RAM, up to a maximum of 384 MiB) so that in most cases we don't need to garbage collect at all. (Collection has a From d4ef822add1074483627c5dbbaa9077f15daf7bc Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 7 Aug 2025 13:47:16 -0400 Subject: [PATCH 127/382] Factor out `ChrootDerivationBuilder` This will allow us to have non-Linux chroot-using sandboxed builds. --- .../unix/build/chroot-derivation-builder.cc | 208 ++++++++++++++++++ src/libstore/unix/build/derivation-builder.cc | 10 +- .../unix/build/linux-derivation-builder.cc | 195 +--------------- 3 files changed, 225 insertions(+), 188 deletions(-) create mode 100644 src/libstore/unix/build/chroot-derivation-builder.cc diff --git a/src/libstore/unix/build/chroot-derivation-builder.cc b/src/libstore/unix/build/chroot-derivation-builder.cc new file mode 100644 index 000000000..ccf4f8e20 --- /dev/null +++ b/src/libstore/unix/build/chroot-derivation-builder.cc @@ -0,0 +1,208 @@ +#ifdef __linux__ + +namespace nix { + +struct ChrootDerivationBuilder : virtual DerivationBuilderImpl +{ + ChrootDerivationBuilder( + Store & store, std::unique_ptr miscMethods, DerivationBuilderParams params) + : DerivationBuilderImpl{store, std::move(miscMethods), std::move(params)} + { + } + + /** + * The root of the chroot environment. + */ + Path chrootRootDir; + + /** + * RAII object to delete the chroot directory. + */ + std::shared_ptr autoDelChroot; + + PathsInChroot pathsInChroot; + + void deleteTmpDir(bool force) override + { + autoDelChroot.reset(); /* this runs the destructor */ + + DerivationBuilderImpl::deleteTmpDir(force); + } + + bool needsHashRewrite() override + { + return false; + } + + void setBuildTmpDir() override + { + /* If sandboxing is enabled, put the actual TMPDIR underneath + an inaccessible root-owned directory, to prevent outside + access. 
+ + On macOS, we don't use an actual chroot, so this isn't + possible. Any mitigation along these lines would have to be + done directly in the sandbox profile. */ + tmpDir = topTmpDir + "/build"; + createDir(tmpDir, 0700); + } + + Path tmpDirInSandbox() override + { + /* In a sandbox, for determinism, always use the same temporary + directory. */ + return settings.sandboxBuildDir; + } + + virtual gid_t sandboxGid() + { + return buildUser->getGID(); + } + + void prepareSandbox() override + { + /* Create a temporary directory in which we set up the chroot + environment using bind-mounts. We put it in the Nix store + so that the build outputs can be moved efficiently from the + chroot to their final location. */ + auto chrootParentDir = store.Store::toRealPath(drvPath) + ".chroot"; + deletePath(chrootParentDir); + + /* Clean up the chroot directory automatically. */ + autoDelChroot = std::make_shared(chrootParentDir); + + printMsg(lvlChatty, "setting up chroot environment in '%1%'", chrootParentDir); + + if (mkdir(chrootParentDir.c_str(), 0700) == -1) + throw SysError("cannot create '%s'", chrootRootDir); + + chrootRootDir = chrootParentDir + "/root"; + + if (mkdir(chrootRootDir.c_str(), buildUser && buildUser->getUIDCount() != 1 ? 0755 : 0750) == -1) + throw SysError("cannot create '%1%'", chrootRootDir); + + if (buildUser + && chown( + chrootRootDir.c_str(), buildUser->getUIDCount() != 1 ? buildUser->getUID() : 0, buildUser->getGID()) + == -1) + throw SysError("cannot change ownership of '%1%'", chrootRootDir); + + /* Create a writable /tmp in the chroot. Many builders need + this. (Of course they should really respect $TMPDIR + instead.) */ + Path chrootTmpDir = chrootRootDir + "/tmp"; + createDirs(chrootTmpDir); + chmod_(chrootTmpDir, 01777); + + /* Create a /etc/passwd with entries for the build user and the + nobody account. The latter is kind of a hack to support + Samba-in-QEMU. */ + createDirs(chrootRootDir + "/etc"); + if (drvOptions.useUidRange(drv)) + chownToBuilder(chrootRootDir + "/etc"); + + if (drvOptions.useUidRange(drv) && (!buildUser || buildUser->getUIDCount() < 65536)) + throw Error("feature 'uid-range' requires the setting '%s' to be enabled", settings.autoAllocateUids.name); + + /* Declare the build user's group so that programs get a consistent + view of the system (e.g., "id -gn"). */ + writeFile( + chrootRootDir + "/etc/group", + fmt("root:x:0:\n" + "nixbld:!:%1%:\n" + "nogroup:x:65534:\n", + sandboxGid())); + + /* Create /etc/hosts with localhost entry. */ + if (derivationType.isSandboxed()) + writeFile(chrootRootDir + "/etc/hosts", "127.0.0.1 localhost\n::1 localhost\n"); + + /* Make the closure of the inputs available in the chroot, + rather than the whole Nix store. This prevents any access + to undeclared dependencies. Directories are bind-mounted, + while other inputs are hard-linked (since only directories + can be bind-mounted). !!! As an extra security + precaution, make the fake Nix store only writable by the + build user. 
*/ + Path chrootStoreDir = chrootRootDir + store.storeDir; + createDirs(chrootStoreDir); + chmod_(chrootStoreDir, 01775); + + if (buildUser && chown(chrootStoreDir.c_str(), 0, buildUser->getGID()) == -1) + throw SysError("cannot change ownership of '%1%'", chrootStoreDir); + + pathsInChroot = getPathsInSandbox(); + + for (auto & i : inputPaths) { + auto p = store.printStorePath(i); + pathsInChroot.insert_or_assign(p, store.toRealPath(p)); + } + + /* If we're repairing, checking or rebuilding part of a + multiple-outputs derivation, it's possible that we're + rebuilding a path that is in settings.sandbox-paths + (typically the dependencies of /bin/sh). Throw them + out. */ + for (auto & i : drv.outputsAndOptPaths(store)) { + /* If the name isn't known a priori (i.e. floating + content-addressing derivation), the temporary location we use + should be fresh. Freshness means it is impossible that the path + is already in the sandbox, so we don't need to worry about + removing it. */ + if (i.second.second) + pathsInChroot.erase(store.printStorePath(*i.second.second)); + } + } + + Strings getPreBuildHookArgs() override + { + assert(!chrootRootDir.empty()); + return Strings({store.printStorePath(drvPath), chrootRootDir}); + } + + Path realPathInSandbox(const Path & p) override + { + // FIXME: why the needsHashRewrite() conditional? + return !needsHashRewrite() ? chrootRootDir + p : store.toRealPath(p); + } + + void cleanupBuild() override + { + DerivationBuilderImpl::cleanupBuild(); + + /* Move paths out of the chroot for easier debugging of + build failures. */ + if (buildMode == bmNormal) + for (auto & [_, status] : initialOutputs) { + if (!status.known) + continue; + if (buildMode != bmCheck && status.known->isValid()) + continue; + auto p = store.toRealPath(status.known->path); + if (pathExists(chrootRootDir + p)) + std::filesystem::rename((chrootRootDir + p), p); + } + } + + std::pair addDependencyPrep(const StorePath & path) + { + DerivationBuilderImpl::addDependency(path); + + debug("materialising '%s' in the sandbox", store.printStorePath(path)); + + Path source = store.Store::toRealPath(path); + Path target = chrootRootDir + store.printStorePath(path); + + if (pathExists(target)) { + // There is a similar debug message in doBind, so only run it in this block to not have double messages. + debug("bind-mounting %s -> %s", target, source); + throw Error("store path '%s' already exists in the sandbox", store.printStorePath(path)); + } + + return {source, target}; + } +}; + +} // namespace nix + +#endif diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 7bb4f3177..038c844fa 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -302,12 +302,10 @@ public: void stopDaemon() override; -private: +protected: void addDependency(const StorePath & path) override; -protected: - /** * Make a file owned by the builder. 
* @@ -2159,6 +2157,7 @@ StorePath DerivationBuilderImpl::makeFallbackPath(const StorePath & path) } // namespace nix // FIXME: do this properly +#include "chroot-derivation-builder.cc" #include "linux-derivation-builder.cc" #include "darwin-derivation-builder.cc" @@ -2210,8 +2209,6 @@ std::unique_ptr makeDerivationBuilder( useSandbox = false; } - if (useSandbox) - return std::make_unique(store, std::move(miscMethods), std::move(params)); #endif if (!useSandbox && params.drvOptions.useUidRange(params.drv)) @@ -2220,6 +2217,9 @@ std::unique_ptr makeDerivationBuilder( #ifdef __APPLE__ return std::make_unique(store, std::move(miscMethods), std::move(params), useSandbox); #elif defined(__linux__) + if (useSandbox) + return std::make_unique(store, std::move(miscMethods), std::move(params)); + return std::make_unique(store, std::move(miscMethods), std::move(params)); #else if (useSandbox) diff --git a/src/libstore/unix/build/linux-derivation-builder.cc b/src/libstore/unix/build/linux-derivation-builder.cc index d56990d48..3e67cdd42 100644 --- a/src/libstore/unix/build/linux-derivation-builder.cc +++ b/src/libstore/unix/build/linux-derivation-builder.cc @@ -153,7 +153,7 @@ static void doBind(const Path & source, const Path & target, bool optional = fal } } -struct LinuxDerivationBuilder : DerivationBuilderImpl +struct LinuxDerivationBuilder : virtual DerivationBuilderImpl { using DerivationBuilderImpl::DerivationBuilderImpl; @@ -165,7 +165,7 @@ struct LinuxDerivationBuilder : DerivationBuilderImpl } }; -struct ChrootLinuxDerivationBuilder : LinuxDerivationBuilder +struct ChrootLinuxDerivationBuilder : ChrootDerivationBuilder, LinuxDerivationBuilder { /** * Pipe for synchronising updates to the builder namespaces. @@ -185,30 +185,17 @@ struct ChrootLinuxDerivationBuilder : LinuxDerivationBuilder */ bool usingUserNamespace = true; - /** - * The root of the chroot environment. - */ - Path chrootRootDir; - - /** - * RAII object to delete the chroot directory. - */ - std::shared_ptr autoDelChroot; - - PathsInChroot pathsInChroot; - /** * The cgroup of the builder, if any. */ std::optional cgroup; - using LinuxDerivationBuilder::LinuxDerivationBuilder; - - void deleteTmpDir(bool force) override + ChrootLinuxDerivationBuilder( + Store & store, std::unique_ptr miscMethods, DerivationBuilderParams params) + : DerivationBuilderImpl{store, std::move(miscMethods), std::move(params)} + , ChrootDerivationBuilder{store, std::move(miscMethods), std::move(params)} + , LinuxDerivationBuilder{store, std::move(miscMethods), std::move(params)} { - autoDelChroot.reset(); /* this runs the destructor */ - - DerivationBuilderImpl::deleteTmpDir(force); } uid_t sandboxUid() @@ -216,14 +203,10 @@ struct ChrootLinuxDerivationBuilder : LinuxDerivationBuilder return usingUserNamespace ? (!buildUser || buildUser->getUIDCount() == 1 ? 1000 : 0) : buildUser->getUID(); } - gid_t sandboxGid() + gid_t sandboxGid() override { - return usingUserNamespace ? (!buildUser || buildUser->getUIDCount() == 1 ? 100 : 0) : buildUser->getGID(); - } - - bool needsHashRewrite() override - { - return false; + return usingUserNamespace ? (!buildUser || buildUser->getUIDCount() == 1 ? 100 : 0) + : ChrootDerivationBuilder::sandboxGid(); } std::unique_ptr getBuildUser() override @@ -231,26 +214,6 @@ struct ChrootLinuxDerivationBuilder : LinuxDerivationBuilder return acquireUserLock(drvOptions.useUidRange(drv) ? 
65536 : 1, true); } - void setBuildTmpDir() override - { - /* If sandboxing is enabled, put the actual TMPDIR underneath - an inaccessible root-owned directory, to prevent outside - access. - - On macOS, we don't use an actual chroot, so this isn't - possible. Any mitigation along these lines would have to be - done directly in the sandbox profile. */ - tmpDir = topTmpDir + "/build"; - createDir(tmpDir, 0700); - } - - Path tmpDirInSandbox() override - { - /* In a sandbox, for determinism, always use the same temporary - directory. */ - return settings.sandboxBuildDir; - } - void prepareUser() override { if ((buildUser && buildUser->getUIDCount() != 1) || settings.useCgroups) { @@ -298,97 +261,7 @@ struct ChrootLinuxDerivationBuilder : LinuxDerivationBuilder void prepareSandbox() override { - /* Create a temporary directory in which we set up the chroot - environment using bind-mounts. We put it in the Nix store - so that the build outputs can be moved efficiently from the - chroot to their final location. */ - auto chrootParentDir = store.Store::toRealPath(drvPath) + ".chroot"; - deletePath(chrootParentDir); - - /* Clean up the chroot directory automatically. */ - autoDelChroot = std::make_shared(chrootParentDir); - - printMsg(lvlChatty, "setting up chroot environment in '%1%'", chrootParentDir); - - if (mkdir(chrootParentDir.c_str(), 0700) == -1) - throw SysError("cannot create '%s'", chrootRootDir); - - chrootRootDir = chrootParentDir + "/root"; - - if (mkdir(chrootRootDir.c_str(), buildUser && buildUser->getUIDCount() != 1 ? 0755 : 0750) == -1) - throw SysError("cannot create '%1%'", chrootRootDir); - - if (buildUser - && chown( - chrootRootDir.c_str(), buildUser->getUIDCount() != 1 ? buildUser->getUID() : 0, buildUser->getGID()) - == -1) - throw SysError("cannot change ownership of '%1%'", chrootRootDir); - - /* Create a writable /tmp in the chroot. Many builders need - this. (Of course they should really respect $TMPDIR - instead.) */ - Path chrootTmpDir = chrootRootDir + "/tmp"; - createDirs(chrootTmpDir); - chmod_(chrootTmpDir, 01777); - - /* Create a /etc/passwd with entries for the build user and the - nobody account. The latter is kind of a hack to support - Samba-in-QEMU. */ - createDirs(chrootRootDir + "/etc"); - if (drvOptions.useUidRange(drv)) - chownToBuilder(chrootRootDir + "/etc"); - - if (drvOptions.useUidRange(drv) && (!buildUser || buildUser->getUIDCount() < 65536)) - throw Error("feature 'uid-range' requires the setting '%s' to be enabled", settings.autoAllocateUids.name); - - /* Declare the build user's group so that programs get a consistent - view of the system (e.g., "id -gn"). */ - writeFile( - chrootRootDir + "/etc/group", - fmt("root:x:0:\n" - "nixbld:!:%1%:\n" - "nogroup:x:65534:\n", - sandboxGid())); - - /* Create /etc/hosts with localhost entry. */ - if (derivationType.isSandboxed()) - writeFile(chrootRootDir + "/etc/hosts", "127.0.0.1 localhost\n::1 localhost\n"); - - /* Make the closure of the inputs available in the chroot, - rather than the whole Nix store. This prevents any access - to undeclared dependencies. Directories are bind-mounted, - while other inputs are hard-linked (since only directories - can be bind-mounted). !!! As an extra security - precaution, make the fake Nix store only writable by the - build user. 
*/ - Path chrootStoreDir = chrootRootDir + store.storeDir; - createDirs(chrootStoreDir); - chmod_(chrootStoreDir, 01775); - - if (buildUser && chown(chrootStoreDir.c_str(), 0, buildUser->getGID()) == -1) - throw SysError("cannot change ownership of '%1%'", chrootStoreDir); - - pathsInChroot = getPathsInSandbox(); - - for (auto & i : inputPaths) { - auto p = store.printStorePath(i); - pathsInChroot.insert_or_assign(p, store.toRealPath(p)); - } - - /* If we're repairing, checking or rebuilding part of a - multiple-outputs derivation, it's possible that we're - rebuilding a path that is in settings.sandbox-paths - (typically the dependencies of /bin/sh). Throw them - out. */ - for (auto & i : drv.outputsAndOptPaths(store)) { - /* If the name isn't known a priori (i.e. floating - content-addressing derivation), the temporary location we use - should be fresh. Freshness means it is impossible that the path - is already in the sandbox, so we don't need to worry about - removing it. */ - if (i.second.second) - pathsInChroot.erase(store.printStorePath(*i.second.second)); - } + ChrootDerivationBuilder::prepareSandbox(); if (cgroup) { if (mkdir(cgroup->c_str(), 0755) != 0) @@ -400,18 +273,6 @@ struct ChrootLinuxDerivationBuilder : LinuxDerivationBuilder } } - Strings getPreBuildHookArgs() override - { - assert(!chrootRootDir.empty()); - return Strings({store.printStorePath(drvPath), chrootRootDir}); - } - - Path realPathInSandbox(const Path & p) override - { - // FIXME: why the needsHashRewrite() conditional? - return !needsHashRewrite() ? chrootRootDir + p : store.toRealPath(p); - } - void startChild() override { /* Set up private namespaces for the build: @@ -820,41 +681,9 @@ struct ChrootLinuxDerivationBuilder : LinuxDerivationBuilder DerivationBuilderImpl::killSandbox(getStats); } - void cleanupBuild() override - { - DerivationBuilderImpl::cleanupBuild(); - - /* Move paths out of the chroot for easier debugging of - build failures. */ - if (buildMode == bmNormal) - for (auto & [_, status] : initialOutputs) { - if (!status.known) - continue; - if (buildMode != bmCheck && status.known->isValid()) - continue; - auto p = store.toRealPath(status.known->path); - if (pathExists(chrootRootDir + p)) - std::filesystem::rename((chrootRootDir + p), p); - } - } - void addDependency(const StorePath & path) override { - if (isAllowed(path)) - return; - - addedPaths.insert(path); - - debug("materialising '%s' in the sandbox", store.printStorePath(path)); - - Path source = store.Store::toRealPath(path); - Path target = chrootRootDir + store.printStorePath(path); - - if (pathExists(target)) { - // There is a similar debug message in doBind, so only run it in this block to not have double messages. - debug("bind-mounting %s -> %s", target, source); - throw Error("store path '%s' already exists in the sandbox", store.printStorePath(path)); - } + auto [source, target] = ChrootDerivationBuilder::addDependencyPrep(path); /* Bind-mount the path into the sandbox. 
This requires entering its mount namespace, which is not possible From 1cce358c40d4cc5b3d6128256cf281bdc7853367 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Thu, 7 Aug 2025 22:41:26 +0200 Subject: [PATCH 128/382] single-user-install: just call uname once --- scripts/install-nix-from-tarball.sh | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/scripts/install-nix-from-tarball.sh b/scripts/install-nix-from-tarball.sh index 38fe3af74..87cdde622 100644 --- a/scripts/install-nix-from-tarball.sh +++ b/scripts/install-nix-from-tarball.sh @@ -26,8 +26,10 @@ if [ -z "$HOME" ]; then exit 1 fi +OS="$(uname -s)" + # macOS support for 10.12.6 or higher -if [ "$(uname -s)" = "Darwin" ]; then +if [ "$OS" = "Darwin" ]; then IFS='.' read -r macos_major macos_minor macos_patch << EOF $(sw_vers -productVersion) EOF @@ -39,11 +41,11 @@ EOF fi # Determine if we could use the multi-user installer or not -if [ "$(uname -s)" = "Linux" ]; then +if [ "$OS" = "Linux" ]; then echo "Note: a multi-user installation is possible. See https://nix.dev/manual/nix/stable/installation/installing-binary.html#multi-user-installation" >&2 fi -case "$(uname -s)" in +case "$OS" in "Darwin") INSTALL_MODE=daemon;; *) @@ -60,7 +62,7 @@ while [ $# -gt 0 ]; do ACTION=install ;; --no-daemon) - if [ "$(uname -s)" = "Darwin" ]; then + if [ "$OS" = "Darwin" ]; then printf '\e[1;31mError: --no-daemon installs are no-longer supported on Darwin/macOS!\e[0m\n' >&2 exit 1 fi @@ -167,7 +169,7 @@ for i in $(cd "$self/store" >/dev/null && echo ./*); do rm -rf "$i_tmp" fi if ! [ -e "$dest/store/$i" ]; then - if [ "$(uname -s)" = "Darwin" ]; then + if [ "$OS" = "Darwin" ]; then cp -RPp "$self/store/$i" "$i_tmp" else cp -RP --preserve=ownership,timestamps "$self/store/$i" "$i_tmp" From ddd30349094aab896fcddfa9609ba0d8f862d381 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Thu, 7 Aug 2025 22:42:24 +0200 Subject: [PATCH 129/382] single-user-install: fix cp flags on freebsd --- scripts/install-nix-from-tarball.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/install-nix-from-tarball.sh b/scripts/install-nix-from-tarball.sh index 87cdde622..70fd4897a 100644 --- a/scripts/install-nix-from-tarball.sh +++ b/scripts/install-nix-from-tarball.sh @@ -169,7 +169,7 @@ for i in $(cd "$self/store" >/dev/null && echo ./*); do rm -rf "$i_tmp" fi if ! [ -e "$dest/store/$i" ]; then - if [ "$OS" = "Darwin" ]; then + if [ "$OS" = "Darwin" ] || [ "$OS" = "FreeBSD" ]; then cp -RPp "$self/store/$i" "$i_tmp" else cp -RP --preserve=ownership,timestamps "$self/store/$i" "$i_tmp" From 143bd60136ed4d1e5843856152e58c00749b43bf Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 8 Aug 2025 02:02:30 +0300 Subject: [PATCH 130/382] libutil: Make HashResult a proper struct This resolves an existing TODO and makes the code slightly more readable. 
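For illustration, here is roughly how call sites change once the pair is replaced by a named struct. The HashResult field names below match the struct introduced in this patch; the stand-in Hash type and the recordNarInfo helper are hypothetical, kept minimal so the snippet stands on its own:

  #include <cstdint>
  #include <string>

  // Stand-in for nix::Hash, for this sketch only.
  struct Hash
  {
      std::string to_string() const { return "sha256-..."; }
  };

  // Before: HashResult was std::pair<Hash, uint64_t>, so callers wrote
  //   info.narHash = res.first;
  //   info.narSize = res.second;
  // and had to remember which element was which.

  struct HashResult
  {
      Hash hash;
      uint64_t numBytesDigested;
  };

  // After: the result is self-describing at every call site.
  void recordNarInfo(const HashResult & res, std::string & narHash, uint64_t & narSize)
  {
      narHash = res.hash.to_string();
      narSize = res.numBytesDigested;
  }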
--- src/libfetchers/git.cc | 2 +- src/libstore/binary-cache-store.cc | 10 +++---- src/libstore/export-import.cc | 2 +- src/libstore/local-store.cc | 26 +++++++++---------- src/libstore/make-content-addressed.cc | 2 +- src/libstore/optimise-store.cc | 4 +-- src/libstore/store-api.cc | 6 ++--- src/libstore/unix/build/derivation-builder.cc | 10 +++---- src/libutil-tests/git.cc | 2 +- src/libutil/file-content-address.cc | 2 +- src/libutil/git.cc | 2 +- src/libutil/hash.cc | 2 +- src/libutil/include/nix/util/hash.hh | 8 +++--- src/libutil/references.cc | 2 +- src/libutil/source-accessor.cc | 2 +- src/nix/hash.cc | 6 ++--- src/nix/nix-store/nix-store.cc | 14 ++++++---- src/nix/verify.cc | 4 +-- 18 files changed, 56 insertions(+), 50 deletions(-) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 1ab78c77b..4c2a655b9 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -901,7 +901,7 @@ struct GitInputScheme : InputScheme writeString(file.abs(), hashSink); } return makeFingerprint(*repoInfo.workdirInfo.headRev) - + ";d=" + hashSink.finish().first.to_string(HashFormat::Base16, false); + + ";d=" + hashSink.finish().hash.to_string(HashFormat::Base16, false); } return std::nullopt; } diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 5ac446639..276d1c78a 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -368,16 +368,16 @@ StorePath BinaryCacheStore::addToStoreFromDump( name, ContentAddressWithReferences::fromParts( hashMethod, - caHash ? *caHash : nar.first, + caHash ? *caHash : nar.hash, { .others = references, // caller is not capable of creating a self-reference, because this is content-addressed // without modulus .self = false, }), - nar.first, + nar.hash, }; - info.narSize = nar.second; + info.narSize = nar.numBytesDigested; return info; }) ->path; @@ -493,9 +493,9 @@ StorePath BinaryCacheStore::addToStore( // without modulus .self = false, }), - nar.first, + nar.hash, }; - info.narSize = nar.second; + info.narSize = nar.numBytesDigested; return info; }) ->path; diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index a199d9680..13444deb2 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -33,7 +33,7 @@ void Store::exportPath(const StorePath & path, Sink & sink) /* Refuse to export paths that have changed. This prevents filesystem corruption from spreading to other machines. Don't complain if the stored hash is zero (unknown). 
*/ - Hash hash = hashSink.currentHash().first; + Hash hash = hashSink.currentHash().hash; if (hash != info->narHash && info->narHash != Hash(info->narHash.algo)) throw Error( "hash of path '%s' has changed from '%s' to '%s'!", diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 49c499e3f..d8540be86 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1072,19 +1072,19 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, RepairF auto hashResult = hashSink.finish(); - if (hashResult.first != info.narHash) + if (hashResult.hash != info.narHash) throw Error( "hash mismatch importing path '%s';\n specified: %s\n got: %s", printStorePath(info.path), info.narHash.to_string(HashFormat::Nix32, true), - hashResult.first.to_string(HashFormat::Nix32, true)); + hashResult.hash.to_string(HashFormat::Nix32, true)); - if (hashResult.second != info.narSize) + if (hashResult.numBytesDigested != info.narSize) throw Error( "size mismatch importing path '%s';\n specified: %s\n got: %s", printStorePath(info.path), info.narSize, - hashResult.second); + hashResult.numBytesDigested); if (info.ca) { auto & specified = *info.ca; @@ -1101,7 +1101,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, RepairF std::string{info.path.hashPart()}, }; dumpPath({accessor, path}, caSink, (FileSerialisationMethod) fim); - h = caSink.finish().first; + h = caSink.finish().hash; break; } case FileIngestionMethod::Git: @@ -1279,7 +1279,7 @@ StorePath LocalStore::addToStoreFromDump( /* For computing the nar hash. In recursive SHA-256 mode, this is the same as the store hash, so no need to do it again. */ - auto narHash = std::pair{dumpHash, size}; + HashResult narHash = {dumpHash, size}; if (dumpMethod != FileSerialisationMethod::NixArchive || hashAlgo != HashAlgorithm::SHA256) { HashSink narSink{HashAlgorithm::SHA256}; dumpPath(realPath, narSink); @@ -1295,8 +1295,8 @@ StorePath LocalStore::addToStoreFromDump( syncParent(realPath); } - ValidPathInfo info{*this, name, std::move(desc), narHash.first}; - info.narSize = narHash.second; + ValidPathInfo info{*this, name, std::move(desc), narHash.hash}; + info.narSize = narHash.numBytesDigested; registerValidPath(info); } @@ -1402,12 +1402,12 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) dumpPath(Store::toRealPath(i), hashSink); auto current = hashSink.finish(); - if (info->narHash != nullHash && info->narHash != current.first) { + if (info->narHash != nullHash && info->narHash != current.hash) { printError( "path '%s' was modified! expected hash '%s', got '%s'", printStorePath(i), info->narHash.to_string(HashFormat::Nix32, true), - current.first.to_string(HashFormat::Nix32, true)); + current.hash.to_string(HashFormat::Nix32, true)); if (repair) repairPath(i); else @@ -1419,14 +1419,14 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) /* Fill in missing hashes. */ if (info->narHash == nullHash) { printInfo("fixing missing hash on '%s'", printStorePath(i)); - info->narHash = current.first; + info->narHash = current.hash; update = true; } /* Fill in missing narSize fields (from old stores). 
*/ if (info->narSize == 0) { - printInfo("updating size field on '%s' to %s", printStorePath(i), current.second); - info->narSize = current.second; + printInfo("updating size field on '%s' to %s", printStorePath(i), current.numBytesDigested); + info->narSize = current.numBytesDigested; update = true; } diff --git a/src/libstore/make-content-addressed.cc b/src/libstore/make-content-addressed.cc index 2de18fe83..831542943 100644 --- a/src/libstore/make-content-addressed.cc +++ b/src/libstore/make-content-addressed.cc @@ -43,7 +43,7 @@ std::map makeContentAddressed(Store & srcStore, Store & ds HashModuloSink hashModuloSink(HashAlgorithm::SHA256, oldHashPart); hashModuloSink(sink.s); - auto narModuloHash = hashModuloSink.finish().first; + auto narModuloHash = hashModuloSink.finish().hash; ValidPathInfo info{ dstStore, diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index 8073ee41b..1cf28e022 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -160,7 +160,7 @@ void LocalStore::optimisePath_( {make_ref(), CanonPath(path)}, FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256) - .first; + .hash; }); debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true)); @@ -175,7 +175,7 @@ void LocalStore::optimisePath_( PosixSourceAccessor::createAtRoot(linkPath), FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256) - .first; + .hash; }))) { // XXX: Consider overwriting linkPath with our valid version. warn("removing corrupted link %s", linkPath); diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 468aeecf1..3e2a8e553 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -254,7 +254,7 @@ ValidPathInfo Store::addToStoreSlow( auto hash = method == ContentAddressMethod::Raw::NixArchive && hashAlgo == HashAlgorithm::SHA256 ? narHash : method == ContentAddressMethod::Raw::Git ? 
git::dumpHash(hashAlgo, srcPath).hash - : caHashSink.finish().first; + : caHashSink.finish().hash; if (expectedCAHash && expectedCAHash != hash) throw Error("hash mismatch for '%s'", srcPath); @@ -1035,8 +1035,8 @@ decodeValidPathInfo(const Store & store, std::istream & str, std::optionalfirst); - info.narSize = hashGiven->second; + ValidPathInfo info(store.parseStorePath(path), hashGiven->hash); + info.narSize = hashGiven->numBytesDigested; std::string deriver; getline(str, deriver); if (deriver != "") diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 038c844fa..ed493b8f4 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -1676,7 +1676,7 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() HashModuloSink caSink{outputHash.hashAlgo, oldHashPart}; auto fim = outputHash.method.getFileIngestionMethod(); dumpPath({getFSSourceAccessor(), CanonPath(actualPath)}, caSink, (FileSerialisationMethod) fim); - return caSink.finish().first; + return caSink.finish().hash; } case FileIngestionMethod::Git: { return git::dumpHash(outputHash.hashAlgo, {getFSSourceAccessor(), CanonPath(actualPath)}).hash; @@ -1705,8 +1705,8 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() {getFSSourceAccessor(), CanonPath(actualPath)}, FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256); - newInfo0.narHash = narHashAndSize.first; - newInfo0.narSize = narHashAndSize.second; + newInfo0.narHash = narHashAndSize.hash; + newInfo0.narSize = narHashAndSize.numBytesDigested; } assert(newInfo0.ca); @@ -1729,8 +1729,8 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() {getFSSourceAccessor(), CanonPath(actualPath)}, FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256); - ValidPathInfo newInfo0{requiredFinalPath, narHashAndSize.first}; - newInfo0.narSize = narHashAndSize.second; + ValidPathInfo newInfo0{requiredFinalPath, narHashAndSize.hash}; + newInfo0.narSize = narHashAndSize.numBytesDigested; auto refs = rewriteRefs(); newInfo0.references = std::move(refs.others); if (refs.self) diff --git a/src/libutil-tests/git.cc b/src/libutil-tests/git.cc index d9926b9b6..6180a4cfc 100644 --- a/src/libutil-tests/git.cc +++ b/src/libutil-tests/git.cc @@ -270,7 +270,7 @@ TEST_F(GitTest, both_roundrip) HashSink hashSink{hashAlgo}; TeeSink s2{s, hashSink}; auto mode = dump(path, s2, dumpHook, defaultPathFilter, mockXpSettings); - auto hash = hashSink.finish().first; + auto hash = hashSink.finish().hash; cas.insert_or_assign(hash, std::move(s.s)); return TreeEntry{ .mode = mode, diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc index be381abfd..df1b09f6e 100644 --- a/src/libutil/file-content-address.cc +++ b/src/libutil/file-content-address.cc @@ -101,7 +101,7 @@ hashPath(const SourcePath & path, FileIngestionMethod method, HashAlgorithm ht, case FileIngestionMethod::Flat: case FileIngestionMethod::NixArchive: { auto res = hashPath(path, (FileSerialisationMethod) method, ht, filter); - return {res.first, {res.second}}; + return {res.hash, res.numBytesDigested}; } case FileIngestionMethod::Git: return {git::dumpHash(ht, path, filter).hash, std::nullopt}; diff --git a/src/libutil/git.cc b/src/libutil/git.cc index bee354da4..b17fdf145 100644 --- a/src/libutil/git.cc +++ b/src/libutil/git.cc @@ -329,7 +329,7 @@ TreeEntry dumpHash(HashAlgorithm ha, const SourcePath & path, PathFilter & filte hook = [&](const SourcePath & path) -> TreeEntry { auto 
hashSink = HashSink(ha); auto mode = dump(path, hashSink, hook, filter); - auto hash = hashSink.finish().first; + auto hash = hashSink.finish().hash; return { .mode = mode, .hash = hash, diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index de2fc5a48..fe7e9ab3b 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -338,7 +338,7 @@ Hash hashFile(HashAlgorithm ha, const Path & path) { HashSink sink(ha); readFile(path, sink); - return sink.finish().first; + return sink.finish().hash; } HashSink::HashSink(HashAlgorithm ha) diff --git a/src/libutil/include/nix/util/hash.hh b/src/libutil/include/nix/util/hash.hh index ea9c71ac7..f5c01d2e1 100644 --- a/src/libutil/include/nix/util/hash.hh +++ b/src/libutil/include/nix/util/hash.hh @@ -153,10 +153,12 @@ Hash hashFile(HashAlgorithm ha, const Path & path); /** * The final hash and the number of bytes digested. - * - * @todo Convert to proper struct */ -typedef std::pair HashResult; +struct HashResult +{ + Hash hash; + uint64_t numBytesDigested; +}; /** * Compress a hash to the specified number of bytes by cyclically diff --git a/src/libutil/references.cc b/src/libutil/references.cc index 0f5164f6b..42076acff 100644 --- a/src/libutil/references.cc +++ b/src/libutil/references.cc @@ -120,7 +120,7 @@ HashResult HashModuloSink::finish() hashSink(fmt("|%d", pos)); auto h = hashSink.finish(); - return {h.first, rewritingSink.pos}; + return {.hash = h.hash, .numBytesDigested = rewritingSink.pos}; } } // namespace nix diff --git a/src/libutil/source-accessor.cc b/src/libutil/source-accessor.cc index 9a0625828..3c2d65829 100644 --- a/src/libutil/source-accessor.cc +++ b/src/libutil/source-accessor.cc @@ -65,7 +65,7 @@ Hash SourceAccessor::hashPath(const CanonPath & path, PathFilter & filter, HashA { HashSink sink(ha); dumpPath(path, sink, filter); - return sink.finish().first; + return sink.finish().hash; } SourceAccessor::Stat SourceAccessor::lstat(const CanonPath & path) diff --git a/src/nix/hash.cc b/src/nix/hash.cc index cc62aeb86..9858386c5 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -100,14 +100,14 @@ struct CmdHashBase : Command // so we don't need to go low-level, or reject symlink `path`s. auto hashSink = makeSink(); readFile(path, *hashSink); - h = hashSink->finish().first; + h = hashSink->finish().hash; break; } case FileIngestionMethod::NixArchive: { auto sourcePath = makeSourcePath(); auto hashSink = makeSink(); dumpPath(sourcePath, *hashSink, (FileSerialisationMethod) mode); - h = hashSink->finish().first; + h = hashSink->finish().hash; break; } case FileIngestionMethod::Git: { @@ -116,7 +116,7 @@ struct CmdHashBase : Command hook = [&](const SourcePath & path) -> git::TreeEntry { auto hashSink = makeSink(); auto mode = dump(path, *hashSink, hook); - auto hash = hashSink->finish().first; + auto hash = hashSink->finish().hash; return { .mode = mode, .hash = hash, diff --git a/src/nix/nix-store/nix-store.cc b/src/nix/nix-store/nix-store.cc index 5ada44949..93fe4df45 100644 --- a/src/nix/nix-store/nix-store.cc +++ b/src/nix/nix-store/nix-store.cc @@ -582,7 +582,11 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise) while (1) { // We use a dummy value because we'll set it below. FIXME be correct by // construction and avoid dummy value. - auto hashResultOpt = !hashGiven ? std::optional{{Hash::dummy, -1}} : std::nullopt; + auto hashResultOpt = !hashGiven ? 
std::optional{{ + Hash::dummy, + std::numeric_limits::max(), + }} + : std::nullopt; auto info = decodeValidPathInfo(*store, cin, hashResultOpt); if (!info) break; @@ -599,8 +603,8 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise) {store->getFSAccessor(false), CanonPath{info->path.to_string()}}, FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256); - info->narHash = hash.first; - info->narSize = hash.second; + info->narHash = hash.hash; + info->narSize = hash.numBytesDigested; } infos.insert_or_assign(info->path, *info); } @@ -836,12 +840,12 @@ static void opVerifyPath(Strings opFlags, Strings opArgs) HashSink sink(info->narHash.algo); store->narFromPath(path, sink); auto current = sink.finish(); - if (current.first != info->narHash) { + if (current.hash != info->narHash) { printError( "path '%s' was modified! expected hash '%s', got '%s'", store->printStorePath(path), info->narHash.to_string(HashFormat::Nix32, true), - current.first.to_string(HashFormat::Nix32, true)); + current.hash.to_string(HashFormat::Nix32, true)); status = 1; } } diff --git a/src/nix/verify.cc b/src/nix/verify.cc index d5e9ab0d3..309d19a1d 100644 --- a/src/nix/verify.cc +++ b/src/nix/verify.cc @@ -103,14 +103,14 @@ struct CmdVerify : StorePathsCommand auto hash = hashSink.finish(); - if (hash.first != info->narHash) { + if (hash.hash != info->narHash) { corrupted++; act2.result(resCorruptedPath, store->printStorePath(info->path)); printError( "path '%s' was modified! expected hash '%s', got '%s'", store->printStorePath(info->path), info->narHash.to_string(HashFormat::Nix32, true), - hash.first.to_string(HashFormat::Nix32, true)); + hash.hash.to_string(HashFormat::Nix32, true)); } } From bf320465ae7951b78387481f68fb0b72b3ad785e Mon Sep 17 00:00:00 2001 From: Farid Zakaria Date: Thu, 24 Jul 2025 20:42:40 -0700 Subject: [PATCH 131/382] Make functional tests depend on nix binary so they auto recompile With this I'm able to do a fresh config + meson test with all dependencies correctly propagated. Co-authored-by: Sergei Zimmerman --- src/nix/meson.build | 4 +++- src/perl/lib/Nix/meson.build | 3 ++- src/perl/meson.build | 2 +- src/perl/t/meson.build | 4 ++-- tests/functional/meson.build | 14 +++++++++++++- 5 files changed, 21 insertions(+), 6 deletions(-) diff --git a/src/nix/meson.build b/src/nix/meson.build index c6dc9c530..e17b39f98 100644 --- a/src/nix/meson.build +++ b/src/nix/meson.build @@ -211,6 +211,7 @@ if host_machine.system() == 'windows' executable_suffix = '.exe' endif +nix_symlinks_targets = [] foreach linkname : nix_symlinks install_symlink( linkname + executable_suffix, @@ -220,7 +221,7 @@ foreach linkname : nix_symlinks # The 'runtime' tag is what executables default to, which we want to emulate here. 
install_tag : 'runtime', ) - custom_target( + symlink_target = custom_target( command : [ 'ln', '-sf', fs.name(this_exe), '@OUTPUT@' ], output : linkname + executable_suffix, # native doesn't allow dangling symlinks, but the target executable often doesn't exist at this time @@ -230,6 +231,7 @@ foreach linkname : nix_symlinks ) # TODO(Ericson3214): Doesn't yet work #meson.override_find_program(linkname, t) + nix_symlinks_targets += symlink_target endforeach install_symlink( diff --git a/src/perl/lib/Nix/meson.build b/src/perl/lib/Nix/meson.build index 5f8baee69..dd5560e21 100644 --- a/src/perl/lib/Nix/meson.build +++ b/src/perl/lib/Nix/meson.build @@ -16,8 +16,9 @@ nix_perl_scripts = files( 'Utils.pm', ) +nix_perl_scripts_copy_tgts = [] foreach f : nix_perl_scripts - fs.copyfile(f) + nix_perl_scripts_copy_tgts += fs.copyfile(f) endforeach diff --git a/src/perl/meson.build b/src/perl/meson.build index f33291051..59f2a66b8 100644 --- a/src/perl/meson.build +++ b/src/perl/meson.build @@ -191,6 +191,6 @@ if get_option('tests').enabled() yath, args : [ 'test' ], workdir : meson.current_build_dir(), - depends : [ nix_perl_store_lib ], + depends : [ nix_perl_store_lib ] + nix_perl_tests_copy_tgts + nix_perl_scripts_copy_tgts, ) endif diff --git a/src/perl/t/meson.build b/src/perl/t/meson.build index dbd1139f3..f95bee2ff 100644 --- a/src/perl/t/meson.build +++ b/src/perl/t/meson.build @@ -9,7 +9,7 @@ nix_perl_tests = files( 'init.t', ) - +nix_perl_tests_copy_tgts = [] foreach f : nix_perl_tests - fs.copyfile(f) + nix_perl_tests_copy_tgts += fs.copyfile(f) endforeach diff --git a/tests/functional/meson.build b/tests/functional/meson.build index 85373a70a..e501aa102 100644 --- a/tests/functional/meson.build +++ b/tests/functional/meson.build @@ -223,6 +223,18 @@ subdir('local-overlay-store') foreach suite : suites workdir = suite['workdir'] suite_name = suite['name'] + # This is workaround until [1] is resolved. When building in a devshell + # as a subproject we want the tests to depend on the nix build target, so + # that it gets automatically rebuilt. + # However, when the functional test suite is built separately (via componentized + # builds or in NixOS tests) we can't depend on the nix executable, since it's + # an external program. The following is a simple heuristic that suffices for now. + # [1]: https://github.com/mesonbuild/meson/issues/13877 + deps = suite['deps'] + if meson.is_subproject() + nix_subproject = subproject('nix') + deps += [ nix ] + nix_subproject.get_variable('nix_symlinks_targets') + endif foreach script : suite['tests'] # Turns, e.g., `tests/functional/flakes/show.sh` into a Meson test target called # `functional-flakes-show`. @@ -252,7 +264,7 @@ foreach suite : suites # them more time than the default of 30 seconds. timeout : 300, # Used for target dependency/ordering tracking, not adding compiler flags or anything. 
- depends : suite['deps'], + depends : deps, workdir : workdir, ) endforeach From 73d09e67a7e79c1bad57f2eddd51b7f00e0eb374 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Fri, 8 Aug 2025 08:02:37 +0200 Subject: [PATCH 132/382] add rc.d script for the nix-daemon --- misc/freebsd/meson.build | 10 ++++++++ misc/freebsd/nix-daemon.in | 49 ++++++++++++++++++++++++++++++++++++++ misc/meson.build | 4 ++++ 3 files changed, 63 insertions(+) create mode 100644 misc/freebsd/meson.build create mode 100644 misc/freebsd/nix-daemon.in diff --git a/misc/freebsd/meson.build b/misc/freebsd/meson.build new file mode 100644 index 000000000..d94149883 --- /dev/null +++ b/misc/freebsd/meson.build @@ -0,0 +1,10 @@ +configure_file( + input : 'nix-daemon.in', + output : 'nix-daemon', + install : true, + install_dir : get_option('prefix') / 'etc/rc.d', + install_mode : 'rwxr-xr-x', + configuration : { + 'bindir' : bindir, + }, +) diff --git a/misc/freebsd/nix-daemon.in b/misc/freebsd/nix-daemon.in new file mode 100644 index 000000000..9eb269850 --- /dev/null +++ b/misc/freebsd/nix-daemon.in @@ -0,0 +1,49 @@ +#!/bin/sh +# +# PROVIDE: nix_daemon +# REQUIRE: DAEMON +# KEYWORD: shutdown +# +# Add the following lines to /etc/rc.conf to enable nix-daemon: +# +# nix_daemon_enable="YES" +# + +# shellcheck source=/dev/null +. /etc/rc.subr + +name="nix_daemon" +# shellcheck disable=SC2034 +rcvar="nix_daemon_enable" + +load_rc_config $name + +: "${nix_daemon_enable:=NO}" + +command="@bindir@/nix-daemon" +command_args="" +pidfile="/var/run/nix-daemon.pid" + +# shellcheck disable=SC2034 +start_cmd="${name}_start" +# shellcheck disable=SC2034 +stop_cmd="${name}_stop" + +nix_daemon_start() { + echo "Starting ${name}." + # command_args is intentionally unquoted to allow multiple arguments + # shellcheck disable=SC2086 + /usr/sbin/daemon -c -f -p "${pidfile}" "${command}" ${command_args} +} + +nix_daemon_stop() { + if [ -f "${pidfile}" ]; then + echo "Stopping ${name}." + kill -TERM "$(cat "${pidfile}")" + rm -f "${pidfile}" + else + echo "${name} is not running." 
+ fi +} + +run_rc_command "$1" \ No newline at end of file diff --git a/misc/meson.build b/misc/meson.build index 82f2b0c65..fc2bca07f 100644 --- a/misc/meson.build +++ b/misc/meson.build @@ -9,3 +9,7 @@ endif if host_machine.system() == 'darwin' subdir('launchd') endif + +if host_machine.system() == 'freebsd' + subdir('freebsd') +endif From 11d03893f81dc2399ea511e059d8d98676d7d0d0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Thu, 7 Aug 2025 23:51:21 +0200 Subject: [PATCH 133/382] add freebsd multi-user installer --- packaging/binary-tarball.nix | 6 + scripts/install-freebsd-multi-user.sh | 173 ++++++++++++++++++++++++++ scripts/install-multi-user.sh | 28 ++++- scripts/install-nix-from-tarball.sh | 9 +- 4 files changed, 213 insertions(+), 3 deletions(-) create mode 100644 scripts/install-freebsd-multi-user.sh diff --git a/packaging/binary-tarball.nix b/packaging/binary-tarball.nix index 2050384b0..86aae0ac5 100644 --- a/packaging/binary-tarball.nix +++ b/packaging/binary-tarball.nix @@ -37,6 +37,9 @@ runCommand "nix-binary-tarball-${version}" env '' substitute ${../scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \ --subst-var-by nix ${nix} \ --subst-var-by cacert ${cacert} + substitute ${../scripts/install-freebsd-multi-user.sh} $TMPDIR/install-freebsd-multi-user.sh \ + --subst-var-by nix ${nix} \ + --subst-var-by cacert ${cacert} substitute ${../scripts/install-multi-user.sh} $TMPDIR/install-multi-user \ --subst-var-by nix ${nix} \ --subst-var-by cacert ${cacert} @@ -48,6 +51,7 @@ runCommand "nix-binary-tarball-${version}" env '' shellcheck $TMPDIR/create-darwin-volume.sh shellcheck $TMPDIR/install-darwin-multi-user.sh shellcheck $TMPDIR/install-systemd-multi-user.sh + shellcheck $TMPDIR/install-freebsd-multi-user.sh # SC1091: Don't panic about not being able to source # /etc/profile @@ -64,6 +68,7 @@ runCommand "nix-binary-tarball-${version}" env '' chmod +x $TMPDIR/create-darwin-volume.sh chmod +x $TMPDIR/install-darwin-multi-user.sh chmod +x $TMPDIR/install-systemd-multi-user.sh + chmod +x $TMPDIR/install-freebsd-multi-user.sh chmod +x $TMPDIR/install-multi-user dir=nix-${version}-${system} fn=$out/$dir.tar.xz @@ -82,6 +87,7 @@ runCommand "nix-binary-tarball-${version}" env '' $TMPDIR/create-darwin-volume.sh \ $TMPDIR/install-darwin-multi-user.sh \ $TMPDIR/install-systemd-multi-user.sh \ + $TMPDIR/install-freebsd-multi-user.sh \ $TMPDIR/install-multi-user \ $TMPDIR/reginfo \ $(cat ${installerClosureInfo}/store-paths) diff --git a/scripts/install-freebsd-multi-user.sh b/scripts/install-freebsd-multi-user.sh new file mode 100644 index 000000000..0d8b85ec4 --- /dev/null +++ b/scripts/install-freebsd-multi-user.sh @@ -0,0 +1,173 @@ +#!/usr/bin/env bash + +set -eu +set -o pipefail + +# System specific settings +# FreeBSD typically uses UIDs from 1001+ for regular users, +# so we'll use a range that's unlikely to conflict +export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-30001}" +export NIX_BUILD_GROUP_ID="${NIX_BUILD_GROUP_ID:-30000}" +export NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d" + +# FreeBSD service paths +readonly SERVICE_SRC=/etc/rc.d/nix-daemon +readonly SERVICE_DEST=/usr/local/etc/rc.d/nix-daemon + +poly_cure_artifacts() { + : +} + +poly_service_installed_check() { + if [ -f "$SERVICE_DEST" ]; then + return 0 + else + return 1 + fi +} + +poly_service_uninstall_directions() { + cat < /dev/null 2>&1 +} + +poly_group_id_get() { + pw group show "$1" | cut -d: -f3 +} + +poly_create_build_group() { + _sudo "Create the Nix build group, 
$NIX_BUILD_GROUP_NAME" \ + pw groupadd -n "$NIX_BUILD_GROUP_NAME" -g "$NIX_BUILD_GROUP_ID" >&2 +} + +poly_user_exists() { + pw user show "$1" > /dev/null 2>&1 +} + +poly_user_id_get() { + pw user show "$1" | cut -d: -f3 +} + +poly_user_hidden_get() { + # FreeBSD doesn't have a concept of hidden users like macOS + echo "0" +} + +poly_user_hidden_set() { + # No-op on FreeBSD + true +} + +poly_user_home_get() { + pw user show "$1" | cut -d: -f9 +} + +poly_user_home_set() { + _sudo "in order to give $1 a safe home directory" \ + pw usermod -n "$1" -d "$2" +} + +poly_user_note_get() { + pw user show "$1" | cut -d: -f8 +} + +poly_user_note_set() { + _sudo "in order to give $1 a useful comment" \ + pw usermod -n "$1" -c "$2" +} + +poly_user_shell_get() { + pw user show "$1" | cut -d: -f10 +} + +poly_user_shell_set() { + _sudo "in order to prevent $1 from logging in" \ + pw usermod -n "$1" -s "$2" +} + +poly_user_in_group_check() { + groups "$1" 2>/dev/null | grep -q "\<$2\>" +} + +poly_user_in_group_set() { + _sudo "Add $1 to the $2 group" \ + pw groupmod -n "$2" -m "$1" +} + +poly_user_primary_group_get() { + pw user show "$1" | cut -d: -f4 +} + +poly_user_primary_group_set() { + _sudo "to let the nix daemon use this user for builds" \ + pw usermod -n "$1" -g "$2" +} + +poly_create_build_user() { + username=$1 + uid=$2 + builder_num=$3 + + _sudo "Creating the Nix build user, $username" \ + pw useradd \ + -n "$username" \ + -u "$uid" \ + -g "$NIX_BUILD_GROUP_NAME" \ + -G "$NIX_BUILD_GROUP_NAME" \ + -d /var/empty \ + -s /sbin/nologin \ + -c "Nix build user $builder_num" +} + +poly_prepare_to_install() { + # FreeBSD-specific preparation steps + : +} + +poly_configure_default_profile_targets() { + # FreeBSD-specific profile locations + # FreeBSD uses /usr/local/etc for third-party shell configurations + # Include both profile (for login shells) and bashrc (for interactive shells) + echo "/usr/local/etc/profile /usr/local/etc/bashrc /usr/local/etc/profile.d/nix.sh /usr/local/etc/zshrc" +} diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh index e9ddfc014..477eb1fd6 100644 --- a/scripts/install-multi-user.sh +++ b/scripts/install-multi-user.sh @@ -33,7 +33,8 @@ readonly NIX_BUILD_GROUP_NAME="nixbld" readonly NIX_ROOT="/nix" readonly NIX_EXTRA_CONF=${NIX_EXTRA_CONF:-} -readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/profile.d/nix.sh" "/etc/zshrc" "/etc/bash.bashrc" "/etc/zsh/zshrc") +# PROFILE_TARGETS will be set later after OS-specific scripts are loaded +PROFILE_TARGETS=() readonly PROFILE_BACKUP_SUFFIX=".backup-before-nix" readonly PROFILE_NIX_FILE="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.sh" @@ -99,6 +100,14 @@ is_os_darwin() { fi } +is_os_freebsd() { + if [ "$(uname -s)" = "FreeBSD" ]; then + return 0 + else + return 1 + fi +} + contact_us() { echo "You can open an issue at" echo "https://github.com/NixOS/nix/issues/new?labels=installer&template=installer.md" @@ -498,6 +507,10 @@ You have aborted the installation. EOF fi fi + + if is_os_freebsd; then + ok "Detected FreeBSD, will set up rc.d service for nix-daemon" + fi } setup_report() { @@ -834,7 +847,7 @@ install_from_extracted_nix() { ( cd "$EXTRACTED_NIX_PATH" - if is_os_darwin; then + if is_os_darwin || is_os_freebsd; then _sudo "to copy the basic Nix files to the new store at $NIX_ROOT/store" \ cp -RPp ./store/* "$NIX_ROOT/store/" else @@ -989,11 +1002,22 @@ main() { # shellcheck source=./install-systemd-multi-user.sh . 
"$EXTRACTED_NIX_PATH/install-systemd-multi-user.sh" # most of this works on non-systemd distros also check_required_system_specific_settings "install-systemd-multi-user.sh" + elif is_os_freebsd; then + # shellcheck source=./install-freebsd-multi-user.sh + . "$EXTRACTED_NIX_PATH/install-freebsd-multi-user.sh" + check_required_system_specific_settings "install-freebsd-multi-user.sh" else failure "Sorry, I don't know what to do on $(uname)" fi + # Set profile targets after OS-specific scripts are loaded + if command -v poly_configure_default_profile_targets > /dev/null 2>&1; then + PROFILE_TARGETS=($(poly_configure_default_profile_targets)) + else + PROFILE_TARGETS=("/etc/bashrc" "/etc/profile.d/nix.sh" "/etc/zshrc" "/etc/bash.bashrc" "/etc/zsh/zshrc") + fi + welcome_to_nix if ! is_root; then diff --git a/scripts/install-nix-from-tarball.sh b/scripts/install-nix-from-tarball.sh index 70fd4897a..fd00460ec 100644 --- a/scripts/install-nix-from-tarball.sh +++ b/scripts/install-nix-from-tarball.sh @@ -41,7 +41,7 @@ EOF fi # Determine if we could use the multi-user installer or not -if [ "$OS" = "Linux" ]; then +if [ "$OS" = "Linux" ] || [ "$OS" = "FreeBSD" ]; then echo "Note: a multi-user installation is possible. See https://nix.dev/manual/nix/stable/installation/installing-binary.html#multi-user-installation" >&2 fi @@ -125,6 +125,13 @@ while [ $# -gt 0 ]; do done if [ "$INSTALL_MODE" = "daemon" ]; then + # Check for bash on systems that don't have it by default + if [ "$OS" = "FreeBSD" ] && ! command -v bash >/dev/null 2>&1; then + printf '\e[1;31mError: bash is required for multi-user installation but was not found.\e[0m\n' >&2 + printf 'Please install bash first:\n' >&2 + printf ' pkg install bash\n' >&2 + exit 1 + fi printf '\e[1;31mSwitching to the Multi-user Installer\e[0m\n' exec "$self/install-multi-user" $ACTION exit 0 From e74cfc52fe4ce1352e5fa885c275764d2c6233a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Fri, 8 Aug 2025 08:37:59 +0200 Subject: [PATCH 134/382] docs/uninstall: add instructions for freebsd --- doc/manual/source/installation/uninstall.md | 32 +++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/doc/manual/source/installation/uninstall.md b/doc/manual/source/installation/uninstall.md index 8d45da6bb..69d59847b 100644 --- a/doc/manual/source/installation/uninstall.md +++ b/doc/manual/source/installation/uninstall.md @@ -41,6 +41,38 @@ There may also be references to Nix in which you may remove. +### FreeBSD + +1. Stop and remove the Nix daemon service: + + ```console + sudo service nix-daemon stop + sudo rm -f /usr/local/etc/rc.d/nix-daemon + sudo sysrc -x nix_daemon_enable + ``` + +2. Remove files created by Nix: + + ```console + sudo rm -rf /etc/nix /usr/local/etc/profile.d/nix.sh /nix ~root/.nix-channels ~root/.nix-defexpr ~root/.nix-profile ~root/.cache/nix + ``` + +3. Remove build users and their group: + + ```console + for i in $(seq 1 32); do + sudo pw userdel nixbld$i + done + sudo pw groupdel nixbld + ``` + +4. There may also be references to Nix in: + - `/usr/local/etc/bashrc` + - `/usr/local/etc/zshrc` + - Shell configuration files in users' home directories + + which you may remove. + ### macOS > **Updating to macOS 15 Sequoia** From 1b4aa5c1ef7d3a04ca4ec01a98306a180636cfc8 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 8 Aug 2025 01:20:50 +0300 Subject: [PATCH 135/382] libstore: Remove unused overload of scanForReferences This doesn't seem to be used anywhere at the moment. 
It might be used out-of-tree, but this is a small convenience function that is not worth keeping without in-tree usage. --- src/libstore/include/nix/store/path-references.hh | 2 -- src/libstore/path-references.cc | 8 -------- 2 files changed, 10 deletions(-) diff --git a/src/libstore/include/nix/store/path-references.hh b/src/libstore/include/nix/store/path-references.hh index fad1e57a3..55535f666 100644 --- a/src/libstore/include/nix/store/path-references.hh +++ b/src/libstore/include/nix/store/path-references.hh @@ -6,8 +6,6 @@ namespace nix { -std::pair scanForReferences(const Path & path, const StorePathSet & refs); - StorePathSet scanForReferences(Sink & toTee, const Path & path, const StorePathSet & refs); class PathRefScanSink : public RefScanSink diff --git a/src/libstore/path-references.cc b/src/libstore/path-references.cc index 2c71f437f..8b167e902 100644 --- a/src/libstore/path-references.cc +++ b/src/libstore/path-references.cc @@ -43,14 +43,6 @@ StorePathSet PathRefScanSink::getResultPaths() return found; } -std::pair scanForReferences(const std::string & path, const StorePathSet & refs) -{ - HashSink hashSink{HashAlgorithm::SHA256}; - auto found = scanForReferences(hashSink, path, refs); - auto hash = hashSink.finish(); - return std::pair(found, hash); -} - StorePathSet scanForReferences(Sink & toTee, const Path & path, const StorePathSet & refs) { PathRefScanSink refsSink = PathRefScanSink::fromPaths(refs); From 2e3ebfb82922cc19f1966a6d3a60272564fb811f Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 8 Aug 2025 01:35:34 +0300 Subject: [PATCH 136/382] libutil: Move references.{hh,cc} to libstore The implicit dependency on refLength (which is the StorePath::HashLen) is not good. Also the companion tests and benchmarks are already in libstore-tests. 
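To make the layering issue concrete, here is a simplified sketch of why the scanner depends on the store-path layout: it looks for the fixed-length hash part of store paths inside arbitrary output bytes. The constants and the scanChunk helper below are illustrative only, not the RefScanSink API:

  #include <cstddef>
  #include <set>
  #include <string>
  #include <string_view>

  // A store path embeds a 32-character base-32 hash part, e.g.
  // /nix/store/dc04vv14dak1c1r48qa0m23vr9jy8sm0-foo -> "dc04vv14dak1c1r48qa0m23vr9jy8sm0".
  constexpr std::size_t refLength = 32; // StorePath::HashLen in libstore

  // Scan one chunk of build output for any of the given hash parts. The real
  // sink streams data and handles hashes straddling chunk boundaries; this
  // sketch only shows the core idea: an output references a store path iff
  // that path's hash part occurs somewhere in the output bytes.
  std::set<std::string> scanChunk(std::string_view chunk, const std::set<std::string> & hashes)
  {
      std::set<std::string> found;
      for (std::size_t i = 0; i + refLength <= chunk.size(); ++i) {
          std::string candidate{chunk.substr(i, refLength)};
          if (hashes.count(candidate))
              found.insert(candidate);
      }
      return found;
  }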
--- src/libstore-tests/ref-scan-bench.cc | 2 +- src/libstore-tests/references.cc | 39 +++++++++++++++- src/libstore/include/nix/store/meson.build | 1 + .../include/nix/store/path-references.hh | 2 +- .../include/nix/store}/references.hh | 0 src/libstore/local-store.cc | 2 +- src/libstore/make-content-addressed.cc | 2 +- src/libstore/meson.build | 1 + src/{libutil => libstore}/references.cc | 6 +-- src/libstore/store-api.cc | 2 - src/libutil-tests/meson.build | 1 - src/libutil-tests/references.cc | 45 ------------------- src/libutil/include/nix/util/meson.build | 1 - src/libutil/meson.build | 1 - src/nix/hash.cc | 2 +- 15 files changed, 48 insertions(+), 59 deletions(-) rename src/{libutil/include/nix/util => libstore/include/nix/store}/references.hh (100%) rename src/{libutil => libstore}/references.cc (96%) delete mode 100644 src/libutil-tests/references.cc diff --git a/src/libstore-tests/ref-scan-bench.cc b/src/libstore-tests/ref-scan-bench.cc index 011d53aec..ff0aa1815 100644 --- a/src/libstore-tests/ref-scan-bench.cc +++ b/src/libstore-tests/ref-scan-bench.cc @@ -1,4 +1,4 @@ -#include "nix/util/references.hh" +#include "nix/store/references.hh" #include "nix/store/path.hh" #include "nix/util/base-nix-32.hh" diff --git a/src/libstore-tests/references.cc b/src/libstore-tests/references.cc index c7b706c68..27ecad08f 100644 --- a/src/libstore-tests/references.cc +++ b/src/libstore-tests/references.cc @@ -1,9 +1,46 @@ -#include "nix/util/references.hh" +#include "nix/store/references.hh" #include namespace nix { +struct RewriteParams +{ + std::string originalString, finalString; + StringMap rewrites; + + friend std::ostream & operator<<(std::ostream & os, const RewriteParams & bar) + { + StringSet strRewrites; + for (auto & [from, to] : bar.rewrites) + strRewrites.insert(from + "->" + to); + return os << "OriginalString: " << bar.originalString << std::endl + << "Rewrites: " << dropEmptyInitThenConcatStringsSep(",", strRewrites) << std::endl + << "Expected result: " << bar.finalString; + } +}; + +class RewriteTest : public ::testing::TestWithParam +{}; + +TEST_P(RewriteTest, IdentityRewriteIsIdentity) +{ + RewriteParams param = GetParam(); + StringSink rewritten; + auto rewriter = RewritingSink(param.rewrites, rewritten); + rewriter(param.originalString); + rewriter.flush(); + ASSERT_EQ(rewritten.s, param.finalString); +} + +INSTANTIATE_TEST_CASE_P( + references, + RewriteTest, + ::testing::Values( + RewriteParams{"foooo", "baroo", {{"foo", "bar"}, {"bar", "baz"}}}, + RewriteParams{"foooo", "bazoo", {{"fou", "bar"}, {"foo", "baz"}}}, + RewriteParams{"foooo", "foooo", {}})); + TEST(references, scan) { std::string hash1 = "dc04vv14dak1c1r48qa0m23vr9jy8sm0"; diff --git a/src/libstore/include/nix/store/meson.build b/src/libstore/include/nix/store/meson.build index e883a89e4..e41a7da4d 100644 --- a/src/libstore/include/nix/store/meson.build +++ b/src/libstore/include/nix/store/meson.build @@ -62,6 +62,7 @@ headers = [ config_pub_h ] + files( 'posix-fs-canonicalise.hh', 'profiles.hh', 'realisation.hh', + 'references.hh', 'remote-fs-accessor.hh', 'remote-store-connection.hh', 'remote-store.hh', diff --git a/src/libstore/include/nix/store/path-references.hh b/src/libstore/include/nix/store/path-references.hh index 55535f666..66d0da268 100644 --- a/src/libstore/include/nix/store/path-references.hh +++ b/src/libstore/include/nix/store/path-references.hh @@ -1,7 +1,7 @@ #pragma once ///@file -#include "nix/util/references.hh" +#include "nix/store/references.hh" #include "nix/store/path.hh" namespace 
nix { diff --git a/src/libutil/include/nix/util/references.hh b/src/libstore/include/nix/store/references.hh similarity index 100% rename from src/libutil/include/nix/util/references.hh rename to src/libstore/include/nix/store/references.hh diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index d8540be86..685402cfe 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -7,7 +7,7 @@ #include "nix/store/derivations.hh" #include "nix/store/realisation.hh" #include "nix/store/nar-info.hh" -#include "nix/util/references.hh" +#include "nix/store/references.hh" #include "nix/util/callback.hh" #include "nix/util/topo-sort.hh" #include "nix/util/finally.hh" diff --git a/src/libstore/make-content-addressed.cc b/src/libstore/make-content-addressed.cc index 831542943..ce4a36849 100644 --- a/src/libstore/make-content-addressed.cc +++ b/src/libstore/make-content-addressed.cc @@ -1,5 +1,5 @@ #include "nix/store/make-content-addressed.hh" -#include "nix/util/references.hh" +#include "nix/store/references.hh" namespace nix { diff --git a/src/libstore/meson.build b/src/libstore/meson.build index 0b6471af3..ad76582d8 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -316,6 +316,7 @@ sources = files( 'posix-fs-canonicalise.cc', 'profiles.cc', 'realisation.cc', + 'references.cc', 'remote-fs-accessor.cc', 'remote-store.cc', 'restricted-store.cc', diff --git a/src/libutil/references.cc b/src/libstore/references.cc similarity index 96% rename from src/libutil/references.cc rename to src/libstore/references.cc index 42076acff..1620af26e 100644 --- a/src/libutil/references.cc +++ b/src/libstore/references.cc @@ -1,6 +1,6 @@ -#include "nix/util/references.hh" +#include "nix/store/references.hh" +#include "nix/store/path.hh" #include "nix/util/hash.hh" -#include "nix/util/archive.hh" #include "nix/util/base-nix-32.hh" #include @@ -10,7 +10,7 @@ namespace nix { -static size_t refLength = 32; /* characters */ +static constexpr auto refLength = StorePath::HashLen; static void search(std::string_view s, StringSet & hashes, StringSet & seen) { diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 3e2a8e553..b678833e6 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -9,7 +9,6 @@ #include "nix/util/util.hh" #include "nix/store/nar-info-disk-cache.hh" #include "nix/util/thread-pool.hh" -#include "nix/util/references.hh" #include "nix/util/archive.hh" #include "nix/util/callback.hh" #include "nix/util/git.hh" @@ -18,7 +17,6 @@ // `addMultipleToStore`. 
#include "nix/store/worker-protocol.hh" #include "nix/util/signals.hh" -#include "nix/util/users.hh" #include #include diff --git a/src/libutil-tests/meson.build b/src/libutil-tests/meson.build index 2dbb4d129..e4a060d3e 100644 --- a/src/libutil-tests/meson.build +++ b/src/libutil-tests/meson.build @@ -66,7 +66,6 @@ sources = files( 'pool.cc', 'position.cc', 'processes.cc', - 'references.cc', 'sort.cc', 'spawn.cc', 'strings.cc', diff --git a/src/libutil-tests/references.cc b/src/libutil-tests/references.cc deleted file mode 100644 index b76db67cf..000000000 --- a/src/libutil-tests/references.cc +++ /dev/null @@ -1,45 +0,0 @@ -#include "nix/util/references.hh" -#include - -namespace nix { - -using std::string; - -struct RewriteParams -{ - string originalString, finalString; - StringMap rewrites; - - friend std::ostream & operator<<(std::ostream & os, const RewriteParams & bar) - { - StringSet strRewrites; - for (auto & [from, to] : bar.rewrites) - strRewrites.insert(from + "->" + to); - return os << "OriginalString: " << bar.originalString << std::endl - << "Rewrites: " << dropEmptyInitThenConcatStringsSep(",", strRewrites) << std::endl - << "Expected result: " << bar.finalString; - } -}; - -class RewriteTest : public ::testing::TestWithParam -{}; - -TEST_P(RewriteTest, IdentityRewriteIsIdentity) -{ - RewriteParams param = GetParam(); - StringSink rewritten; - auto rewriter = RewritingSink(param.rewrites, rewritten); - rewriter(param.originalString); - rewriter.flush(); - ASSERT_EQ(rewritten.s, param.finalString); -} - -INSTANTIATE_TEST_CASE_P( - references, - RewriteTest, - ::testing::Values( - RewriteParams{"foooo", "baroo", {{"foo", "bar"}, {"bar", "baz"}}}, - RewriteParams{"foooo", "bazoo", {{"fou", "bar"}, {"foo", "baz"}}}, - RewriteParams{"foooo", "foooo", {}})); - -} // namespace nix diff --git a/src/libutil/include/nix/util/meson.build b/src/libutil/include/nix/util/meson.build index bc58b4d5e..bdf114259 100644 --- a/src/libutil/include/nix/util/meson.build +++ b/src/libutil/include/nix/util/meson.build @@ -55,7 +55,6 @@ headers = files( 'posix-source-accessor.hh', 'processes.hh', 'ref.hh', - 'references.hh', 'regex-combinators.hh', 'repair-flag.hh', 'serialise.hh', diff --git a/src/libutil/meson.build b/src/libutil/meson.build index afdddc6b5..ffd1ebd49 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -145,7 +145,6 @@ sources = [ config_priv_h ] + files( 'pos-table.cc', 'position.cc', 'posix-source-accessor.cc', - 'references.cc', 'serialise.cc', 'signature/local-keys.cc', 'signature/signer.cc', diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 9858386c5..d3c9ccb66 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -3,7 +3,7 @@ #include "nix/store/content-address.hh" #include "nix/cmd/legacy.hh" #include "nix/main/shared.hh" -#include "nix/util/references.hh" +#include "nix/store/references.hh" #include "nix/util/archive.hh" #include "nix/util/git.hh" #include "nix/util/posix-source-accessor.hh" From 51c0e6bc63ace7655329fb32d15fe5d9d5842dd6 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 8 Aug 2025 12:05:01 +0300 Subject: [PATCH 137/382] hydra: Restore coverage job Sometime ago we lost the coverage job in the midst of meson migration. Until we have something like codecov it'd be very useful to restore this job with the html reports and historical metrics. As a bonus we get more coverage metrics by switching to LLVM tooling from LCOV. 
--- ci/gha/tests/default.nix | 52 +++++++++++++++++++++++++++------------- packaging/hydra.nix | 15 ++++++++---- 2 files changed, 47 insertions(+), 20 deletions(-) diff --git a/ci/gha/tests/default.nix b/ci/gha/tests/default.nix index d2bee699b..5dbb3f407 100644 --- a/ci/gha/tests/default.nix +++ b/ci/gha/tests/default.nix @@ -2,6 +2,12 @@ nixFlake ? builtins.getFlake ("git+file://" + toString ../../..), system ? builtins.currentSystem, pkgs ? nixFlake.inputs.nixpkgs.legacyPackages.${system}, + nixComponents ? ( + nixFlake.lib.makeComponents { + inherit pkgs; + inherit getStdenv; + } + ), getStdenv ? p: p.stdenv, componentTestsPrefix ? "", withSanitizers ? false, @@ -64,18 +70,13 @@ let in rec { - nixComponents = - (nixFlake.lib.makeComponents { - inherit pkgs; - inherit getStdenv; - }).overrideScope - ( - final: prev: { - nix-store-tests = prev.nix-store-tests.override { withBenchmarks = true; }; + nixComponentsInstrumented = nixComponents.overrideScope ( + final: prev: { + nix-store-tests = prev.nix-store-tests.override { withBenchmarks = true; }; - mesonComponentOverrides = lib.composeManyExtensions componentOverrides; - } - ); + mesonComponentOverrides = lib.composeManyExtensions componentOverrides; + } + ); /** Top-level tests for the flake outputs, as they would be built by hydra. @@ -120,15 +121,15 @@ rec { lib.concatMapAttrs (testName: test: { "${componentTestsPrefix}${pkgName}-${testName}" = test; }) (pkg.tests or { }) - ) nixComponents) + ) nixComponentsInstrumented) // lib.optionalAttrs (pkgs.stdenv.hostPlatform == pkgs.stdenv.buildPlatform) { - "${componentTestsPrefix}nix-functional-tests" = nixComponents.nix-functional-tests; + "${componentTestsPrefix}nix-functional-tests" = nixComponentsInstrumented.nix-functional-tests; }; codeCoverage = let componentsTestsToProfile = - (builtins.mapAttrs (n: v: nixComponents.${n}.tests.run) { + (builtins.mapAttrs (n: v: nixComponentsInstrumented.${n}.tests.run) { "nix-util-tests" = { }; "nix-store-tests" = { }; "nix-fetchers-tests" = { }; @@ -136,7 +137,7 @@ rec { "nix-flake-tests" = { }; }) // { - inherit (nixComponents) nix-functional-tests; + inherit (nixComponentsInstrumented) nix-functional-tests; }; coverageProfileDrvs = lib.mapAttrs ( @@ -170,12 +171,13 @@ rec { coverageReports = let - nixComponentDrvs = lib.filter (lib.isDerivation) (lib.attrValues nixComponents); + nixComponentDrvs = lib.filter (lib.isDerivation) (lib.attrValues nixComponentsInstrumented); in pkgs.runCommand "code-coverage-report" { nativeBuildInputs = [ pkgs.llvmPackages.libllvm + pkgs.jq ]; __structuredAttrs = true; nixComponents = nixComponentDrvs; @@ -201,6 +203,24 @@ rec { echo } >> $out/index.txt + llvm-cov export $arguments -instr-profile ${mergedProfdata} -format=text > $out/coverage.json + + mkdir -p $out/nix-support + + coverageTotals=$(jq ".data[0].totals" $out/coverage.json) + + # Mostly inline from pkgs/build-support/setup-hooks/make-coverage-analysis-report.sh [1], + # which we can't use here, because we rely on LLVM's infra for source code coverage collection. 
+ # [1]: https://github.com/NixOS/nixpkgs/blob/67bb48c4c8e327417d6d5aa7e538244b209e852b/pkgs/build-support/setup-hooks/make-coverage-analysis-report.sh#L16 + declare -A metricsArray=(["lineCoverage"]="lines" ["functionCoverage"]="functions" ["branchCoverage"]="branches") + + for metricName in "''\${!metricsArray[@]}"; do + key="''\${metricsArray[$metricName]}" + metric=$(echo "$coverageTotals" | jq ".$key.percent * 10 | round / 10") + echo "$metricName $metric %" >> $out/nix-support/hydra-metrics + done + + echo "report coverage $out" >> $out/nix-support/hydra-build-products ''; in assert withCoverage; diff --git a/packaging/hydra.nix b/packaging/hydra.nix index 7a7569fa3..9f9749bde 100644 --- a/packaging/hydra.nix +++ b/packaging/hydra.nix @@ -223,10 +223,17 @@ in dockerImage = lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage); # # Line coverage analysis. - # coverage = nixpkgsFor.x86_64-linux.native.nix.override { - # pname = "nix-coverage"; - # withCoverageChecks = true; - # }; + coverage = + (import ./../ci/gha/tests rec { + withCoverage = true; + pkgs = nixpkgsFor.x86_64-linux.nativeForStdenv.clangStdenv; + nixComponents = pkgs.nixComponents2; + nixFlake = null; + getStdenv = p: p.clangStdenv; + }).codeCoverage.coverageReports.overrideAttrs + { + name = "nix-coverage"; # For historical consistency + }; # Nix's manual manual = nixpkgsFor.x86_64-linux.native.nixComponents2.nix-manual; From d2022189a16addf9a67ab70989ae38f90e85e07c Mon Sep 17 00:00:00 2001 From: AGawas Date: Fri, 8 Aug 2025 11:47:47 -0400 Subject: [PATCH 138/382] Inline printHash16or32 and remove redundant helper (#13716) Fixes ##13692 --------- Co-authored-by: John Ericson Co-authored-by: Sergei Zimmerman --- src/libutil/hash.cc | 6 ------ src/libutil/include/nix/util/hash.hh | 5 ----- src/nix/prefetch.cc | 4 +++- 3 files changed, 3 insertions(+), 12 deletions(-) diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index fe7e9ab3b..e469957a0 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -60,12 +60,6 @@ std::strong_ordering Hash::operator<=>(const Hash & h) const noexcept return std::strong_ordering::equivalent; } -std::string printHash16or32(const Hash & hash) -{ - assert(static_cast(hash.algo)); - return hash.to_string(hash.algo == HashAlgorithm::MD5 ? HashFormat::Base16 : HashFormat::Nix32, false); -} - std::string Hash::to_string(HashFormat hashFormat, bool includeAlgo) const { std::string s; diff --git a/src/libutil/include/nix/util/hash.hh b/src/libutil/include/nix/util/hash.hh index f5c01d2e1..f4d137bd0 100644 --- a/src/libutil/include/nix/util/hash.hh +++ b/src/libutil/include/nix/util/hash.hh @@ -133,11 +133,6 @@ public: */ Hash newHashAllowEmpty(std::string_view hashStr, std::optional ha); -/** - * Print a hash in base-16 if it's MD5, or base-32 otherwise. - */ -std::string printHash16or32(const Hash & hash); - /** * Compute the hash of the given string. */ diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index 1423ce517..b651a4c97 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -247,7 +247,9 @@ static int main_nix_prefetch_url(int argc, char ** argv) if (!printPath) printInfo("path is '%s'", store->printStorePath(storePath)); - logger->cout(printHash16or32(hash)); + assert(static_cast(hash.algo)); + logger->cout(hash.to_string(hash.algo == HashAlgorithm::MD5 ? 
HashFormat::Base16 : HashFormat::Nix32, false)); + if (printPath) logger->cout(store->printStorePath(storePath)); From 6ec88b93ba548f48be1c364991d0c6b5e39d5dc9 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sat, 9 Aug 2025 01:12:19 +0300 Subject: [PATCH 139/382] ci: Remove max-jobs = 1 This change was necessary when we were using `nix flake check` for CI (see 6db6b269ed70788314209d35499812c90949057f). Now this is not really necessary, because we are running the checks in a much saner way, that doesn't use up too much memory for evaluation. --- .github/workflows/ci.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1745988da..8c221ff5d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -56,9 +56,7 @@ jobs: github_token: ${{ secrets.GITHUB_TOKEN }} dogfood: true # The sandbox would otherwise be disabled by default on Darwin - extra_nix_config: | - sandbox = true - max-jobs = 1 + extra_nix_config: "sandbox = true" - uses: DeterminateSystems/magic-nix-cache-action@main # Since ubuntu 22.30, unprivileged usernamespaces are no longer allowed to map to the root user: # https://ubuntu.com/blog/ubuntu-23-10-restricted-unprivileged-user-namespaces From 937780ea08674a37bd70ab92fce2642ab1e4371a Mon Sep 17 00:00:00 2001 From: a-kenji Date: Tue, 29 Jul 2025 22:59:14 +0200 Subject: [PATCH 140/382] docs: Add `self-attribute` documentation --- src/nix/flake.md | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/src/nix/flake.md b/src/nix/flake.md index 6cb39fd5f..950b9fa4f 100644 --- a/src/nix/flake.md +++ b/src/nix/flake.md @@ -573,6 +573,27 @@ The value of the `follows` attribute is a `/`-separated sequence of input names denoting the path of inputs to be followed from the root flake. +## Self-attributes + +Flakes can declare attributes about themselves that affect how they are fetched. +These attributes are specified using the special `self` input and are retroactively +applied to it: + +```nix +{ + inputs.self.submodules = true; + inputs.self.lfs = true; +} +``` + +The following self-attributes are supported: + +* `submodules`: A Boolean denoting whether Git submodules should be fetched when this flake is used as an input. When set to `true`, Git submodules will be automatically fetched without requiring callers to specify `submodules=1` in the flake reference URL. Defaults to `false`. + +* `lfs`: A Boolean denoting whether Git LFS (Large File Storage) files should be fetched when this flake is used as an input. When set to `true`, Git LFS files will be automatically fetched. Defaults to `false`. + +These self-attributes eliminate the need for consumers of your flake to manually specify fetching options in their flake references. + Overrides and `follows` can be combined, e.g. ```nix From e8e9376a7b38cb7606bb54e28bbc60a076463077 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 11 Aug 2025 01:00:21 +0300 Subject: [PATCH 141/382] libfetchers: Remove badGitRefRegex and use libgit2 for reference validation Fixes usage of `#` symbol in the reference name. This also seems to identify several deficiencies in the libgit2 refname validation code wrt to DEL symbol and a singular `@` symbol [1]. 
[1]: https://git-scm.com/docs/git-check-ref-format#_description --- src/libfetchers-tests/git-utils.cc | 54 +++++++++++++++ src/libfetchers/git-utils.cc | 66 ++++++++++++++++++- src/libfetchers/git.cc | 6 +- src/libfetchers/github.cc | 6 +- .../include/nix/fetchers/git-utils.hh | 7 ++ src/libfetchers/indirect.cc | 5 +- src/libflake-tests/flakeref.cc | 19 ++++++ src/libutil/include/nix/util/url-parts.hh | 7 -- src/libutil/url.cc | 1 - tests/functional/fetchGitRefs.sh | 2 + 10 files changed, 154 insertions(+), 19 deletions(-) diff --git a/src/libfetchers-tests/git-utils.cc b/src/libfetchers-tests/git-utils.cc index bfba3d679..f9fae23da 100644 --- a/src/libfetchers-tests/git-utils.cc +++ b/src/libfetchers-tests/git-utils.cc @@ -173,4 +173,58 @@ TEST_F(GitUtilsTest, peel_reference) git_repository_free(rawRepo); } +TEST(GitUtils, isLegalRefName) +{ + ASSERT_TRUE(isLegalRefName("foox")); + ASSERT_TRUE(isLegalRefName("1337")); + ASSERT_TRUE(isLegalRefName("foo.baz")); + ASSERT_TRUE(isLegalRefName("foo/bar/baz")); + ASSERT_TRUE(isLegalRefName("foo./bar")); + ASSERT_TRUE(isLegalRefName("heads/foo@bar")); + ASSERT_TRUE(isLegalRefName("heads/fu\303\237")); + ASSERT_TRUE(isLegalRefName("foo-bar-baz")); + ASSERT_TRUE(isLegalRefName("branch#")); + ASSERT_TRUE(isLegalRefName("$1")); + ASSERT_TRUE(isLegalRefName("foo.locke")); + + ASSERT_FALSE(isLegalRefName("refs///heads/foo")); + ASSERT_FALSE(isLegalRefName("heads/foo/")); + ASSERT_FALSE(isLegalRefName("///heads/foo")); + ASSERT_FALSE(isLegalRefName(".foo")); + ASSERT_FALSE(isLegalRefName("./foo")); + ASSERT_FALSE(isLegalRefName("./foo/bar")); + ASSERT_FALSE(isLegalRefName("foo/./bar")); + ASSERT_FALSE(isLegalRefName("foo/bar/.")); + ASSERT_FALSE(isLegalRefName("foo bar")); + ASSERT_FALSE(isLegalRefName("foo?bar")); + ASSERT_FALSE(isLegalRefName("foo^bar")); + ASSERT_FALSE(isLegalRefName("foo~bar")); + ASSERT_FALSE(isLegalRefName("foo:bar")); + ASSERT_FALSE(isLegalRefName("foo[bar")); + ASSERT_FALSE(isLegalRefName("foo/bar/.")); + ASSERT_FALSE(isLegalRefName(".refs/foo")); + ASSERT_FALSE(isLegalRefName("refs/heads/foo.")); + ASSERT_FALSE(isLegalRefName("heads/foo..bar")); + ASSERT_FALSE(isLegalRefName("heads/foo?bar")); + ASSERT_FALSE(isLegalRefName("heads/foo.lock")); + ASSERT_FALSE(isLegalRefName("heads///foo.lock")); + ASSERT_FALSE(isLegalRefName("foo.lock/bar")); + ASSERT_FALSE(isLegalRefName("foo.lock///bar")); + ASSERT_FALSE(isLegalRefName("heads/v@{ation")); + ASSERT_FALSE(isLegalRefName("heads/foo\bar")); + + ASSERT_FALSE(isLegalRefName("@")); + ASSERT_FALSE(isLegalRefName("\37")); + ASSERT_FALSE(isLegalRefName("\177")); + + ASSERT_FALSE(isLegalRefName("foo/*")); + ASSERT_FALSE(isLegalRefName("*/foo")); + ASSERT_FALSE(isLegalRefName("foo/*/bar")); + ASSERT_FALSE(isLegalRefName("*")); + ASSERT_FALSE(isLegalRefName("foo/*/*")); + ASSERT_FALSE(isLegalRefName("*/foo/*")); + ASSERT_FALSE(isLegalRefName("/foo")); + ASSERT_FALSE(isLegalRefName("")); +} + } // namespace nix diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 993d7fb08..b8d9b03ce 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -93,8 +93,11 @@ Hash toHash(const git_oid & oid) static void initLibGit2() { - if (git_libgit2_init() < 0) - throw Error("initialising libgit2: %s", git_error_last()->message); + static std::once_flag initialized; + std::call_once(initialized, []() { + if (git_libgit2_init() < 0) + throw Error("initialising libgit2: %s", git_error_last()->message); + }); } git_oid hashToOID(const Hash & hash) @@ -1308,4 
+1311,63 @@ GitRepo::WorkdirInfo GitRepo::getCachedWorkdirInfo(const std::filesystem::path & return workdirInfo; } +/** + * Checks that the git reference is valid and normalizes slash '/' sequences. + * + * Accepts shorthand references (one-level refnames are allowed). + */ +bool isValidRefNameAllowNormalizations(const std::string & refName) +{ + /* Unfortunately libgit2 doesn't expose the limit in headers, but its internal + limit is also 1024. */ + std::array normalizedRefBuffer; + + /* It would be nice to have a better API like git_reference_name_is_valid, but + * with GIT_REFERENCE_FORMAT_REFSPEC_SHORTHAND flag. libgit2 uses it internally + * but doesn't expose it in public headers [1]. + * [1]: + * https://github.com/libgit2/libgit2/blob/9d5f1bacc23594c2ba324c8f0d41b88bf0e9ef04/src/libgit2/refs.c#L1362-L1365 + */ + + auto res = git_reference_normalize_name( + normalizedRefBuffer.data(), + normalizedRefBuffer.size(), + refName.c_str(), + GIT_REFERENCE_FORMAT_ALLOW_ONELEVEL | GIT_REFERENCE_FORMAT_REFSPEC_SHORTHAND); + + return res == 0; +} + +bool isLegalRefName(const std::string & refName) +{ + initLibGit2(); + + /* Since `git_reference_normalize_name` is the best API libgit2 has for verifying + * reference names with shorthands (see comment in normalizeRefName), we need to + * ensure that exceptions to the validity checks imposed by normalization [1] are checked + * explicitly. + * [1]: https://git-scm.com/docs/git-check-ref-format#Documentation/git-check-ref-format.txt---normalize + */ + + /* Check for cases that don't get rejected by libgit2. + * FIXME: libgit2 should reject this. */ + if (refName == "@") + return false; + + /* Leading slashes and consecutive slashes are stripped during normalizatiton. */ + if (refName.starts_with('/') || refName.find("//") != refName.npos) + return false; + + /* Refer to libgit2. */ + if (!isValidRefNameAllowNormalizations(refName)) + return false; + + /* libgit2 doesn't barf on DEL symbol. + * FIXME: libgit2 should reject this. 
*/ + if (refName.find('\177') != refName.npos) + return false; + + return true; +} + } // namespace nix diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 4c2a655b9..43105c699 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -228,10 +228,8 @@ struct GitInputScheme : InputScheme maybeGetBoolAttr(attrs, "verifyCommit"); - if (auto ref = maybeGetStrAttr(attrs, "ref")) { - if (std::regex_search(*ref, badGitRefRegex)) - throw BadURL("invalid Git branch/tag name '%s'", *ref); - } + if (auto ref = maybeGetStrAttr(attrs, "ref"); ref && !isLegalRefName(*ref)) + throw BadURL("invalid Git branch/tag name '%s'", *ref); Input input{settings}; input.attrs = attrs; diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index c91f3ad3a..841a9c2df 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -48,7 +48,7 @@ struct GitArchiveInputScheme : InputScheme if (size == 3) { if (std::regex_match(path[2], revRegex)) rev = Hash::parseAny(path[2], HashAlgorithm::SHA1); - else if (std::regex_match(path[2], refRegex)) + else if (isLegalRefName(path[2])) ref = path[2]; else throw BadURL("in URL '%s', '%s' is not a commit hash or branch/tag name", url, path[2]); @@ -61,7 +61,7 @@ struct GitArchiveInputScheme : InputScheme } } - if (std::regex_match(rs, refRegex)) { + if (isLegalRefName(rs)) { ref = rs; } else { throw BadURL("in URL '%s', '%s' is not a branch/tag name", url, rs); @@ -75,7 +75,7 @@ struct GitArchiveInputScheme : InputScheme throw BadURL("URL '%s' contains multiple commit hashes", url); rev = Hash::parseAny(value, HashAlgorithm::SHA1); } else if (name == "ref") { - if (!std::regex_match(value, refRegex)) + if (!isLegalRefName(value)) throw BadURL("URL '%s' contains an invalid branch/tag name", url); if (ref) throw BadURL("URL '%s' contains multiple branch/tag names", url); diff --git a/src/libfetchers/include/nix/fetchers/git-utils.hh b/src/libfetchers/include/nix/fetchers/git-utils.hh index 2ea2acd02..07b985541 100644 --- a/src/libfetchers/include/nix/fetchers/git-utils.hh +++ b/src/libfetchers/include/nix/fetchers/git-utils.hh @@ -157,4 +157,11 @@ struct Setter } }; +/** + * Checks that the git reference is valid and normalized. + * + * Accepts shorthand references (one-level refnames are allowed). 
+ */ +bool isLegalRefName(const std::string & refName); + } // namespace nix diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc index f949679c2..4bd4d890d 100644 --- a/src/libfetchers/indirect.cc +++ b/src/libfetchers/indirect.cc @@ -1,4 +1,5 @@ #include "nix/fetchers/fetchers.hh" +#include "nix/fetchers/git-utils.hh" #include "nix/util/url-parts.hh" #include "nix/store/path.hh" @@ -22,12 +23,12 @@ struct IndirectInputScheme : InputScheme } else if (path.size() == 2) { if (std::regex_match(path[1], revRegex)) rev = Hash::parseAny(path[1], HashAlgorithm::SHA1); - else if (std::regex_match(path[1], refRegex)) + else if (isLegalRefName(path[1])) ref = path[1]; else throw BadURL("in flake URL '%s', '%s' is not a commit hash or branch/tag name", url, path[1]); } else if (path.size() == 3) { - if (!std::regex_match(path[1], refRegex)) + if (!isLegalRefName(path[1])) throw BadURL("in flake URL '%s', '%s' is not a branch/tag name", url, path[1]); ref = path[1]; if (!std::regex_match(path[2], revRegex)) diff --git a/src/libflake-tests/flakeref.cc b/src/libflake-tests/flakeref.cc index b8f1ef7c9..2f8deb123 100644 --- a/src/libflake-tests/flakeref.cc +++ b/src/libflake-tests/flakeref.cc @@ -57,6 +57,25 @@ TEST(parseFlakeRef, path) } } +TEST(parseFlakeRef, GitArchiveInput) +{ + experimentalFeatureSettings.experimentalFeatures.get().insert(Xp::Flakes); + + fetchers::Settings fetchSettings; + + { + auto s = "github:foo/bar/branch%23"; // branch name with `#` + auto flakeref = parseFlakeRef(fetchSettings, s); + ASSERT_EQ(flakeref.to_string(), "github:foo/bar/branch%23"); + } + + { + auto s = "github:foo/bar?ref=branch%23"; // branch name with `#` + auto flakeref = parseFlakeRef(fetchSettings, s); + ASSERT_EQ(flakeref.to_string(), "github:foo/bar/branch%23"); + } +} + TEST(to_string, doesntReencodeUrl) { fetchers::Settings fetchSettings; diff --git a/src/libutil/include/nix/util/url-parts.hh b/src/libutil/include/nix/util/url-parts.hh index 72c901b5d..c57c32e61 100644 --- a/src/libutil/include/nix/util/url-parts.hh +++ b/src/libutil/include/nix/util/url-parts.hh @@ -19,13 +19,6 @@ const static std::string fragmentRegex = "(?:" + pcharRegex + "|[/? \"^])*"; const static std::string refRegexS = "[a-zA-Z0-9@][a-zA-Z0-9_.\\/@+-]*"; extern std::regex refRegex; -/// Instead of defining what a good Git Ref is, we define what a bad Git Ref is -/// This is because of the definition of a ref in refs.c in https://github.com/git/git -/// See tests/functional/fetchGitRefs.sh for the full definition -const static std::string badGitRefRegexS = - "//|^[./]|/\\.|\\.\\.|[[:cntrl:][:space:]:?^~\[]|\\\\|\\*|\\.lock$|\\.lock/|@\\{|[/.]$|^@$|^$"; -extern std::regex badGitRefRegex; - /// A Git revision (a SHA-1 commit hash). 
const static std::string revRegexS = "[0-9a-fA-F]{40}"; extern std::regex revRegex; diff --git a/src/libutil/url.cc b/src/libutil/url.cc index 134d313ed..8f902552f 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -9,7 +9,6 @@ namespace nix { std::regex refRegex(refRegexS, std::regex::ECMAScript); -std::regex badGitRefRegex(badGitRefRegexS, std::regex::ECMAScript); std::regex revRegex(revRegexS, std::regex::ECMAScript); /** diff --git a/tests/functional/fetchGitRefs.sh b/tests/functional/fetchGitRefs.sh index ee054fabc..258a65525 100755 --- a/tests/functional/fetchGitRefs.sh +++ b/tests/functional/fetchGitRefs.sh @@ -67,6 +67,7 @@ valid_ref 'foo./bar' valid_ref 'heads/foo@bar' valid_ref "$(printf 'heads/fu\303\237')" valid_ref 'foo-bar-baz' +valid_ref 'branch#' valid_ref '$1' valid_ref 'foo.locke' @@ -97,6 +98,7 @@ invalid_ref 'heads/v@{ation' invalid_ref 'heads/foo\.ar' # should fail due to \ invalid_ref 'heads/foo\bar' # should fail due to \ invalid_ref "$(printf 'heads/foo\t')" # should fail because it has a TAB +invalid_ref "$(printf 'heads/foo\37')" invalid_ref "$(printf 'heads/foo\177')" invalid_ref '@' From 06750948618f0486d05162d06544de69cc053ada Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Mon, 11 Aug 2025 09:27:18 +0200 Subject: [PATCH 142/382] Silence false positive clang-analyzer warning in repl.cc The clang-analyzer incorrectly flags a use-after-free for GC-managed objects when used with std::unique_ptr. Since NixRepl inherits from gc, its memory is properly managed by Boehm GC and this is a false positive. Added NOLINTNEXTLINE directive to suppress the warning. --- src/libcmd/repl.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 8170bd579..001da4deb 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -917,6 +917,7 @@ ReplExitStatus AbstractNixRepl::runSimple( return values; }; LookupPath lookupPath = {}; + // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDelete) auto repl = std::make_unique( lookupPath, openStore(), From 46b69073465f58847676d46ffcbdc93a0a46ce08 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Mon, 11 Aug 2025 19:18:04 +0200 Subject: [PATCH 143/382] Revert "Merge pull request #13709 from NixOS/boehm-coroutines-sp" This reverts commit 4b3ca9bd802f3cbdffb743cc399bad0af2ebfcf5, reversing changes made to 867b69f53324226c35455471f3b10f2ffd12e67c. Since this commit we get reproducible segfaults building Nix ci in macos github runners: https://github.com/NixOS/nix/actions/runs/16885882321/job/47837390248 --- src/libexpr/eval-gc.cc | 64 ------------------------------------------ 1 file changed, 64 deletions(-) diff --git a/src/libexpr/eval-gc.cc b/src/libexpr/eval-gc.cc index 9c050dc92..b17336a90 100644 --- a/src/libexpr/eval-gc.cc +++ b/src/libexpr/eval-gc.cc @@ -33,67 +33,6 @@ static void * oomHandler(size_t requested) throw std::bad_alloc(); } -/** - * When a thread goes into a coroutine, we lose its original sp until - * control flow returns to the thread. This causes Boehm GC to crash - * since it will scan memory between the coroutine's sp and the - * original stack base of the thread. Therefore, we detect when the - * current sp is outside of the original thread stack and push the - * entire thread stack instead, as an approximation. - * - * This is not optimal, because it causes the stack below sp to be - * scanned. However, we usually we don't have active coroutines during - * evaluation, so this is acceptable. 
- * - * Note that we don't scan coroutine stacks. It's currently assumed - * that we don't have GC roots in coroutines. - */ -void fixupBoehmStackPointer(void ** sp_ptr, void * _pthread_id) -{ - void *& sp = *sp_ptr; - auto pthread_id = reinterpret_cast(_pthread_id); - size_t osStackSize; - // The low address of the stack, which grows down. - void * osStackLimit; - -# ifdef __APPLE__ - osStackSize = pthread_get_stacksize_np(pthread_id); - osStackLimit = pthread_get_stackaddr_np(pthread_id); -# else - pthread_attr_t pattr; - if (pthread_attr_init(&pattr)) { - throw Error("fixupBoehmStackPointer: pthread_attr_init failed"); - } -# ifdef HAVE_PTHREAD_GETATTR_NP - if (pthread_getattr_np(pthread_id, &pattr)) { - throw Error("fixupBoehmStackPointer: pthread_getattr_np failed"); - } -# elif HAVE_PTHREAD_ATTR_GET_NP - if (!pthread_attr_init(&pattr)) { - throw Error("fixupBoehmStackPointer: pthread_attr_init failed"); - } - if (!pthread_attr_get_np(pthread_id, &pattr)) { - throw Error("fixupBoehmStackPointer: pthread_attr_get_np failed"); - } -# else -# error "Need one of `pthread_attr_get_np` or `pthread_getattr_np`" -# endif - if (pthread_attr_getstack(&pattr, &osStackLimit, &osStackSize)) { - throw Error("fixupBoehmStackPointer: pthread_attr_getstack failed"); - } - if (pthread_attr_destroy(&pattr)) { - throw Error("fixupBoehmStackPointer: pthread_attr_destroy failed"); - } -# endif - - void * osStackBase = (char *) osStackLimit + osStackSize; - // NOTE: We assume the stack grows down, as it does on all architectures we support. - // Architectures that grow the stack up are rare. - if (sp >= osStackBase || sp < osStackLimit) { // sp is outside the os stack - sp = osStackLimit; - } -} - static inline void initGCReal() { /* Initialise the Boehm garbage collector. */ @@ -124,9 +63,6 @@ static inline void initGCReal() GC_set_oom_fn(oomHandler); - GC_set_sp_corrector(&fixupBoehmStackPointer); - assert(GC_get_sp_corrector()); - /* Set the initial heap size to something fairly big (25% of physical RAM, up to a maximum of 384 MiB) so that in most cases we don't need to garbage collect at all. (Collection has a From 2b2c832f0a87697e2c0772ce2a92d314384df746 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 11 Aug 2025 20:30:00 +0300 Subject: [PATCH 144/382] ci: Disable dogfooding To unbreak the CI. 
--- .github/workflows/ci.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8c221ff5d..0db256ff3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,7 +15,7 @@ jobs: fetch-depth: 0 - uses: ./.github/actions/install-nix-action with: - dogfood: true + dogfood: false extra_nix_config: experimental-features = nix-command flakes github_token: ${{ secrets.GITHUB_TOKEN }} @@ -54,7 +54,7 @@ jobs: - uses: ./.github/actions/install-nix-action with: github_token: ${{ secrets.GITHUB_TOKEN }} - dogfood: true + dogfood: false # The sandbox would otherwise be disabled by default on Darwin extra_nix_config: "sandbox = true" - uses: DeterminateSystems/magic-nix-cache-action@main @@ -217,7 +217,7 @@ jobs: - uses: actions/checkout@v4 - uses: ./.github/actions/install-nix-action with: - dogfood: true + dogfood: false extra_nix_config: experimental-features = nix-command flakes github_token: ${{ secrets.GITHUB_TOKEN }} @@ -248,7 +248,7 @@ jobs: path: flake-regressions/tests - uses: ./.github/actions/install-nix-action with: - dogfood: true + dogfood: false extra_nix_config: experimental-features = nix-command flakes github_token: ${{ secrets.GITHUB_TOKEN }} @@ -269,7 +269,7 @@ jobs: - uses: ./.github/actions/install-nix-action with: github_token: ${{ secrets.GITHUB_TOKEN }} - dogfood: true + dogfood: false extra_nix_config: | experimental-features = flakes nix-command ca-derivations impure-derivations max-jobs = 1 From 9d033733f21296ae929ba64efd2d00c4e8d36f16 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 11 Aug 2025 20:35:09 +0300 Subject: [PATCH 145/382] ci: Bump pinned stable nix version 2.30.1 can't build our daemon tests. The fix is in 2.30.2. 
--- .github/actions/install-nix-action/action.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/install-nix-action/action.yaml b/.github/actions/install-nix-action/action.yaml index bc2c022c7..c299b3956 100644 --- a/.github/actions/install-nix-action/action.yaml +++ b/.github/actions/install-nix-action/action.yaml @@ -9,7 +9,7 @@ inputs: install_url: description: "URL of the Nix installer" required: false - default: "https://releases.nixos.org/nix/nix-2.30.1/install" + default: "https://releases.nixos.org/nix/nix-2.30.2/install" github_token: description: "Github token" required: true From a7fb257ec2b7cb2969b4a97b74aada92c397f3d6 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 11 Aug 2025 18:40:35 +0300 Subject: [PATCH 146/382] libutil: Make AbstractConfig::getSettings const --- src/libutil/config-global.cc | 2 +- src/libutil/configuration.cc | 2 +- src/libutil/include/nix/util/config-global.hh | 2 +- src/libutil/include/nix/util/configuration.hh | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/libutil/config-global.cc b/src/libutil/config-global.cc index 3b1bc5af9..cd461ea48 100644 --- a/src/libutil/config-global.cc +++ b/src/libutil/config-global.cc @@ -15,7 +15,7 @@ bool GlobalConfig::set(const std::string & name, const std::string & value) return false; } -void GlobalConfig::getSettings(std::map & res, bool overriddenOnly) +void GlobalConfig::getSettings(std::map & res, bool overriddenOnly) const { for (auto & config : configRegistrations()) config->getSettings(res, overriddenOnly); diff --git a/src/libutil/configuration.cc b/src/libutil/configuration.cc index 4db863e1f..dc9d91f63 100644 --- a/src/libutil/configuration.cc +++ b/src/libutil/configuration.cc @@ -85,7 +85,7 @@ void AbstractConfig::reapplyUnknownSettings() set(s.first, s.second); } -void Config::getSettings(std::map & res, bool overriddenOnly) +void Config::getSettings(std::map & res, bool overriddenOnly) const { for (const auto & opt : _settings) if (!opt.second.isAlias && (!overriddenOnly || opt.second.setting->overridden) diff --git a/src/libutil/include/nix/util/config-global.hh b/src/libutil/include/nix/util/config-global.hh index 4a4277c48..0e6f43ec4 100644 --- a/src/libutil/include/nix/util/config-global.hh +++ b/src/libutil/include/nix/util/config-global.hh @@ -17,7 +17,7 @@ struct GlobalConfig : public AbstractConfig bool set(const std::string & name, const std::string & value) override; - void getSettings(std::map & res, bool overriddenOnly = false) override; + void getSettings(std::map & res, bool overriddenOnly = false) const override; void resetOverridden() override; diff --git a/src/libutil/include/nix/util/configuration.hh b/src/libutil/include/nix/util/configuration.hh index cc7e6aff7..65391721c 100644 --- a/src/libutil/include/nix/util/configuration.hh +++ b/src/libutil/include/nix/util/configuration.hh @@ -73,7 +73,7 @@ public: * - res: map to store settings in * - overriddenOnly: when set to true only overridden settings will be added to `res` */ - virtual void getSettings(std::map & res, bool overriddenOnly = false) = 0; + virtual void getSettings(std::map & res, bool overriddenOnly = false) const = 0; /** * Parses the configuration in `contents` and applies it @@ -160,7 +160,7 @@ public: void addSetting(AbstractSetting * setting); - void getSettings(std::map & res, bool overriddenOnly = false) override; + void getSettings(std::map & res, bool overriddenOnly = false) const override; void resetOverridden() override; From 
28b74c3143387c76bff03de01580ddfe1364a300 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 11 Aug 2025 17:51:03 +0300 Subject: [PATCH 147/382] libstore: Add forward declarations for SandboxMode serializers This is necessary to make libstore-tests for store configs work again. --- src/libstore/include/nix/store/globals.hh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/libstore/include/nix/store/globals.hh b/src/libstore/include/nix/store/globals.hh index 0014a6638..e97210892 100644 --- a/src/libstore/include/nix/store/globals.hh +++ b/src/libstore/include/nix/store/globals.hh @@ -18,6 +18,11 @@ namespace nix { typedef enum { smEnabled, smRelaxed, smDisabled } SandboxMode; +template<> +SandboxMode BaseSetting::parse(const std::string & str) const; +template<> +std::string BaseSetting::to_string() const; + struct MaxBuildJobsSetting : public BaseSetting { MaxBuildJobsSetting( From 2c38ad2cfaa394b3112326df5145fc62dfce78d6 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 11 Aug 2025 19:21:53 +0300 Subject: [PATCH 148/382] libutil: Include necessary headers in config-impl.hh These headers need to be included before everything else (at least in GCC). --- src/libexpr/eval-profiler-settings.cc | 1 - src/libutil/include/nix/util/config-impl.hh | 2 ++ 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libexpr/eval-profiler-settings.cc b/src/libexpr/eval-profiler-settings.cc index 1ee5e9231..57bd6a46d 100644 --- a/src/libexpr/eval-profiler-settings.cc +++ b/src/libexpr/eval-profiler-settings.cc @@ -1,6 +1,5 @@ #include "nix/expr/eval-profiler-settings.hh" #include "nix/util/configuration.hh" -#include "nix/util/logging.hh" /* Needs to be included before config-impl.hh */ #include "nix/util/config-impl.hh" #include "nix/util/abstract-setting-to-json.hh" diff --git a/src/libutil/include/nix/util/config-impl.hh b/src/libutil/include/nix/util/config-impl.hh index f72917b11..f407bc862 100644 --- a/src/libutil/include/nix/util/config-impl.hh +++ b/src/libutil/include/nix/util/config-impl.hh @@ -12,8 +12,10 @@ * instantiation. */ +#include "nix/util/util.hh" #include "nix/util/configuration.hh" #include "nix/util/args.hh" +#include "nix/util/logging.hh" namespace nix { From 73ebdf2497976c3a1ee8ce00bced35e93d05358a Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 11 Aug 2025 19:42:39 +0300 Subject: [PATCH 149/382] libstore-tests: Restore commented out tests --- src/libstore-tests/local-overlay-store.cc | 8 ++------ src/libstore-tests/local-store.cc | 14 +++++--------- src/libstore-tests/ssh-store.cc | 12 +++++------- src/libstore-tests/uds-remote-store.cc | 8 ++------ 4 files changed, 14 insertions(+), 28 deletions(-) diff --git a/src/libstore-tests/local-overlay-store.cc b/src/libstore-tests/local-overlay-store.cc index fe064c3a5..175e5d0f4 100644 --- a/src/libstore-tests/local-overlay-store.cc +++ b/src/libstore-tests/local-overlay-store.cc @@ -1,9 +1,6 @@ -// FIXME: Odd failures for templates that are causing the PR to break -// for now with discussion with @Ericson2314 to comment out. 
-#if 0 -# include +#include -# include "nix/store/local-overlay-store.hh" +#include "nix/store/local-overlay-store.hh" namespace nix { @@ -31,4 +28,3 @@ TEST(LocalOverlayStore, constructConfig_rootPath) } } // namespace nix -#endif diff --git a/src/libstore-tests/local-store.cc b/src/libstore-tests/local-store.cc index ece277609..cdbc29b03 100644 --- a/src/libstore-tests/local-store.cc +++ b/src/libstore-tests/local-store.cc @@ -1,15 +1,12 @@ -// FIXME: Odd failures for templates that are causing the PR to break -// for now with discussion with @Ericson2314 to comment out. -#if 0 -# include +#include -# include "nix/store/local-store.hh" +#include "nix/store/local-store.hh" // Needed for template specialisations. This is not good! When we // overhaul how store configs work, this should be fixed. -# include "nix/util/args.hh" -# include "nix/util/config-impl.hh" -# include "nix/util/abstract-setting-to-json.hh" +#include "nix/util/args.hh" +#include "nix/util/config-impl.hh" +#include "nix/util/abstract-setting-to-json.hh" namespace nix { @@ -37,4 +34,3 @@ TEST(LocalStore, constructConfig_rootPath) } } // namespace nix -#endif diff --git a/src/libstore-tests/ssh-store.cc b/src/libstore-tests/ssh-store.cc index ccb87b767..3c2af311f 100644 --- a/src/libstore-tests/ssh-store.cc +++ b/src/libstore-tests/ssh-store.cc @@ -1,9 +1,8 @@ -// FIXME: Odd failures for templates that are causing the PR to break -// for now with discussion with @Ericson2314 to comment out. -#if 0 -# include +#include -# include "nix/store/ssh-store.hh" +#include "nix/store/ssh-store.hh" +#include "nix/util/config-impl.hh" +#include "nix/util/abstract-setting-to-json.hh" namespace nix { @@ -51,5 +50,4 @@ TEST(MountedSSHStore, constructConfig) })); } -} -#endif +} // namespace nix diff --git a/src/libstore-tests/uds-remote-store.cc b/src/libstore-tests/uds-remote-store.cc index c6a926668..c215d6e18 100644 --- a/src/libstore-tests/uds-remote-store.cc +++ b/src/libstore-tests/uds-remote-store.cc @@ -1,9 +1,6 @@ -// FIXME: Odd failures for templates that are causing the PR to break -// for now with discussion with @Ericson2314 to comment out. 
-#if 0 -# include +#include -# include "nix/store/uds-remote-store.hh" +#include "nix/store/uds-remote-store.hh" namespace nix { @@ -20,4 +17,3 @@ TEST(UDSRemoteStore, constructConfigWrongScheme) } } // namespace nix -#endif From 41af53139248195188b2ec6557bf5c9f7f636905 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 11 Aug 2025 17:51:03 +0300 Subject: [PATCH 150/382] libstore: Correct getUri methods for all stores Previously `getUri` didn't include store query parameters, `ssh-ng` didn't include any information at all and the local store didn't have the path: ``` $ nix store info --store "local?root=/tmp/aaa&require-sigs=false" Store URL: local Version: 2.31.0 Trusted: 1 $ nix store info --store "ssh-ng://localhost?remote-program=nix-daemon" Store URL: ssh-ng:// Version: 2.31.0 Trusted: 1 $ nix store info --store "ssh://localhost?remote-program=nix-store" Store URL: ssh://localhost ``` This commit changes this to: ``` $ nix store info --store "local?root=/tmp/aaa&require-sigs=false" Store URL: local?require-sigs=false&root=/tmp/aaa Version: 2.31.0 Trusted: 1 $ nix store info --store "ssh-ng://localhost?remote-program=nix-daemon" Store URL: ssh-ng://localhost?remote-program=nix-daemon Version: 2.31.0 Trusted: 1 $ nix store info --store "ssh://localhost?remote-program=nix-store" Store URL: ssh://localhost?remote-program=nix-store ``` --- src/libstore-tests/legacy-ssh-store.cc | 14 ++++++++++---- src/libstore-tests/nix_api_store.cc | 9 ++++++++- src/libstore-tests/ssh-store.cc | 19 +++++++++++++------ src/libstore/include/nix/store/store-api.hh | 13 +++++++++++++ src/libstore/legacy-ssh-store.cc | 4 +++- src/libstore/local-store.cc | 9 ++++++++- src/libstore/ssh-store.cc | 6 +++--- 7 files changed, 58 insertions(+), 16 deletions(-) diff --git a/src/libstore-tests/legacy-ssh-store.cc b/src/libstore-tests/legacy-ssh-store.cc index 2ff5e69ed..c69bd9c28 100644 --- a/src/libstore-tests/legacy-ssh-store.cc +++ b/src/libstore-tests/legacy-ssh-store.cc @@ -6,21 +6,27 @@ namespace nix { TEST(LegacySSHStore, constructConfig) { - LegacySSHStoreConfig config{ + initLibStore(/*loadConfig=*/false); + + auto config = make_ref( "ssh", - "localhost", + "me@localhost:2222", StoreConfig::Params{ { "remote-program", // TODO #11106, no more split on space "foo bar", }, - }}; + }); + EXPECT_EQ( - config.remoteProgram.get(), + config->remoteProgram.get(), (Strings{ "foo", "bar", })); + + auto store = config->openStore(); + EXPECT_EQ(store->getUri(), "ssh://me@localhost:2222?remote-program=foo%20bar"); } } // namespace nix diff --git a/src/libstore-tests/nix_api_store.cc b/src/libstore-tests/nix_api_store.cc index 05373cb88..b0707e9f4 100644 --- a/src/libstore-tests/nix_api_store.cc +++ b/src/libstore-tests/nix_api_store.cc @@ -5,6 +5,7 @@ #include "nix/store/tests/nix_api_store.hh" #include "nix/util/tests/string_callback.hh" +#include "nix/util/url.hh" #include "store-tests-config.hh" @@ -23,7 +24,13 @@ TEST_F(nix_api_store_test, nix_store_get_uri) std::string str; auto ret = nix_store_get_uri(ctx, store, OBSERVE_STRING(str)); ASSERT_EQ(NIX_OK, ret); - ASSERT_STREQ("local", str.c_str()); + auto expectedStoreURI = "local?" 
+ + nix::encodeQuery({ + {"log", nixLogDir}, + {"state", nixStateDir}, + {"store", nixStoreDir}, + }); + ASSERT_EQ(expectedStoreURI, str); } TEST_F(nix_api_util_context, nix_store_get_storedir_default) diff --git a/src/libstore-tests/ssh-store.cc b/src/libstore-tests/ssh-store.cc index 3c2af311f..28ea0ee0b 100644 --- a/src/libstore-tests/ssh-store.cc +++ b/src/libstore-tests/ssh-store.cc @@ -8,24 +8,31 @@ namespace nix { TEST(SSHStore, constructConfig) { - SSHStoreConfig config{ - "ssh", - "localhost", + initLibStore(/*loadConfig=*/false); + + auto config = make_ref( + "ssh-ng", + "me@localhost:2222", StoreConfig::Params{ { "remote-program", // TODO #11106, no more split on space "foo bar", }, - }, - }; + }); EXPECT_EQ( - config.remoteProgram.get(), + config->remoteProgram.get(), (Strings{ "foo", "bar", })); + + auto store = config->openStore(); + EXPECT_EQ(store->getUri(), "ssh-ng://me@localhost:2222?remote-program=foo%20bar"); + config->resetOverridden(); + store = config->openStore(); + EXPECT_EQ(store->getUri(), "ssh-ng://me@localhost:2222"); } TEST(MountedSSHStore, constructConfig) diff --git a/src/libstore/include/nix/store/store-api.hh b/src/libstore/include/nix/store/store-api.hh index 3fbb539a1..f8356be78 100644 --- a/src/libstore/include/nix/store/store-api.hh +++ b/src/libstore/include/nix/store/store-api.hh @@ -126,6 +126,19 @@ struct StoreConfig : public StoreDirConfig return ""; } + /** + * Get overridden store reference query parameters. + */ + StringMap getQueryParams() const + { + auto queryParams = std::map{}; + getSettings(queryParams, /*overriddenOnly=*/true); + StringMap res; + for (const auto & [name, info] : queryParams) + res.insert({name, info.value}); + return res; + } + /** * An experimental feature this type store is gated, if it is to be * experimental. 
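
For illustration only (this snippet is not part of the change): the new `getQueryParams()` helper above returns just the *overridden* settings, and the reworked `getUri()` implementations below render them as query parameters after the scheme and authority. Below is a minimal, self-contained C++ sketch of that rendering step, using the example values from the commit message; the percent-encoding that libutil's `encodeQuery()` performs is deliberately omitted, and `renderStoreUri` is a hypothetical stand-in, not a function in the codebase.

```c++
#include <iostream>
#include <map>
#include <string>

// Sketch: render a store URI from a scheme plus the overridden settings,
// analogous to what the reworked getUri() implementations do (minus encoding).
std::string renderStoreUri(const std::string & scheme, const std::map<std::string, std::string> & overridden)
{
    std::string uri = scheme;
    std::string sep = "?";
    for (const auto & [name, value] : overridden) {
        uri += sep + name + "=" + value;
        sep = "&";
    }
    return uri;
}

int main()
{
    // Overridden settings as in: --store "local?root=/tmp/aaa&require-sigs=false"
    std::map<std::string, std::string> overridden{
        {"require-sigs", "false"},
        {"root", "/tmp/aaa"},
    };
    std::cout << renderStoreUri("local", overridden) << "\n";
    // Prints: local?require-sigs=false&root=/tmp/aaa
}
```

Because only overridden settings are included, calling `resetOverridden()` drops them from the rendered URI again, which is what the `ssh-store` unit test in this patch checks.
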
diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 075702f93..43eaac68b 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -90,7 +90,9 @@ ref LegacySSHStore::openConnection() std::string LegacySSHStore::getUri() { - return *Config::uriSchemes().begin() + "://" + config->authority.to_string(); + return ParsedURL{ + .scheme = *Config::uriSchemes().begin(), .authority = config->authority, .query = config->getQueryParams()} + .to_string(); } std::map LegacySSHStore::queryPathInfosUncached(const StorePathSet & paths) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 685402cfe..48cb8d718 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -16,6 +16,7 @@ #include "nix/store/posix-fs-canonicalise.hh" #include "nix/util/posix-source-accessor.hh" #include "nix/store/keys.hh" +#include "nix/util/url.hh" #include "nix/util/users.hh" #include "nix/store/store-open.hh" #include "nix/store/store-registration.hh" @@ -440,7 +441,13 @@ LocalStore::~LocalStore() std::string LocalStore::getUri() { - return "local"; + std::ostringstream oss; + oss << *config->uriSchemes().begin(); + auto queryParams = config->getQueryParams(); + if (!queryParams.empty()) + oss << "?"; + oss << encodeQuery(queryParams); + return std::move(oss).str(); } int LocalStore::getSchema() diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index 875a4fea5..a1b27cfb7 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -43,7 +43,9 @@ struct SSHStore : virtual RemoteStore std::string getUri() override { - return *Config::uriSchemes().begin() + "://" + host; + return ParsedURL{ + .scheme = *Config::uriSchemes().begin(), .authority = config->authority, .query = config->getQueryParams()} + .to_string(); } // FIXME extend daemon protocol, move implementation to RemoteStore @@ -66,8 +68,6 @@ protected: ref openConnection() override; - std::string host; - std::vector extraRemoteProgramArgs; SSHMaster master; From f62b022872e5920c920934b9d23b4bfea4fc90d0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Mon, 11 Aug 2025 19:29:08 +0200 Subject: [PATCH 151/382] ci: add workflow dispatch to bypass nix installer dogfooding This helps to fix CI if our dogfooding Nix installer is broken --- .github/workflows/ci.yml | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0db256ff3..06ee05580 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -3,6 +3,13 @@ name: "CI" on: pull_request: push: + workflow_dispatch: + inputs: + dogfood: + description: 'Use dogfood Nix build' + required: false + default: true + type: boolean permissions: read-all @@ -15,7 +22,7 @@ jobs: fetch-depth: 0 - uses: ./.github/actions/install-nix-action with: - dogfood: false + dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }} extra_nix_config: experimental-features = nix-command flakes github_token: ${{ secrets.GITHUB_TOKEN }} @@ -54,7 +61,7 @@ jobs: - uses: ./.github/actions/install-nix-action with: github_token: ${{ secrets.GITHUB_TOKEN }} - dogfood: false + dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }} # The sandbox would otherwise be disabled by default on Darwin extra_nix_config: "sandbox = true" - uses: DeterminateSystems/magic-nix-cache-action@main @@ 
-217,7 +224,7 @@ jobs: - uses: actions/checkout@v4 - uses: ./.github/actions/install-nix-action with: - dogfood: false + dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }} extra_nix_config: experimental-features = nix-command flakes github_token: ${{ secrets.GITHUB_TOKEN }} @@ -248,7 +255,7 @@ jobs: path: flake-regressions/tests - uses: ./.github/actions/install-nix-action with: - dogfood: false + dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }} extra_nix_config: experimental-features = nix-command flakes github_token: ${{ secrets.GITHUB_TOKEN }} @@ -269,7 +276,7 @@ jobs: - uses: ./.github/actions/install-nix-action with: github_token: ${{ secrets.GITHUB_TOKEN }} - dogfood: false + dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }} extra_nix_config: | experimental-features = flakes nix-command ca-derivations impure-derivations max-jobs = 1 From 0ef6f72c9cd795181f302948e569d79cc9f97361 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 11 Aug 2025 17:28:01 -0400 Subject: [PATCH 152/382] `getUri` should be `const` and on `Store::Config` not `Store` It is a side-effect property of the configuration alone, not the rest of the store. --- src/libcmd/repl.cc | 4 +-- src/libstore-c/nix_api_store.cc | 2 +- src/libstore-tests/legacy-ssh-store.cc | 2 +- src/libstore-tests/nix_api_store.cc | 2 +- src/libstore-tests/ssh-store.cc | 17 ++++------ src/libstore/binary-cache-store.cc | 14 +++++--- .../build/drv-output-substitution-goal.cc | 2 +- src/libstore/build/substitution-goal.cc | 7 ++-- src/libstore/dummy-store.cc | 10 +++--- src/libstore/http-binary-cache-store.cc | 13 +++---- .../nix/store/http-binary-cache-store.hh | 5 +++ .../include/nix/store/legacy-ssh-store.hh | 4 +-- .../nix/store/local-binary-cache-store.hh | 2 ++ .../include/nix/store/local-overlay-store.hh | 10 +++--- src/libstore/include/nix/store/local-store.hh | 4 +-- .../nix/store/s3-binary-cache-store.hh | 2 ++ src/libstore/include/nix/store/ssh-store.hh | 2 ++ src/libstore/include/nix/store/store-api.hh | 15 ++++---- src/libstore/include/nix/store/store-cast.hh | 2 +- .../include/nix/store/uds-remote-store.hh | 4 +-- src/libstore/legacy-ssh-store.cc | 6 ++-- src/libstore/local-binary-cache-store.cc | 10 +++--- src/libstore/local-store.cc | 6 ++-- src/libstore/remote-store.cc | 4 +-- src/libstore/restricted-store.cc | 5 --- src/libstore/s3-binary-cache-store.cc | 16 ++++----- src/libstore/ssh-store.cc | 12 +++---- src/libstore/store-api.cc | 34 ++++++++++--------- src/libstore/uds-remote-store.cc | 13 ++++--- src/nix/config-check.cc | 6 ++-- src/nix/log.cc | 4 +-- src/nix/run.cc | 2 +- src/nix/store-info.cc | 4 +-- 33 files changed, 123 insertions(+), 122 deletions(-) diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index ea3f44a7c..3e3b882c3 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -574,14 +574,14 @@ ProcessLineResult NixRepl::processLine(std::string line) for (auto & sub : subs) { auto * logSubP = dynamic_cast(&*sub); if (!logSubP) { - printInfo("Skipped '%s' which does not support retrieving build logs", sub->getUri()); + printInfo("Skipped '%s' which does not support retrieving build logs", sub->config.getUri()); continue; } auto & logSub = *logSubP; auto log = logSub.getBuildLog(drvPath); if (log) { - printInfo("got build log for '%s' from '%s'", drvPathRaw, logSub.getUri()); + printInfo("got build 
log for '%s' from '%s'", drvPathRaw, logSub.config.getUri()); logger->writeToStdout(*log); foundLog = true; break; diff --git a/src/libstore-c/nix_api_store.cc b/src/libstore-c/nix_api_store.cc index b7b437e9c..705d8153f 100644 --- a/src/libstore-c/nix_api_store.cc +++ b/src/libstore-c/nix_api_store.cc @@ -62,7 +62,7 @@ nix_err nix_store_get_uri(nix_c_context * context, Store * store, nix_get_string if (context) context->last_err_code = NIX_OK; try { - auto res = store->ptr->getUri(); + auto res = store->ptr->config.getUri(); return call_nix_get_string_callback(res, callback, user_data); } NIXC_CATCH_ERRS diff --git a/src/libstore-tests/legacy-ssh-store.cc b/src/libstore-tests/legacy-ssh-store.cc index c69bd9c28..04c3763ec 100644 --- a/src/libstore-tests/legacy-ssh-store.cc +++ b/src/libstore-tests/legacy-ssh-store.cc @@ -27,6 +27,6 @@ TEST(LegacySSHStore, constructConfig) })); auto store = config->openStore(); - EXPECT_EQ(store->getUri(), "ssh://me@localhost:2222?remote-program=foo%20bar"); + EXPECT_EQ(store->config.getUri(), "ssh://me@localhost:2222?remote-program=foo%20bar"); } } // namespace nix diff --git a/src/libstore-tests/nix_api_store.cc b/src/libstore-tests/nix_api_store.cc index b0707e9f4..cff889ab9 100644 --- a/src/libstore-tests/nix_api_store.cc +++ b/src/libstore-tests/nix_api_store.cc @@ -104,7 +104,7 @@ TEST_F(nix_api_util_context, nix_store_open_dummy) nix_libstore_init(ctx); Store * store = nix_store_open(ctx, "dummy://", nullptr); ASSERT_EQ(NIX_OK, ctx->last_err_code); - ASSERT_STREQ("dummy", store->ptr->getUri().c_str()); + ASSERT_STREQ("dummy", store->ptr->config.getUri().c_str()); std::string str; nix_store_get_version(ctx, store, OBSERVE_STRING(str)); diff --git a/src/libstore-tests/ssh-store.cc b/src/libstore-tests/ssh-store.cc index 28ea0ee0b..335e4ae85 100644 --- a/src/libstore-tests/ssh-store.cc +++ b/src/libstore-tests/ssh-store.cc @@ -8,9 +8,7 @@ namespace nix { TEST(SSHStore, constructConfig) { - initLibStore(/*loadConfig=*/false); - - auto config = make_ref( + SSHStoreConfig config{ "ssh-ng", "me@localhost:2222", StoreConfig::Params{ @@ -19,20 +17,19 @@ TEST(SSHStore, constructConfig) // TODO #11106, no more split on space "foo bar", }, - }); + }, + }; EXPECT_EQ( - config->remoteProgram.get(), + config.remoteProgram.get(), (Strings{ "foo", "bar", })); - auto store = config->openStore(); - EXPECT_EQ(store->getUri(), "ssh-ng://me@localhost:2222?remote-program=foo%20bar"); - config->resetOverridden(); - store = config->openStore(); - EXPECT_EQ(store->getUri(), "ssh-ng://me@localhost:2222"); + EXPECT_EQ(config.getUri(), "ssh-ng://me@localhost:2222?remote-program=foo%20bar"); + config.resetOverridden(); + EXPECT_EQ(config.getUri(), "ssh-ng://me@localhost:2222"); } TEST(MountedSSHStore, constructConfig) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 276d1c78a..c55239413 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -58,7 +58,10 @@ void BinaryCacheStore::init() if (name == "StoreDir") { if (value != storeDir) throw Error( - "binary cache '%s' is for Nix stores with prefix '%s', not '%s'", getUri(), value, storeDir); + "binary cache '%s' is for Nix stores with prefix '%s', not '%s'", + config.getUri(), + value, + storeDir); } else if (name == "WantMassQuery") { config.wantMassQuery.setDefault(value == "1"); } else if (name == "Priority") { @@ -129,7 +132,8 @@ void BinaryCacheStore::writeNarInfo(ref narInfo) } if (diskCache) - diskCache->upsertNarInfo(getUri(), 
std::string(narInfo->path.hashPart()), std::shared_ptr(narInfo)); + diskCache->upsertNarInfo( + config.getUri(), std::string(narInfo->path.hashPart()), std::shared_ptr(narInfo)); } ref BinaryCacheStore::addToStoreCommon( @@ -427,7 +431,7 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink) void BinaryCacheStore::queryPathInfoUncached( const StorePath & storePath, Callback> callback) noexcept { - auto uri = getUri(); + auto uri = config.getUri(); auto storePathS = printStorePath(storePath); auto act = std::make_shared( *logger, @@ -527,7 +531,7 @@ void BinaryCacheStore::queryRealisationUncached( void BinaryCacheStore::registerDrvOutput(const Realisation & info) { if (diskCache) - diskCache->upsertRealisation(getUri(), info); + diskCache->upsertRealisation(config.getUri(), info); auto filePath = realisationsPrefix + "/" + info.id.to_string() + ".doi"; upsertFile(filePath, info.toJSON().dump(), "application/json"); } @@ -555,7 +559,7 @@ std::optional BinaryCacheStore::getBuildLogExact(const StorePath & { auto logPath = "log/" + std::string(baseNameOf(printStorePath(path))); - debug("fetching build log from binary cache '%s/%s'", getUri(), logPath); + debug("fetching build log from binary cache '%s/%s'", config.getUri(), logPath); return getFile(logPath); } diff --git a/src/libstore/build/drv-output-substitution-goal.cc b/src/libstore/build/drv-output-substitution-goal.cc index 0ddd1c438..3f4b787f7 100644 --- a/src/libstore/build/drv-output-substitution-goal.cc +++ b/src/libstore/build/drv-output-substitution-goal.cc @@ -98,7 +98,7 @@ Goal::Co DrvOutputSubstitutionGoal::init() "substituter '%s' has an incompatible realisation for '%s', ignoring.\n" "Local: %s\n" "Remote: %s", - sub->getUri(), + sub->config.getUri(), depId.to_string(), worker.store.printStorePath(localOutputInfo->outPath), worker.store.printStorePath(depPath)); diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index 3c9ad6374..e46ad2007 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -101,7 +101,7 @@ Goal::Co PathSubstitutionGoal::init() } else { printError( "asked '%s' for '%s' but got '%s'", - sub->getUri(), + sub->config.getUri(), worker.store.printStorePath(storePath), sub->printStorePath(info->path)); continue; @@ -127,7 +127,7 @@ Goal::Co PathSubstitutionGoal::init() warn( "ignoring substitute for '%s' from '%s', as it's not signed by any of the keys in 'trusted-public-keys'", worker.store.printStorePath(storePath), - sub->getUri()); + sub->config.getUri()); continue; } @@ -217,7 +217,8 @@ Goal::Co PathSubstitutionGoal::tryToRun( /* Wake up the worker loop when we're done. */ Finally updateStats([this]() { outPipe.writeSide.close(); }); - Activity act(*logger, actSubstitute, Logger::Fields{worker.store.printStorePath(storePath), sub->getUri()}); + Activity act( + *logger, actSubstitute, Logger::Fields{worker.store.printStorePath(storePath), sub->config.getUri()}); PushActivity pact(act.id); copyStorePath(*sub, worker.store, subPath, repair, sub->config.isTrusted ? 
NoCheckSigs : CheckSigs); diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc index 74119a529..bc8f5b6f5 100644 --- a/src/libstore/dummy-store.cc +++ b/src/libstore/dummy-store.cc @@ -32,6 +32,11 @@ struct DummyStoreConfig : public std::enable_shared_from_this, } ref openStore() const override; + + std::string getUri() const override + { + return *uriSchemes().begin(); + } }; struct DummyStore : virtual Store @@ -46,11 +51,6 @@ struct DummyStore : virtual Store { } - std::string getUri() override - { - return *Config::uriSchemes().begin(); - } - void queryPathInfoUncached( const StorePath & path, Callback> callback) noexcept override { diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 21a31c3f5..31899f629 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -62,11 +62,6 @@ public: diskCache = getNarInfoDiskCache(); } - std::string getUri() override - { - return config->cacheUri; - } - void init() override { // FIXME: do this lazily? @@ -90,7 +85,7 @@ protected: auto state(_state.lock()); if (state->enabled && settings.tryFallback) { int t = 60; - printError("disabling binary cache '%s' for %s seconds", getUri(), t); + printError("disabling binary cache '%s' for %s seconds", config->getUri(), t); state->enabled = false; state->disabledUntil = std::chrono::steady_clock::now() + std::chrono::seconds(t); } @@ -103,10 +98,10 @@ protected: return; if (std::chrono::steady_clock::now() > state->disabledUntil) { state->enabled = true; - debug("re-enabling binary cache '%s'", getUri()); + debug("re-enabling binary cache '%s'", config->getUri()); return; } - throw SubstituterDisabled("substituter '%s' is disabled", getUri()); + throw SubstituterDisabled("substituter '%s' is disabled", config->getUri()); } bool fileExists(const std::string & path) override @@ -159,7 +154,7 @@ protected: getFileTransfer()->download(std::move(request), sink); } catch (FileTransferError & e) { if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden) - throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache '%s'", path, getUri()); + throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache '%s'", path, config->getUri()); maybeDisable(); throw; } diff --git a/src/libstore/include/nix/store/http-binary-cache-store.hh b/src/libstore/include/nix/store/http-binary-cache-store.hh index f0d85a119..ef13aa7b6 100644 --- a/src/libstore/include/nix/store/http-binary-cache-store.hh +++ b/src/libstore/include/nix/store/http-binary-cache-store.hh @@ -23,6 +23,11 @@ struct HttpBinaryCacheStoreConfig : std::enable_shared_from_this openStore() const override; + + std::string getUri() const override + { + return cacheUri; + } }; } // namespace nix diff --git a/src/libstore/include/nix/store/legacy-ssh-store.hh b/src/libstore/include/nix/store/legacy-ssh-store.hh index b64189af9..e53d18559 100644 --- a/src/libstore/include/nix/store/legacy-ssh-store.hh +++ b/src/libstore/include/nix/store/legacy-ssh-store.hh @@ -53,6 +53,8 @@ struct LegacySSHStoreConfig : std::enable_shared_from_this static std::string doc(); ref openStore() const override; + + std::string getUri() const override; }; struct LegacySSHStore : public virtual Store @@ -71,8 +73,6 @@ struct LegacySSHStore : public virtual Store ref openConnection(); - std::string getUri() override; - void queryPathInfoUncached( const StorePath & path, Callback> callback) noexcept override; diff --git 
a/src/libstore/include/nix/store/local-binary-cache-store.hh b/src/libstore/include/nix/store/local-binary-cache-store.hh index 3561131d4..5ca5ca43e 100644 --- a/src/libstore/include/nix/store/local-binary-cache-store.hh +++ b/src/libstore/include/nix/store/local-binary-cache-store.hh @@ -26,6 +26,8 @@ struct LocalBinaryCacheStoreConfig : std::enable_shared_from_this openStore() const override; + + std::string getUri() const override; }; } // namespace nix diff --git a/src/libstore/include/nix/store/local-overlay-store.hh b/src/libstore/include/nix/store/local-overlay-store.hh index e5097f3e4..1180f0466 100644 --- a/src/libstore/include/nix/store/local-overlay-store.hh +++ b/src/libstore/include/nix/store/local-overlay-store.hh @@ -88,6 +88,11 @@ struct LocalOverlayStoreConfig : virtual LocalStoreConfig ref openStore() const override; + std::string getUri() const override + { + return "local-overlay://"; + } + protected: /** * @return The host OS path corresponding to the store path for the @@ -116,11 +121,6 @@ struct LocalOverlayStore : virtual LocalStore LocalOverlayStore(ref); - std::string getUri() override - { - return "local-overlay://"; - } - private: /** * The store beneath us. diff --git a/src/libstore/include/nix/store/local-store.hh b/src/libstore/include/nix/store/local-store.hh index 461562ef1..af243d480 100644 --- a/src/libstore/include/nix/store/local-store.hh +++ b/src/libstore/include/nix/store/local-store.hh @@ -111,6 +111,8 @@ struct LocalStoreConfig : std::enable_shared_from_this, static std::string doc(); ref openStore() const override; + + std::string getUri() const override; }; class LocalStore : public virtual IndirectRootStore, public virtual GcStore @@ -196,8 +198,6 @@ public: * Implementations of abstract store API methods. */ - std::string getUri() override; - bool isValidPathUncached(const StorePath & path) override; StorePathSet queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute = NoSubstitute) override; diff --git a/src/libstore/include/nix/store/s3-binary-cache-store.hh b/src/libstore/include/nix/store/s3-binary-cache-store.hh index 584488070..ec3aae149 100644 --- a/src/libstore/include/nix/store/s3-binary-cache-store.hh +++ b/src/libstore/include/nix/store/s3-binary-cache-store.hh @@ -106,6 +106,8 @@ public: static std::string doc(); ref openStore() const override; + + std::string getUri() const override; }; struct S3BinaryCacheStore : virtual BinaryCacheStore diff --git a/src/libstore/include/nix/store/ssh-store.hh b/src/libstore/include/nix/store/ssh-store.hh index 17fea39d5..ff6c3ed69 100644 --- a/src/libstore/include/nix/store/ssh-store.hh +++ b/src/libstore/include/nix/store/ssh-store.hh @@ -33,6 +33,8 @@ struct SSHStoreConfig : std::enable_shared_from_this, static std::string doc(); ref openStore() const override; + + std::string getUri() const override; }; struct MountedSSHStoreConfig : virtual SSHStoreConfig, virtual LocalFSStoreConfig diff --git a/src/libstore/include/nix/store/store-api.hh b/src/libstore/include/nix/store/store-api.hh index f8356be78..8f09fee48 100644 --- a/src/libstore/include/nix/store/store-api.hh +++ b/src/libstore/include/nix/store/store-api.hh @@ -197,6 +197,13 @@ struct StoreConfig : public StoreDirConfig * type. */ virtual ref openStore() const = 0; + + /** + * Render the config back to a "store URL". It should round-trip + * with `resolveStoreConfig` (for stores configs that are + * registered). 
+ */ + virtual std::string getUri() const; }; /** @@ -277,12 +284,6 @@ public: virtual ~Store() {} - /** - * @todo move to `StoreConfig` one we store enough information in - * those to recover the scheme and authority in all cases. - */ - virtual std::string getUri() = 0; - /** * Follow symlinks until we end up with a path in the Nix store. */ @@ -872,7 +873,7 @@ protected: */ [[noreturn]] void unsupported(const std::string & op) { - throw Unsupported("operation '%s' is not supported by store '%s'", op, getUri()); + throw Unsupported("operation '%s' is not supported by store '%s'", op, config.getUri()); } }; diff --git a/src/libstore/include/nix/store/store-cast.hh b/src/libstore/include/nix/store/store-cast.hh index 89775599a..0d7257602 100644 --- a/src/libstore/include/nix/store/store-cast.hh +++ b/src/libstore/include/nix/store/store-cast.hh @@ -17,7 +17,7 @@ T & require(Store & store) { auto * castedStore = dynamic_cast(&store); if (!castedStore) - throw UsageError("%s not supported by store '%s'", T::operationName, store.getUri()); + throw UsageError("%s not supported by store '%s'", T::operationName, store.config.getUri()); return *castedStore; } diff --git a/src/libstore/include/nix/store/uds-remote-store.hh b/src/libstore/include/nix/store/uds-remote-store.hh index e4d0187c8..c77a29a8b 100644 --- a/src/libstore/include/nix/store/uds-remote-store.hh +++ b/src/libstore/include/nix/store/uds-remote-store.hh @@ -44,6 +44,8 @@ struct UDSRemoteStoreConfig : std::enable_shared_from_this } ref openStore() const override; + + std::string getUri() const override; }; struct UDSRemoteStore : virtual IndirectRootStore, virtual RemoteStore @@ -54,8 +56,6 @@ struct UDSRemoteStore : virtual IndirectRootStore, virtual RemoteStore UDSRemoteStore(ref); - std::string getUri() override; - ref getFSAccessor(bool requireValidPath = true) override { return LocalFSStore::getFSAccessor(requireValidPath); diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 43eaac68b..9592994a1 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -88,11 +88,9 @@ ref LegacySSHStore::openConnection() return conn; }; -std::string LegacySSHStore::getUri() +std::string LegacySSHStoreConfig::getUri() const { - return ParsedURL{ - .scheme = *Config::uriSchemes().begin(), .authority = config->authority, .query = config->getQueryParams()} - .to_string(); + return ParsedURL{.scheme = *uriSchemes().begin(), .authority = authority, .query = getQueryParams()}.to_string(); } std::map LegacySSHStore::queryPathInfosUncached(const StorePathSet & paths) diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc index f7511fdce..645a01b09 100644 --- a/src/libstore/local-binary-cache-store.cc +++ b/src/libstore/local-binary-cache-store.cc @@ -23,6 +23,11 @@ std::string LocalBinaryCacheStoreConfig::doc() ; } +std::string LocalBinaryCacheStoreConfig::getUri() const +{ + return "file://" + binaryCacheDir; +} + struct LocalBinaryCacheStore : virtual BinaryCacheStore { using Config = LocalBinaryCacheStoreConfig; @@ -38,11 +43,6 @@ struct LocalBinaryCacheStore : virtual BinaryCacheStore void init() override; - std::string getUri() override - { - return "file://" + config->binaryCacheDir; - } - protected: bool fileExists(const std::string & path) override; diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 48cb8d718..dfffdea6a 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -439,11 +439,11 
@@ LocalStore::~LocalStore() } } -std::string LocalStore::getUri() +std::string LocalStoreConfig::getUri() const { std::ostringstream oss; - oss << *config->uriSchemes().begin(); - auto queryParams = config->getQueryParams(); + oss << *uriSchemes().begin(); + auto queryParams = getQueryParams(); if (!queryParams.empty()) oss << "?"; oss << encodeQuery(queryParams); diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 2b072980b..3eff339e1 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -53,7 +53,7 @@ RemoteStore::RemoteStore(const Config & config) ref RemoteStore::openConnectionWrapper() { if (failed) - throw Error("opening a connection to remote store '%s' previously failed", getUri()); + throw Error("opening a connection to remote store '%s' previously failed", config.getUri()); try { return openConnection(); } catch (...) { @@ -95,7 +95,7 @@ void RemoteStore::initConnection(Connection & conn) if (ex) std::rethrow_exception(ex); } catch (Error & e) { - throw Error("cannot open connection to remote store '%s': %s", getUri(), e.what()); + throw Error("cannot open connection to remote store '%s': %s", config.getUri(), e.what()); } setOptions(conn); diff --git a/src/libstore/restricted-store.cc b/src/libstore/restricted-store.cc index f191950b5..1fb139dff 100644 --- a/src/libstore/restricted-store.cc +++ b/src/libstore/restricted-store.cc @@ -57,11 +57,6 @@ struct RestrictedStore : public virtual IndirectRootStore, public virtual GcStor return next->config->realStoreDir; } - std::string getUri() override - { - return next->getUri(); - } - StorePathSet queryAllValidPaths() override; void queryPathInfoUncached( diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 0df7e482a..5f91a8129 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -254,6 +254,11 @@ std::string S3BinaryCacheStoreConfig::doc() ; } +std::string S3BinaryCacheStoreConfig::getUri() const +{ + return "s3://" + bucketName; +} + struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore { Stats stats; @@ -269,19 +274,14 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore diskCache = getNarInfoDiskCache(); } - std::string getUri() override - { - return "s3://" + config->bucketName; - } - void init() override { - if (auto cacheInfo = diskCache->upToDateCacheExists(getUri())) { + if (auto cacheInfo = diskCache->upToDateCacheExists(config->getUri())) { config->wantMassQuery.setDefault(cacheInfo->wantMassQuery); config->priority.setDefault(cacheInfo->priority); } else { BinaryCacheStore::init(); - diskCache->createCache(getUri(), config->storeDir, config->wantMassQuery, config->priority); + diskCache->createCache(config->getUri(), config->storeDir, config->wantMassQuery, config->priority); } } @@ -519,7 +519,7 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore sink(*res.data); } else - throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache '%s'", path, getUri()); + throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache '%s'", path, config->getUri()); } StorePathSet queryAllValidPaths() override diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index a1b27cfb7..d3420186f 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -25,6 +25,11 @@ std::string SSHStoreConfig::doc() ; } +std::string SSHStoreConfig::getUri() const +{ + return ParsedURL{.scheme = *uriSchemes().begin(), .authority = authority, .query = 
getQueryParams()}.to_string(); +} + struct SSHStore : virtual RemoteStore { using Config = SSHStoreConfig; @@ -41,13 +46,6 @@ struct SSHStore : virtual RemoteStore { } - std::string getUri() override - { - return ParsedURL{ - .scheme = *Config::uriSchemes().begin(), .authority = config->authority, .query = config->getQueryParams()} - .to_string(); - } - // FIXME extend daemon protocol, move implementation to RemoteStore std::optional getBuildLogExact(const StorePath & path) override { diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index b678833e6..a720084a0 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -300,7 +300,7 @@ Store::Store(const Store::Config & config) assertLibStoreInitialized(); } -std::string Store::getUri() +std::string StoreConfig::getUri() const { return ""; } @@ -395,11 +395,11 @@ void Store::querySubstitutablePathInfos(const StorePathCAMap & paths, Substituta "replaced path '%s' with '%s' for substituter '%s'", printStorePath(path.first), sub->printStorePath(subPath), - sub->getUri()); + sub->config.getUri()); } else if (sub->storeDir != storeDir) continue; - debug("checking substituter '%s' for path '%s'", sub->getUri(), sub->printStorePath(subPath)); + debug("checking substituter '%s' for path '%s'", sub->config.getUri(), sub->printStorePath(subPath)); try { auto info = sub->queryPathInfo(subPath); @@ -439,7 +439,7 @@ bool Store::isValidPath(const StorePath & storePath) } if (diskCache) { - auto res = diskCache->lookupNarInfo(getUri(), std::string(storePath.hashPart())); + auto res = diskCache->lookupNarInfo(config.getUri(), std::string(storePath.hashPart())); if (res.first != NarInfoDiskCache::oUnknown) { stats.narInfoReadAverted++; auto state_(state.lock()); @@ -455,7 +455,7 @@ bool Store::isValidPath(const StorePath & storePath) if (diskCache && !valid) // FIXME: handle valid = true case. 
- diskCache->upsertNarInfo(getUri(), std::string(storePath.hashPart()), 0); + diskCache->upsertNarInfo(config.getUri(), std::string(storePath.hashPart()), 0); return valid; } @@ -509,7 +509,7 @@ std::optional> Store::queryPathInfoFromClie } if (diskCache) { - auto res = diskCache->lookupNarInfo(getUri(), hashPart); + auto res = diskCache->lookupNarInfo(config.getUri(), hashPart); if (res.first != NarInfoDiskCache::oUnknown) { stats.narInfoReadAverted++; { @@ -554,7 +554,7 @@ void Store::queryPathInfo(const StorePath & storePath, CallbackupsertNarInfo(getUri(), hashPart, info); + diskCache->upsertNarInfo(config.getUri(), hashPart, info); { auto state_(state.lock()); @@ -578,7 +578,7 @@ void Store::queryRealisation(const DrvOutput & id, CallbacklookupRealisation(getUri(), id); + auto [cacheOutcome, maybeCachedRealisation] = diskCache->lookupRealisation(config.getUri(), id); switch (cacheOutcome) { case NarInfoDiskCache::oValid: debug("Returning a cached realisation for %s", id.to_string()); @@ -604,9 +604,9 @@ void Store::queryRealisation(const DrvOutput & id, CallbackupsertRealisation(getUri(), *info); + diskCache->upsertRealisation(config.getUri(), *info); else - diskCache->upsertAbsentRealisation(getUri(), id); + diskCache->upsertAbsentRealisation(config.getUri(), id); } (*callbackPtr)(std::shared_ptr(info)); @@ -801,8 +801,8 @@ void copyStorePath( if (!repair && dstStore.isValidPath(storePath)) return; - auto srcUri = srcStore.getUri(); - auto dstUri = dstStore.getUri(); + auto srcUri = srcStore.config.getUri(); + auto dstUri = dstStore.config.getUri(); auto storePathS = srcStore.printStorePath(storePath); Activity act( *logger, lvlInfo, actCopyPath, makeCopyPathMessage(srcUri, dstUri, storePathS), {storePathS, srcUri, dstUri}); @@ -839,7 +839,9 @@ void copyStorePath( }, [&]() { throw EndOfFile( - "NAR for '%s' fetched from '%s' is incomplete", srcStore.printStorePath(storePath), srcStore.getUri()); + "NAR for '%s' fetched from '%s' is incomplete", + srcStore.printStorePath(storePath), + srcStore.config.getUri()); }); dstStore.addToStore(*info, *source, repair, checkSigs); @@ -937,7 +939,7 @@ std::map copyPaths( "replaced path '%s' to '%s' for substituter '%s'", srcStore.printStorePath(storePathForSrc), dstStore.printStorePath(storePathForDst), - dstStore.getUri()); + dstStore.config.getUri()); } return storePathForDst; }; @@ -955,8 +957,8 @@ std::map copyPaths( // We can reasonably assume that the copy will happen whenever we // read the path, so log something about that at that point uint64_t total = 0; - auto srcUri = srcStore.getUri(); - auto dstUri = dstStore.getUri(); + auto srcUri = srcStore.config.getUri(); + auto dstUri = dstStore.config.getUri(); auto storePathS = srcStore.printStorePath(missingPath); Activity act( *logger, diff --git a/src/libstore/uds-remote-store.cc b/src/libstore/uds-remote-store.cc index f8b3d834d..e1881c602 100644 --- a/src/libstore/uds-remote-store.cc +++ b/src/libstore/uds-remote-store.cc @@ -54,15 +54,14 @@ UDSRemoteStore::UDSRemoteStore(ref config) { } -std::string UDSRemoteStore::getUri() +std::string UDSRemoteStoreConfig::getUri() const { - return config->path == settings.nixDaemonSocketFile - ? // FIXME: Not clear why we return daemon here and not default - // to settings.nixDaemonSocketFile - // - // unix:// with no path also works. Change what we return? + return path == settings.nixDaemonSocketFile ? // FIXME: Not clear why we return daemon here and not default + // to settings.nixDaemonSocketFile + // + // unix:// with no path also works. 
Change what we return? "daemon" - : std::string(*Config::uriSchemes().begin()) + "://" + config->path; + : std::string(*uriSchemes().begin()) + "://" + path; } void UDSRemoteStore::Connection::closeWrite() diff --git a/src/nix/config-check.cc b/src/nix/config-check.cc index 7fcb7be7e..685795487 100644 --- a/src/nix/config-check.cc +++ b/src/nix/config-check.cc @@ -71,7 +71,7 @@ struct CmdConfigCheck : StoreCommand void run(ref store) override { - logger->log("Running checks against store uri: " + store->getUri()); + logger->log("Running checks against store uri: " + store->config.getUri()); if (store.dynamic_pointer_cast()) { success &= checkNixInPath(); @@ -171,9 +171,9 @@ struct CmdConfigCheck : StoreCommand { if (auto trustedMay = store->isTrustedClient()) { std::string_view trusted = trustedMay.value() ? "trusted" : "not trusted"; - checkInfo(fmt("You are %s by store uri: %s", trusted, store->getUri())); + checkInfo(fmt("You are %s by store uri: %s", trusted, store->config.getUri())); } else { - checkInfo(fmt("Store uri: %s doesn't have a notion of trusted user", store->getUri())); + checkInfo(fmt("Store uri: %s doesn't have a notion of trusted user", store->config.getUri())); } } }; diff --git a/src/nix/log.cc b/src/nix/log.cc index 56e44645b..2b697c609 100644 --- a/src/nix/log.cc +++ b/src/nix/log.cc @@ -48,7 +48,7 @@ struct CmdLog : InstallableCommand for (auto & sub : subs) { auto * logSubP = dynamic_cast(&*sub); if (!logSubP) { - printInfo("Skipped '%s' which does not support retrieving build logs", sub->getUri()); + printInfo("Skipped '%s' which does not support retrieving build logs", sub->config.getUri()); continue; } auto & logSub = *logSubP; @@ -57,7 +57,7 @@ struct CmdLog : InstallableCommand if (!log) continue; logger->stop(); - printInfo("got build log for '%s' from '%s'", installable->what(), logSub.getUri()); + printInfo("got build log for '%s' from '%s'", installable->what(), logSub.config.getUri()); writeFull(getStandardOutput(), *log); return; } diff --git a/src/nix/run.cc b/src/nix/run.cc index bde2cacd8..cd7784cee 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -77,7 +77,7 @@ void execProgramInStore( auto store2 = store.dynamic_pointer_cast(); if (!store2) - throw Error("store '%s' is not a local store so it does not support command execution", store->getUri()); + throw Error("store '%s' is not a local store so it does not support command execution", store->config.getUri()); if (store->storeDir != store2->getRealStoreDir()) { Strings helperArgs = { diff --git a/src/nix/store-info.cc b/src/nix/store-info.cc index 2132dc465..92fcef663 100644 --- a/src/nix/store-info.cc +++ b/src/nix/store-info.cc @@ -24,7 +24,7 @@ struct CmdInfoStore : StoreCommand, MixJSON void run(ref store) override { if (!json) { - notice("Store URL: %s", store->getUri()); + notice("Store URL: %s", store->config.getUri()); store->connect(); if (auto version = store->getVersion()) notice("Version: %s", *version); @@ -34,7 +34,7 @@ struct CmdInfoStore : StoreCommand, MixJSON nlohmann::json res; Finally printRes([&]() { printJSON(res); }); - res["url"] = store->getUri(); + res["url"] = store->config.getUri(); store->connect(); if (auto version = store->getVersion()) res["version"] = *version; From e8d780642d04c6f7ac23b7f55ce3ec580cc4ef70 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Aug 2025 08:21:23 +0000 Subject: [PATCH 153/382] build(deps): bump actions/checkout from 4 to 5 Bumps 
[actions/checkout](https://github.com/actions/checkout) from 4 to 5. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/checkout dependency-version: '5' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/ci.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 06ee05580..a9a9910d2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,7 +17,7 @@ jobs: eval: runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 - uses: ./.github/actions/install-nix-action @@ -55,7 +55,7 @@ jobs: runs-on: ${{ matrix.runs-on }} timeout-minutes: 60 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 - uses: ./.github/actions/install-nix-action @@ -115,7 +115,7 @@ jobs: name: installer test ${{ matrix.scenario }} runs-on: ${{ matrix.runs-on }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Download installer tarball uses: actions/download-artifact@v4 with: @@ -175,7 +175,7 @@ jobs: _DOCKER_SECRETS: ${{ secrets.DOCKERHUB_USERNAME }}${{ secrets.DOCKERHUB_TOKEN }} run: | echo "::set-output name=docker::${{ env._DOCKER_SECRETS != '' }}" - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 - uses: cachix/install-nix-action@v31 @@ -221,7 +221,7 @@ jobs: vm_tests: runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: ./.github/actions/install-nix-action with: dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }} @@ -242,14 +242,14 @@ jobs: runs-on: ubuntu-24.04 steps: - name: Checkout nix - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Checkout flake-regressions - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: repository: NixOS/flake-regressions path: flake-regressions - name: Checkout flake-regressions-data - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: repository: NixOS/flake-regressions-data path: flake-regressions/tests @@ -270,7 +270,7 @@ jobs: github.event_name == 'push' && github.ref_name == 'master' steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 - uses: ./.github/actions/install-nix-action From 4fb89eb2eae44a80a40f2fc644b21136e550aaf5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Aug 2025 09:05:31 +0000 Subject: [PATCH 154/382] build(deps): bump actions/download-artifact from 4 to 5 Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 4 to 5. - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/download-artifact dependency-version: '5' dependency-type: direct:production update-type: version-update:semver-major ... 
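The store patches above move `getUri()` off the `Store` hierarchy and onto `StoreConfig`, so call sites switch from `store->getUri()` to `store->config.getUri()`. A minimal sketch of that shape, using heavily simplified stand-in classes rather than the real Nix headers (only the placement of `getUri()` is meant to match):

#include <iostream>
#include <string>

// Simplified stand-ins for the real Nix types; only where getUri() lives matters here.
struct StoreConfig
{
    virtual ~StoreConfig() = default;

    // Renders the config back to a "store URL"; the base class keeps an empty fallback.
    virtual std::string getUri() const
    {
        return "";
    }
};

struct DummyStoreConfig : StoreConfig
{
    std::string getUri() const override
    {
        return "dummy://";
    }
};

struct Store
{
    // After the refactor the store only holds a config; it no longer implements getUri() itself.
    const StoreConfig & config;

    explicit Store(const StoreConfig & config)
        : config(config)
    {
    }
};

int main()
{
    DummyStoreConfig cfg;
    Store store(cfg);

    // Old call sites: store.getUri()
    // New call sites: store.config.getUri()
    std::cout << "store URL: " << store.config.getUri() << "\n";
}

The empty-string default mirrors the fallback kept by `StoreConfig::getUri()` in `store-api.cc`; concrete configs override it with their scheme and authority.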
Signed-off-by: dependabot[bot] --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a9a9910d2..70712610c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -117,7 +117,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Download installer tarball - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v5 with: name: installer-${{matrix.os}} path: out From 7ed0229d1abd4414144c7af396842462ce6fc1eb Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 12 Aug 2025 14:22:30 +0300 Subject: [PATCH 155/382] tests/functional/lang: Add more tests for TOML timestamps Current test suite doesn't cover the subsecond formatting at all and toml11 is quite finicky with that. We should at the very least test its behavior to avoid silent breakages on updates. --- .../lang/eval-okay-fromTOML-timestamps.exp | 2 +- .../lang/eval-okay-fromTOML-timestamps.nix | 46 ++++++++++++++++++- 2 files changed, 45 insertions(+), 3 deletions(-) diff --git a/tests/functional/lang/eval-okay-fromTOML-timestamps.exp b/tests/functional/lang/eval-okay-fromTOML-timestamps.exp index 08b3c69a6..56e610533 100644 --- a/tests/functional/lang/eval-okay-fromTOML-timestamps.exp +++ b/tests/functional/lang/eval-okay-fromTOML-timestamps.exp @@ -1 +1 @@ -{ "1234" = "value"; "127.0.0.1" = "value"; a = { b = { c = { }; }; }; arr1 = [ 1 2 3 ]; arr2 = [ "red" "yellow" "green" ]; arr3 = [ [ 1 2 ] [ 3 4 5 ] ]; arr4 = [ "all" "strings" "are the same" "type" ]; arr5 = [ [ 1 2 ] [ "a" "b" "c" ] ]; arr7 = [ 1 2 3 ]; arr8 = [ 1 2 ]; bare-key = "value"; bare_key = "value"; bin1 = 214; bool1 = true; bool2 = false; "character encoding" = "value"; d = { e = { f = { }; }; }; dog = { "tater.man" = { type = { name = "pug"; }; }; }; flt1 = 1; flt2 = 3.1415; flt3 = -0.01; flt4 = 5e+22; flt5 = 1e+06; flt6 = -0.02; flt7 = 6.626e-34; flt8 = 9.22462e+06; fruit = [ { name = "apple"; physical = { color = "red"; shape = "round"; }; variety = [ { name = "red delicious"; } { name = "granny smith"; } ]; } { name = "banana"; variety = [ { name = "plantain"; } ]; } ]; g = { h = { i = { }; }; }; hex1 = 3735928559; hex2 = 3735928559; hex3 = 3735928559; int1 = 99; int2 = 42; int3 = 0; int4 = -17; int5 = 1000; int6 = 5349221; int7 = 12345; j = { "ʞ" = { l = { }; }; }; key = "value"; key2 = "value"; ld1 = { _type = "timestamp"; value = "1979-05-27"; }; ldt1 = { _type = "timestamp"; value = "1979-05-27T07:32:00"; }; ldt2 = { _type = "timestamp"; value = "1979-05-27T00:32:00.999999"; }; lt1 = { _type = "timestamp"; value = "07:32:00"; }; lt2 = { _type = "timestamp"; value = "00:32:00.999999"; }; name = "Orange"; oct1 = 342391; oct2 = 493; odt1 = { _type = "timestamp"; value = "1979-05-27T07:32:00Z"; }; odt2 = { _type = "timestamp"; value = "1979-05-27T00:32:00-07:00"; }; odt3 = { _type = "timestamp"; value = "1979-05-27T00:32:00.999999-07:00"; }; odt4 = { _type = "timestamp"; value = "1979-05-27T07:32:00Z"; }; physical = { color = "orange"; shape = "round"; }; products = [ { name = "Hammer"; sku = 738594937; } { } { color = "gray"; name = "Nail"; sku = 284758393; } ]; "quoted \"value\"" = "value"; site = { "google.com" = true; }; str = "I'm a string. \"You can quote me\". 
Name\tJosé\nLocation\tSF."; table-1 = { key1 = "some string"; key2 = 123; }; table-2 = { key1 = "another string"; key2 = 456; }; x = { y = { z = { w = { animal = { type = { name = "pug"; }; }; name = { first = "Tom"; last = "Preston-Werner"; }; point = { x = 1; y = 2; }; }; }; }; }; "ʎǝʞ" = "value"; } +{ "1234" = "value"; "127.0.0.1" = "value"; a = { b = { c = { }; }; }; arr1 = [ 1 2 3 ]; arr2 = [ "red" "yellow" "green" ]; arr3 = [ [ 1 2 ] [ 3 4 5 ] ]; arr4 = [ "all" "strings" "are the same" "type" ]; arr5 = [ [ 1 2 ] [ "a" "b" "c" ] ]; arr7 = [ 1 2 3 ]; arr8 = [ 1 2 ]; bare-key = "value"; bare_key = "value"; bin1 = 214; bool1 = true; bool2 = false; "character encoding" = "value"; d = { e = { f = { }; }; }; dog = { "tater.man" = { type = { name = "pug"; }; }; }; flt1 = 1; flt2 = 3.1415; flt3 = -0.01; flt4 = 5e+22; flt5 = 1e+06; flt6 = -0.02; flt7 = 6.626e-34; flt8 = 9.22462e+06; fruit = [ { name = "apple"; physical = { color = "red"; shape = "round"; }; variety = [ { name = "red delicious"; } { name = "granny smith"; } ]; } { name = "banana"; variety = [ { name = "plantain"; } ]; } ]; g = { h = { i = { }; }; }; hex1 = 3735928559; hex2 = 3735928559; hex3 = 3735928559; int1 = 99; int2 = 42; int3 = 0; int4 = -17; int5 = 1000; int6 = 5349221; int7 = 12345; j = { "ʞ" = { l = { }; }; }; key = "value"; key2 = "value"; ld1 = { _type = "timestamp"; value = "1979-05-27"; }; ldt1 = { _type = "timestamp"; value = "1979-05-27T07:32:00"; }; ldt10 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123456789"; }; ldt11 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123456789"; }; ldt2 = { _type = "timestamp"; value = "1979-05-27T07:32:00.100"; }; ldt3 = { _type = "timestamp"; value = "1979-05-27T07:32:00.120"; }; ldt4 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123"; }; ldt5 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123400"; }; ldt6 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123450"; }; ldt7 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123456"; }; ldt8 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123456700"; }; ldt9 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123456780"; }; lt1 = { _type = "timestamp"; value = "07:32:00"; }; lt10 = { _type = "timestamp"; value = "00:32:00.123456789"; }; lt11 = { _type = "timestamp"; value = "00:32:00.123456789"; }; lt2 = { _type = "timestamp"; value = "00:32:00.100"; }; lt3 = { _type = "timestamp"; value = "00:32:00.120"; }; lt4 = { _type = "timestamp"; value = "00:32:00.123"; }; lt5 = { _type = "timestamp"; value = "00:32:00.123400"; }; lt6 = { _type = "timestamp"; value = "00:32:00.123450"; }; lt7 = { _type = "timestamp"; value = "00:32:00.123456"; }; lt8 = { _type = "timestamp"; value = "00:32:00.123456700"; }; lt9 = { _type = "timestamp"; value = "00:32:00.123456780"; }; name = "Orange"; oct1 = 342391; oct2 = 493; odt1 = { _type = "timestamp"; value = "1979-05-27T07:32:00Z"; }; odt10 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123456Z"; }; odt11 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123456700Z"; }; odt12 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123456780Z"; }; odt13 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123456789Z"; }; odt14 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123456789Z"; }; odt2 = { _type = "timestamp"; value = "1979-05-27T00:32:00-07:00"; }; odt3 = { _type = "timestamp"; value = "1979-05-27T00:32:00.999999-07:00"; }; odt4 = { _type = "timestamp"; value = "1979-05-27T07:32:00Z"; }; odt5 = { _type = "timestamp"; value = 
"1979-05-27T07:32:00.100Z"; }; odt6 = { _type = "timestamp"; value = "1979-05-27T07:32:00.120Z"; }; odt7 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123Z"; }; odt8 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123400Z"; }; odt9 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123450Z"; }; physical = { color = "orange"; shape = "round"; }; products = [ { name = "Hammer"; sku = 738594937; } { } { color = "gray"; name = "Nail"; sku = 284758393; } ]; "quoted \"value\"" = "value"; site = { "google.com" = true; }; str = "I'm a string. \"You can quote me\". Name\tJosé\nLocation\tSF."; table-1 = { key1 = "some string"; key2 = 123; }; table-2 = { key1 = "another string"; key2 = 456; }; x = { y = { z = { w = { animal = { type = { name = "pug"; }; }; name = { first = "Tom"; last = "Preston-Werner"; }; point = { x = 1; y = 2; }; }; }; }; }; "ʎǝʞ" = "value"; } diff --git a/tests/functional/lang/eval-okay-fromTOML-timestamps.nix b/tests/functional/lang/eval-okay-fromTOML-timestamps.nix index 74cff9470..d8f3a03e9 100644 --- a/tests/functional/lang/eval-okay-fromTOML-timestamps.nix +++ b/tests/functional/lang/eval-okay-fromTOML-timestamps.nix @@ -55,11 +55,53 @@ builtins.fromTOML '' odt2 = 1979-05-27T00:32:00-07:00 odt3 = 1979-05-27T00:32:00.999999-07:00 odt4 = 1979-05-27 07:32:00Z + # milliseconds + odt5 = 1979-05-27 07:32:00.1Z + odt6 = 1979-05-27 07:32:00.12Z + odt7 = 1979-05-27 07:32:00.123Z + # microseconds + odt8 = 1979-05-27t07:32:00.1234Z + odt9 = 1979-05-27t07:32:00.12345Z + odt10 = 1979-05-27t07:32:00.123456Z + # nanoseconds + odt11 = 1979-05-27 07:32:00.1234567Z + odt12 = 1979-05-27 07:32:00.12345678Z + odt13 = 1979-05-27 07:32:00.123456789Z + # no more precision after nanoseconds + odt14 = 1979-05-27t07:32:00.1234567891Z + ldt1 = 1979-05-27T07:32:00 - ldt2 = 1979-05-27T00:32:00.999999 + # milliseconds + ldt2 = 1979-05-27T07:32:00.1 + ldt3 = 1979-05-27T07:32:00.12 + ldt4 = 1979-05-27T07:32:00.123 + # microseconds + ldt5 = 1979-05-27t00:32:00.1234 + ldt6 = 1979-05-27t00:32:00.12345 + ldt7 = 1979-05-27t00:32:00.123456 + # nanoseconds + ldt8 = 1979-05-27 00:32:00.1234567 + ldt9 = 1979-05-27 00:32:00.12345678 + ldt10 = 1979-05-27 00:32:00.123456789 + # no more precision after nanoseconds + ldt11 = 1979-05-27t00:32:00.1234567891 + ld1 = 1979-05-27 lt1 = 07:32:00 - lt2 = 00:32:00.999999 + # milliseconds + lt2 = 00:32:00.1 + lt3 = 00:32:00.12 + lt4 = 00:32:00.123 + # microseconds + lt5 = 00:32:00.1234 + lt6 = 00:32:00.12345 + lt7 = 00:32:00.123456 + # nanoseconds + lt8 = 00:32:00.1234567 + lt9 = 00:32:00.12345678 + lt10 = 00:32:00.123456789 + # no more precision after nanoseconds + lt11 = 00:32:00.1234567891 arr1 = [ 1, 2, 3 ] arr2 = [ "red", "yellow", "green" ] From df4e55ffc13c413e270af134227115a20a2341ba Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 12 Aug 2025 14:31:34 +0300 Subject: [PATCH 156/382] libexpr: Remove extra trailing semicolons (NFC) This looks really weird after the reformat. 
--- src/libexpr/primops/fromTOML.cc | 8 -------- 1 file changed, 8 deletions(-) diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc index 533739592..18a988d67 100644 --- a/src/libexpr/primops/fromTOML.cc +++ b/src/libexpr/primops/fromTOML.cc @@ -35,7 +35,6 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va v.mkAttrs(attrs); } break; - ; case toml::value_t::array: { auto array = toml::get>(t); @@ -44,25 +43,20 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va visit(*(v = state.allocValue()), array[n]); v.mkList(list); } break; - ; case toml::value_t::boolean: v.mkBool(toml::get(t)); break; - ; case toml::value_t::integer: v.mkInt(toml::get(t)); break; - ; case toml::value_t::floating: v.mkFloat(toml::get(t)); break; - ; case toml::value_t::string: { auto s = toml::get(t); forceNoNullByte(s); v.mkString(s); } break; - ; case toml::value_t::local_datetime: case toml::value_t::offset_datetime: case toml::value_t::local_date: @@ -80,11 +74,9 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va throw std::runtime_error("Dates and times are not supported"); } } break; - ; case toml::value_t::empty: v.mkNull(); break; - ; } }; From a80a5c4dba0d944fab8f5ed57a343869ae96bf16 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 12 Aug 2025 14:54:53 +0300 Subject: [PATCH 157/382] libexpr: Use recursive lambda instead of std::function There's no reason to use a std::function for recursive lambdas since there are polymorphic lambdas. --- src/libexpr/primops/fromTOML.cc | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc index 18a988d67..9ade6705e 100644 --- a/src/libexpr/primops/fromTOML.cc +++ b/src/libexpr/primops/fromTOML.cc @@ -13,9 +13,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va std::istringstream tomlStream(std::string{toml}); - std::function visit; - - visit = [&](Value & v, toml::value t) { + auto visit = [&](auto & self, Value & v, toml::value t) -> void { switch (t.type()) { case toml::value_t::table: { auto table = toml::get(t); @@ -30,7 +28,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va for (auto & elem : table) { forceNoNullByte(elem.first); - visit(attrs.alloc(elem.first), elem.second); + self(self, attrs.alloc(elem.first), elem.second); } v.mkAttrs(attrs); @@ -40,7 +38,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va auto list = state.buildList(array.size()); for (const auto & [n, v] : enumerate(list)) - visit(*(v = state.allocValue()), array[n]); + self(self, *(v = state.allocValue()), array[n]); v.mkList(list); } break; case toml::value_t::boolean: @@ -81,7 +79,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va }; try { - visit(val, toml::parse(tomlStream, "fromTOML" /* the "filename" */)); + visit(visit, val, toml::parse(tomlStream, "fromTOML" /* the "filename" */)); } catch (std::exception & e) { // TODO: toml::syntax_error state.error("while parsing TOML: %s", e.what()).atPos(pos).debugThrow(); } From d8fc55a46e0c09241131097dbf1d6fa09e0a9808 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 12 Aug 2025 14:58:01 +0300 Subject: [PATCH 158/382] libexpr: Use table.size() instead of unnecessary loop --- src/libexpr/primops/fromTOML.cc | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git 
a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc index 9ade6705e..c6b50ce8e 100644 --- a/src/libexpr/primops/fromTOML.cc +++ b/src/libexpr/primops/fromTOML.cc @@ -17,14 +17,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va switch (t.type()) { case toml::value_t::table: { auto table = toml::get(t); - - size_t size = 0; - for (auto & i : table) { - (void) i; - size++; - } - - auto attrs = state.buildBindings(size); + auto attrs = state.buildBindings(table.size()); for (auto & elem : table) { forceNoNullByte(elem.first); From dc769d72cb8ad22a0f89768682b5499a9d2b3d8b Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 12 Aug 2025 16:11:54 +0300 Subject: [PATCH 159/382] libexpr: Canonicalize TOML timestamps for toml11 > 4.0 This addresses several changes from toml11 4.0 bump in nixpkgs [1]. 1. Added more regression tests for timestamp formats. Special attention needs to be paid to the precision of the subsecond range for local-time. Prior versions select the closest (upwards) multiple of 3 with a hard cap of 9 digits. 2. Normalize local datetime and offset datetime to always use the uppercase separator `T`. This is actually the issue surfaced in [2]. This canonicalization is basically a requirement by (a certain reading) of rfc3339 section 5.6 [3]. 3. If using toml11 >= 4.0 also keep the old behavior wrt to the number of digits used for subsecond part of the local-time. Newer versions cap it at 6 digits unconditionally. [1]: https://www.github.com/NixOS/nixpkgs/pull/331649 [2]: https://www.github.com/NixOS/nix/issues/11441 [3]: https://datatracker.ietf.org/doc/html/rfc3339 --- src/libexpr/meson.build | 6 +++ src/libexpr/primops/fromTOML.cc | 94 ++++++++++++++++++++++++++++++++- 2 files changed, 99 insertions(+), 1 deletion(-) diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index e1a12106d..82a116708 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -71,6 +71,12 @@ toml11 = dependency( method : 'cmake', include_type : 'system', ) + +configdata_priv.set( + 'HAVE_TOML11_4', + toml11.version().version_compare('>= 4.0.0').to_int(), +) + deps_other += toml11 config_priv_h = configure_file( diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc index c6b50ce8e..7d98a5de9 100644 --- a/src/libexpr/primops/fromTOML.cc +++ b/src/libexpr/primops/fromTOML.cc @@ -1,12 +1,91 @@ #include "nix/expr/primops.hh" #include "nix/expr/eval-inline.hh" +#include "expr-config-private.hh" + #include #include namespace nix { +#if HAVE_TOML11_4 + +/** + * This is what toml11 < 4.0 did when choosing the subsecond precision. + * TOML 1.0.0 spec doesn't define how sub-millisecond ranges should be handled and calls it + * implementation defined behavior. For a lack of a better choice we stick with what older versions + * of toml11 did [1]. + * + * [1]: https://github.com/ToruNiina/toml11/blob/dcfe39a783a94e8d52c885e5883a6fbb21529019/toml/datetime.hpp#L282 + */ +static size_t normalizeSubsecondPrecision(toml::local_time lt) +{ + auto millis = lt.millisecond; + auto micros = lt.microsecond; + auto nanos = lt.nanosecond; + if (millis != 0 || micros != 0 || nanos != 0) { + if (micros != 0 || nanos != 0) { + if (nanos != 0) + return 9; + return 6; + } + return 3; + } + return 0; +} + +/** + * Normalize date/time formats to serialize to the same strings as versions prior to toml11 4.0. + * + * Several things to consider: + * + * 1. 
Sub-millisecond range is represented the same way as in toml11 versions prior to 4.0. Precisioun is rounded + * towards the next multiple of 3 or capped at 9 digits. + * 2. Seconds must be specified. This may become optional in (yet unreleased) TOML 1.1.0, but 1.0.0 defined local time + * in terms of RFC3339 [1]. + * 3. date-time separator (`t`, `T` or space ` `) is canonicalized to an upper T. This is compliant with RFC3339 + * [1] 5.6: + * > Applications that generate this format SHOULD use upper case letters. + * + * [1]: https://datatracker.ietf.org/doc/html/rfc3339#section-5.6 + */ +static void normalizeDatetimeFormat(toml::value & t) +{ + if (t.is_local_datetime()) { + auto & ldt = t.as_local_datetime(); + t.as_local_datetime_fmt() = { + .delimiter = toml::datetime_delimiter_kind::upper_T, + // https://datatracker.ietf.org/doc/html/rfc3339#section-5.6 + .has_seconds = true, // Mandated by TOML 1.0.0 + .subsecond_precision = normalizeSubsecondPrecision(ldt.time), + }; + return; + } + + if (t.is_offset_datetime()) { + auto & odt = t.as_offset_datetime(); + t.as_offset_datetime_fmt() = { + .delimiter = toml::datetime_delimiter_kind::upper_T, + // https://datatracker.ietf.org/doc/html/rfc3339#section-5.6 + .has_seconds = true, // Mandated by TOML 1.0.0 + .subsecond_precision = normalizeSubsecondPrecision(odt.time), + }; + return; + } + + if (t.is_local_time()) { + auto & lt = t.as_local_time(); + t.as_local_time_fmt() = { + .has_seconds = true, // Mandated by TOML 1.0.0 + .subsecond_precision = normalizeSubsecondPrecision(lt), + }; + return; + } +} + +#endif + static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Value & val) { auto toml = state.forceStringNoCtx(*args[0], pos, "while evaluating the argument passed to builtins.fromTOML"); @@ -53,6 +132,9 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va case toml::value_t::local_date: case toml::value_t::local_time: { if (experimentalFeatureSettings.isEnabled(Xp::ParseTomlTimestamps)) { +#if HAVE_TOML11_4 + normalizeDatetimeFormat(t); +#endif auto attrs = state.buildBindings(2); attrs.alloc("_type").mkString("timestamp"); std::ostringstream s; @@ -72,7 +154,17 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va }; try { - visit(visit, val, toml::parse(tomlStream, "fromTOML" /* the "filename" */)); + visit( + visit, + val, + toml::parse( + tomlStream, + "fromTOML" /* the "filename" */ +#if HAVE_TOML11_4 + , + toml::spec::v(1, 0, 0) // Be explicit that we are parsing TOML 1.0.0 without extensions +#endif + )); } catch (std::exception & e) { // TODO: toml::syntax_error state.error("while parsing TOML: %s", e.what()).atPos(pos).debugThrow(); } From 320b2c74ef5c67af1f96c7cbcb8028921aee78c7 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 12 Aug 2025 16:23:00 +0300 Subject: [PATCH 160/382] packaging: Build with toml11 4.4.0 --- packaging/dependencies.nix | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix index 17ba06b4d..23d020f37 100644 --- a/packaging/dependencies.nix +++ b/packaging/dependencies.nix @@ -64,6 +64,16 @@ scope: { NIX_CFLAGS_COMPILE = "-DINITIAL_MARK_STACK_SIZE=1048576"; }); + toml11 = pkgs.toml11.overrideAttrs rec { + version = "4.4.0"; + src = pkgs.fetchFromGitHub { + owner = "ToruNiina"; + repo = "toml11"; + tag = "v${version}"; + hash = "sha256-sgWKYxNT22nw376ttGsTdg0AMzOwp8QH3E8mx0BZJTQ="; + }; + }; + # TODO Hack until 
https://github.com/NixOS/nixpkgs/issues/45462 is fixed. boost = (pkgs.boost.override { From b5289fa1303cfed91aec5112251d27aeb4f3ef21 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 12 Aug 2025 22:47:05 +0300 Subject: [PATCH 161/382] libutil: Fix i686-linux build on clangStdenv Clang refused to do a narrowing conversion in an initializer list: ``` local-keys.cc:56:90: note: insert an explicit cast to silence this issue return name + ":" + base64::encode(std::as_bytes(std::span{sig, sigLen})); ^~~~~~ static_cast( ) ``` --- src/libutil/signature/local-keys.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/signature/local-keys.cc b/src/libutil/signature/local-keys.cc index 1541aed2f..7dcd92c72 100644 --- a/src/libutil/signature/local-keys.cc +++ b/src/libutil/signature/local-keys.cc @@ -53,7 +53,7 @@ std::string SecretKey::signDetached(std::string_view data) const unsigned char sig[crypto_sign_BYTES]; unsigned long long sigLen; crypto_sign_detached(sig, &sigLen, (unsigned char *) data.data(), data.size(), (unsigned char *) key.data()); - return name + ":" + base64::encode(std::as_bytes(std::span{sig, sigLen})); + return name + ":" + base64::encode(std::as_bytes(std::span(sig, sigLen))); } PublicKey SecretKey::toPublicKey() const From ccf658ed5c1093e0bb3a3249688206cca7f1b2e1 Mon Sep 17 00:00:00 2001 From: Leandro Reina Date: Wed, 13 Aug 2025 16:40:55 +0200 Subject: [PATCH 162/382] Fix Git LFS SSH issues * Adds support for NIX_SSHOPTS * Properly uses the parsed port from URL (fixes #13337) * Don't guess the HTTP endpoint, use the response of git-lfs-authenticate * Add an SSH Git LFS test * Removed some unused test code --- doc/manual/rl-next/git-lfs-ssh.md | 11 +++ src/libfetchers/git-lfs-fetch.cc | 93 ++++++++++++------- src/libstore/include/nix/store/ssh.hh | 2 + src/libstore/ssh.cc | 24 +++-- .../fetch-git/test-cases/lfs/default.nix | 20 ++++ .../fetch-git/testsupport/gitea-repo.nix | 12 +-- tests/nixos/fetch-git/testsupport/gitea.nix | 51 ++++------ 7 files changed, 128 insertions(+), 85 deletions(-) create mode 100644 doc/manual/rl-next/git-lfs-ssh.md diff --git a/doc/manual/rl-next/git-lfs-ssh.md b/doc/manual/rl-next/git-lfs-ssh.md new file mode 100644 index 000000000..c49addf13 --- /dev/null +++ b/doc/manual/rl-next/git-lfs-ssh.md @@ -0,0 +1,11 @@ +--- +synopsis: "Fix Git LFS SSH issues" +prs: [13743] +issues: [13337] +--- + +Fixed some outstanding issues with Git LFS and SSH. + +* Added support for `NIX_SSHOPTS`. +* Properly use the parsed port from URL. +* Better use of the response of `git-lfs-authenticate` to determine API endpoint when the API is not exposed on port 443. diff --git a/src/libfetchers/git-lfs-fetch.cc b/src/libfetchers/git-lfs-fetch.cc index 1337c5b83..35230ae88 100644 --- a/src/libfetchers/git-lfs-fetch.cc +++ b/src/libfetchers/git-lfs-fetch.cc @@ -5,6 +5,7 @@ #include "nix/util/url.hh" #include "nix/util/users.hh" #include "nix/util/hash.hh" +#include "nix/store/ssh.hh" #include #include @@ -15,10 +16,9 @@ namespace nix::lfs { -// if authHeader is "", downloadToSink assumes no auth is expected static void downloadToSink( const std::string & url, - const std::string & authHeader, + const std::optional & authHeader, // FIXME: passing a StringSink is superfluous, we may as well // return a string. Or use an abstract Sink for streaming. 
StringSink & sink, @@ -27,8 +27,8 @@ static void downloadToSink( { FileTransferRequest request(url); Headers headers; - if (!authHeader.empty()) - headers.push_back({"Authorization", authHeader}); + if (authHeader.has_value()) + headers.push_back({"Authorization", *authHeader}); request.headers = headers; getFileTransfer()->download(std::move(request), sink); @@ -42,30 +42,53 @@ static void downloadToSink( "hash mismatch while fetching %s: expected sha256:%s but got sha256:%s", url, sha256Expected, sha256Actual); } -static std::string getLfsApiToken(const ParsedURL & url) +namespace { + +struct LfsApiInfo +{ + std::string endpoint; + std::optional authHeader; +}; + +} // namespace + +static LfsApiInfo getLfsApi(const ParsedURL & url) { assert(url.authority.has_value()); + if (url.scheme == "ssh") { + auto args = getNixSshOpts(); - // FIXME: Not entirely correct. - auto [status, output] = runProgram( - RunOptions{ - .program = "ssh", - .args = {url.authority->to_string(), "git-lfs-authenticate", url.path, "download"}, - }); + if (url.authority->port) + args.push_back(fmt("-p%d", *url.authority->port)); - if (output.empty()) - throw Error( - "git-lfs-authenticate: no output (cmd: ssh %s git-lfs-authenticate %s download)", - url.authority.value_or(ParsedURL::Authority{}).to_string(), - url.path); + std::ostringstream hostnameAndUser; + if (url.authority->user) + hostnameAndUser << *url.authority->user << "@"; + hostnameAndUser << url.authority->host; + args.push_back(std::move(hostnameAndUser).str()); - auto queryResp = nlohmann::json::parse(output); - if (!queryResp.contains("header")) - throw Error("no header in git-lfs-authenticate response"); - if (!queryResp["header"].contains("Authorization")) - throw Error("no Authorization in git-lfs-authenticate response"); + args.push_back("--"); + args.push_back("git-lfs-authenticate"); + args.push_back(url.path); + args.push_back("download"); - return queryResp["header"]["Authorization"].get(); + auto [status, output] = runProgram({.program = "ssh", .args = args}); + + if (output.empty()) + throw Error("git-lfs-authenticate: no output (cmd: 'ssh %s')", concatStringsSep(" ", args)); + + auto queryResp = nlohmann::json::parse(output); + auto headerIt = queryResp.find("header"); + if (headerIt == queryResp.end()) + throw Error("no header in git-lfs-authenticate response"); + auto authIt = headerIt->find("Authorization"); + if (authIt == headerIt->end()) + throw Error("no Authorization in git-lfs-authenticate response"); + + return {queryResp.at("href").get(), authIt->get()}; + } + + return {url.to_string() + "/info/lfs", std::nullopt}; } typedef std::unique_ptr> GitConfig; @@ -181,13 +204,14 @@ static nlohmann::json pointerToPayload(const std::vector & items) std::vector Fetch::fetchUrls(const std::vector & pointers) const { - ParsedURL httpUrl(url); - httpUrl.scheme = url.scheme == "ssh" ? 
"https" : url.scheme; - FileTransferRequest request(httpUrl.to_string() + "/info/lfs/objects/batch"); + auto api = lfs::getLfsApi(this->url); + auto url = api.endpoint + "/objects/batch"; + const auto & authHeader = api.authHeader; + FileTransferRequest request(url); request.post = true; Headers headers; - if (this->url.scheme == "ssh") - headers.push_back({"Authorization", lfs::getLfsApiToken(this->url)}); + if (authHeader.has_value()) + headers.push_back({"Authorization", *authHeader}); headers.push_back({"Content-Type", "application/vnd.git-lfs+json"}); headers.push_back({"Accept", "application/vnd.git-lfs+json"}); request.headers = headers; @@ -260,11 +284,16 @@ void Fetch::fetch( try { std::string sha256 = obj.at("oid"); // oid is also the sha256 std::string ourl = obj.at("actions").at("download").at("href"); - std::string authHeader = ""; - if (obj.at("actions").at("download").contains("header") - && obj.at("actions").at("download").at("header").contains("Authorization")) { - authHeader = obj["actions"]["download"]["header"]["Authorization"]; - } + auto authHeader = [&]() -> std::optional { + const auto & download = obj.at("actions").at("download"); + auto headerIt = download.find("header"); + if (headerIt == download.end()) + return std::nullopt; + auto authIt = headerIt->find("Authorization"); + if (authIt == headerIt->end()) + return std::nullopt; + return *authIt; + }(); const uint64_t size = obj.at("size"); sizeCallback(size); downloadToSink(ourl, authHeader, sink, sha256, size); diff --git a/src/libstore/include/nix/store/ssh.hh b/src/libstore/include/nix/store/ssh.hh index 6eb38acef..c7228464b 100644 --- a/src/libstore/include/nix/store/ssh.hh +++ b/src/libstore/include/nix/store/ssh.hh @@ -8,6 +8,8 @@ namespace nix { +Strings getNixSshOpts(); + class SSHMaster { private: diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index 8ed72c643..8a4614a0d 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -51,6 +51,18 @@ static void checkValidAuthority(const ParsedURL::Authority & authority) } } +Strings getNixSshOpts() +{ + std::string sshOpts = getEnv("NIX_SSHOPTS").value_or(""); + + try { + return shellSplitString(sshOpts); + } catch (Error & e) { + e.addTrace({}, "while splitting NIX_SSHOPTS '%s'", sshOpts); + throw; + } +} + SSHMaster::SSHMaster( const ParsedURL::Authority & authority, std::string_view keyFile, @@ -82,16 +94,8 @@ void SSHMaster::addCommonSSHOpts(Strings & args) { auto state(state_.lock()); - std::string sshOpts = getEnv("NIX_SSHOPTS").value_or(""); - - try { - std::list opts = shellSplitString(sshOpts); - for (auto & i : opts) - args.push_back(i); - } catch (Error & e) { - e.addTrace({}, "while splitting NIX_SSHOPTS '%s'", sshOpts); - throw; - } + auto sshArgs = getNixSshOpts(); + args.insert(args.end(), sshArgs.begin(), sshArgs.end()); if (!keyFile.empty()) args.insert(args.end(), {"-i", keyFile}); diff --git a/tests/nixos/fetch-git/test-cases/lfs/default.nix b/tests/nixos/fetch-git/test-cases/lfs/default.nix index 686796fcc..289c37709 100644 --- a/tests/nixos/fetch-git/test-cases/lfs/default.nix +++ b/tests/nixos/fetch-git/test-cases/lfs/default.nix @@ -224,5 +224,25 @@ """) client.succeed(f"cmp {repo.path}/beeg {fetched_self_lfs}/beeg >&2") + + + with subtest("Ensure fetching with SSH generates the same output"): + client.succeed(f"{repo.git} push origin-ssh main >&2") + client.succeed("rm -rf ~/.cache/nix") # Avoid using the cached output of the http fetch + + fetchGit_ssh_expr = f""" + builtins.fetchGit {{ + url = "{repo.remote_ssh}"; + rev = 
"{lfs_file_rev}"; + ref = "main"; + lfs = true; + }} + """ + fetched_ssh = client.succeed(f""" + nix eval --debug --impure --raw --expr '({fetchGit_ssh_expr}).outPath' + """) + + assert fetched_ssh == fetched_lfs, \ + f"fetching with ssh (store path {fetched_ssh}) yielded a different result than using http (store path {fetched_lfs})" ''; } diff --git a/tests/nixos/fetch-git/testsupport/gitea-repo.nix b/tests/nixos/fetch-git/testsupport/gitea-repo.nix index c8244207f..826ae52f9 100644 --- a/tests/nixos/fetch-git/testsupport/gitea-repo.nix +++ b/tests/nixos/fetch-git/testsupport/gitea-repo.nix @@ -49,19 +49,15 @@ in self.name = name self.path = "/tmp/repos/" + name self.remote = "http://gitea:3000/test/" + name - self.remote_ssh = "ssh://gitea/root/" + name + self.remote_ssh = "ssh://gitea:3001/test/" + name self.git = f"git -C {self.path}" self.private = private self.create() def create(self): - # create ssh remote repo + # create remote repo gitea.succeed(f""" - git init --bare -b main /root/{self.name} - """) - # create http remote repo - gitea.succeed(f""" - curl --fail -X POST http://{gitea_admin}:{gitea_admin_password}@gitea:3000/api/v1/user/repos \ + curl --fail -X POST http://{gitea_user}:{gitea_password}@gitea:3000/api/v1/user/repos \ -H 'Accept: application/json' -H 'Content-Type: application/json' \ -d {shlex.quote( f'{{"name":"{self.name}", "default_branch": "main", "private": {boolToJSON(self.private)}}}' )} """) @@ -70,7 +66,7 @@ in mkdir -p {self.path} \ && git init -b main {self.path} \ && {self.git} remote add origin {self.remote} \ - && {self.git} remote add origin-ssh root@gitea:{self.name} + && {self.git} remote add origin-ssh {self.remote_ssh} """) ''; }; diff --git a/tests/nixos/fetch-git/testsupport/gitea.nix b/tests/nixos/fetch-git/testsupport/gitea.nix index e63182639..ad88d4ff0 100644 --- a/tests/nixos/fetch-git/testsupport/gitea.nix +++ b/tests/nixos/fetch-git/testsupport/gitea.nix @@ -35,28 +35,20 @@ in server = { DOMAIN = "gitea"; HTTP_PORT = 3000; + SSH_PORT = 3001; + START_SSH_SERVER = true; }; log.LEVEL = "Info"; database.LOG_SQL = false; }; - services.openssh.enable = true; - networking.firewall.allowedTCPPorts = [ 3000 ]; + networking.firewall.allowedTCPPorts = [ + 3000 + 3001 + ]; environment.systemPackages = [ pkgs.git pkgs.gitea ]; - - users.users.root.openssh.authorizedKeys.keys = [ clientPublicKey ]; - - # TODO: remove this after updating to nixos-23.11 - nixpkgs.pkgs = lib.mkForce ( - import nixpkgs { - inherit system; - config.permittedInsecurePackages = [ - "gitea-1.19.4" - ]; - } - ); }; client = { pkgs, ... }: @@ -67,38 +59,33 @@ in ]; }; }; - defaults = - { pkgs, ... 
}: - { - environment.systemPackages = [ pkgs.jq ]; - }; setupScript = '' import shlex gitea.wait_for_unit("gitea.service") - gitea_admin = "test" - gitea_admin_password = "test123test" + gitea_user = "test" + gitea_password = "test123test" gitea.succeed(f""" gitea --version >&2 su -l gitea -c 'GITEA_WORK_DIR=/var/lib/gitea gitea admin user create \ - --username {gitea_admin} --password {gitea_admin_password} --email test@client' + --username {gitea_user} --password {gitea_password} --email test@client' """) client.wait_for_unit("multi-user.target") gitea.wait_for_open_port(3000) + gitea.wait_for_open_port(3001) - gitea_admin_token = gitea.succeed(f""" - curl --fail -X POST http://{gitea_admin}:{gitea_admin_password}@gitea:3000/api/v1/users/test/tokens \ + gitea.succeed(f""" + curl --fail -X POST http://{gitea_user}:{gitea_password}@gitea:3000/api/v1/user/keys \ -H 'Accept: application/json' -H 'Content-Type: application/json' \ - -d {shlex.quote( '{"name":"token", "scopes":["all"]}' )} \ - | jq -r '.sha1' - """).strip() + -d {shlex.quote( '{"title":"key", "key":"${clientPublicKey}", "read_only": false}' )} >&2 + """) client.succeed(f""" - echo "http://{gitea_admin}:{gitea_admin_password}@gitea:3000" >~/.git-credentials-admin + echo "http://{gitea_user}:{gitea_password}@gitea:3000" >~/.git-credentials-admin git config --global credential.helper 'store --file ~/.git-credentials-admin' git config --global user.email "test@client" git config --global user.name "Test User" @@ -118,13 +105,7 @@ in echo "Host gitea" >>~/.ssh/config echo " StrictHostKeyChecking no" >>~/.ssh/config echo " UserKnownHostsFile /dev/null" >>~/.ssh/config - echo " User root" >>~/.ssh/config + echo " User gitea" >>~/.ssh/config """) - - # ensure ssh from client to gitea works - client.succeed(""" - ssh root@gitea true - """) - ''; } From 75740fbd757567adfeb0917fe47995cc01df1879 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Wed, 13 Aug 2025 22:46:03 +0200 Subject: [PATCH 163/382] Revert "Merge pull request #13741 from xokdvium/toml-timestamps" This reverts commit 53ac49f72cdee54da3f3dc4a1853f11541bec9d8, reversing changes made to 8e5ca787f41be7ad58afb68c2ec2ff8d0e49fe70. This broke nixpkgs eval test that was depending overflowing integers... --- packaging/dependencies.nix | 10 -- src/libexpr/meson.build | 6 - src/libexpr/primops/fromTOML.cc | 119 ++++-------------- .../lang/eval-okay-fromTOML-timestamps.exp | 2 +- .../lang/eval-okay-fromTOML-timestamps.nix | 46 +------ 5 files changed, 25 insertions(+), 158 deletions(-) diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix index 23d020f37..17ba06b4d 100644 --- a/packaging/dependencies.nix +++ b/packaging/dependencies.nix @@ -64,16 +64,6 @@ scope: { NIX_CFLAGS_COMPILE = "-DINITIAL_MARK_STACK_SIZE=1048576"; }); - toml11 = pkgs.toml11.overrideAttrs rec { - version = "4.4.0"; - src = pkgs.fetchFromGitHub { - owner = "ToruNiina"; - repo = "toml11"; - tag = "v${version}"; - hash = "sha256-sgWKYxNT22nw376ttGsTdg0AMzOwp8QH3E8mx0BZJTQ="; - }; - }; - # TODO Hack until https://github.com/NixOS/nixpkgs/issues/45462 is fixed. 
boost = (pkgs.boost.override { diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index 82a116708..e1a12106d 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -71,12 +71,6 @@ toml11 = dependency( method : 'cmake', include_type : 'system', ) - -configdata_priv.set( - 'HAVE_TOML11_4', - toml11.version().version_compare('>= 4.0.0').to_int(), -) - deps_other += toml11 config_priv_h = configure_file( diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc index 7d98a5de9..533739592 100644 --- a/src/libexpr/primops/fromTOML.cc +++ b/src/libexpr/primops/fromTOML.cc @@ -1,140 +1,73 @@ #include "nix/expr/primops.hh" #include "nix/expr/eval-inline.hh" -#include "expr-config-private.hh" - #include #include namespace nix { -#if HAVE_TOML11_4 - -/** - * This is what toml11 < 4.0 did when choosing the subsecond precision. - * TOML 1.0.0 spec doesn't define how sub-millisecond ranges should be handled and calls it - * implementation defined behavior. For a lack of a better choice we stick with what older versions - * of toml11 did [1]. - * - * [1]: https://github.com/ToruNiina/toml11/blob/dcfe39a783a94e8d52c885e5883a6fbb21529019/toml/datetime.hpp#L282 - */ -static size_t normalizeSubsecondPrecision(toml::local_time lt) -{ - auto millis = lt.millisecond; - auto micros = lt.microsecond; - auto nanos = lt.nanosecond; - if (millis != 0 || micros != 0 || nanos != 0) { - if (micros != 0 || nanos != 0) { - if (nanos != 0) - return 9; - return 6; - } - return 3; - } - return 0; -} - -/** - * Normalize date/time formats to serialize to the same strings as versions prior to toml11 4.0. - * - * Several things to consider: - * - * 1. Sub-millisecond range is represented the same way as in toml11 versions prior to 4.0. Precisioun is rounded - * towards the next multiple of 3 or capped at 9 digits. - * 2. Seconds must be specified. This may become optional in (yet unreleased) TOML 1.1.0, but 1.0.0 defined local time - * in terms of RFC3339 [1]. - * 3. date-time separator (`t`, `T` or space ` `) is canonicalized to an upper T. This is compliant with RFC3339 - * [1] 5.6: - * > Applications that generate this format SHOULD use upper case letters. 
- * - * [1]: https://datatracker.ietf.org/doc/html/rfc3339#section-5.6 - */ -static void normalizeDatetimeFormat(toml::value & t) -{ - if (t.is_local_datetime()) { - auto & ldt = t.as_local_datetime(); - t.as_local_datetime_fmt() = { - .delimiter = toml::datetime_delimiter_kind::upper_T, - // https://datatracker.ietf.org/doc/html/rfc3339#section-5.6 - .has_seconds = true, // Mandated by TOML 1.0.0 - .subsecond_precision = normalizeSubsecondPrecision(ldt.time), - }; - return; - } - - if (t.is_offset_datetime()) { - auto & odt = t.as_offset_datetime(); - t.as_offset_datetime_fmt() = { - .delimiter = toml::datetime_delimiter_kind::upper_T, - // https://datatracker.ietf.org/doc/html/rfc3339#section-5.6 - .has_seconds = true, // Mandated by TOML 1.0.0 - .subsecond_precision = normalizeSubsecondPrecision(odt.time), - }; - return; - } - - if (t.is_local_time()) { - auto & lt = t.as_local_time(); - t.as_local_time_fmt() = { - .has_seconds = true, // Mandated by TOML 1.0.0 - .subsecond_precision = normalizeSubsecondPrecision(lt), - }; - return; - } -} - -#endif - static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Value & val) { auto toml = state.forceStringNoCtx(*args[0], pos, "while evaluating the argument passed to builtins.fromTOML"); std::istringstream tomlStream(std::string{toml}); - auto visit = [&](auto & self, Value & v, toml::value t) -> void { + std::function visit; + + visit = [&](Value & v, toml::value t) { switch (t.type()) { case toml::value_t::table: { auto table = toml::get(t); - auto attrs = state.buildBindings(table.size()); + + size_t size = 0; + for (auto & i : table) { + (void) i; + size++; + } + + auto attrs = state.buildBindings(size); for (auto & elem : table) { forceNoNullByte(elem.first); - self(self, attrs.alloc(elem.first), elem.second); + visit(attrs.alloc(elem.first), elem.second); } v.mkAttrs(attrs); } break; + ; case toml::value_t::array: { auto array = toml::get>(t); auto list = state.buildList(array.size()); for (const auto & [n, v] : enumerate(list)) - self(self, *(v = state.allocValue()), array[n]); + visit(*(v = state.allocValue()), array[n]); v.mkList(list); } break; + ; case toml::value_t::boolean: v.mkBool(toml::get(t)); break; + ; case toml::value_t::integer: v.mkInt(toml::get(t)); break; + ; case toml::value_t::floating: v.mkFloat(toml::get(t)); break; + ; case toml::value_t::string: { auto s = toml::get(t); forceNoNullByte(s); v.mkString(s); } break; + ; case toml::value_t::local_datetime: case toml::value_t::offset_datetime: case toml::value_t::local_date: case toml::value_t::local_time: { if (experimentalFeatureSettings.isEnabled(Xp::ParseTomlTimestamps)) { -#if HAVE_TOML11_4 - normalizeDatetimeFormat(t); -#endif auto attrs = state.buildBindings(2); attrs.alloc("_type").mkString("timestamp"); std::ostringstream s; @@ -147,24 +80,16 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va throw std::runtime_error("Dates and times are not supported"); } } break; + ; case toml::value_t::empty: v.mkNull(); break; + ; } }; try { - visit( - visit, - val, - toml::parse( - tomlStream, - "fromTOML" /* the "filename" */ -#if HAVE_TOML11_4 - , - toml::spec::v(1, 0, 0) // Be explicit that we are parsing TOML 1.0.0 without extensions -#endif - )); + visit(val, toml::parse(tomlStream, "fromTOML" /* the "filename" */)); } catch (std::exception & e) { // TODO: toml::syntax_error state.error("while parsing TOML: %s", e.what()).atPos(pos).debugThrow(); } diff --git 
a/tests/functional/lang/eval-okay-fromTOML-timestamps.exp b/tests/functional/lang/eval-okay-fromTOML-timestamps.exp index 56e610533..08b3c69a6 100644 --- a/tests/functional/lang/eval-okay-fromTOML-timestamps.exp +++ b/tests/functional/lang/eval-okay-fromTOML-timestamps.exp @@ -1 +1 @@ -{ "1234" = "value"; "127.0.0.1" = "value"; a = { b = { c = { }; }; }; arr1 = [ 1 2 3 ]; arr2 = [ "red" "yellow" "green" ]; arr3 = [ [ 1 2 ] [ 3 4 5 ] ]; arr4 = [ "all" "strings" "are the same" "type" ]; arr5 = [ [ 1 2 ] [ "a" "b" "c" ] ]; arr7 = [ 1 2 3 ]; arr8 = [ 1 2 ]; bare-key = "value"; bare_key = "value"; bin1 = 214; bool1 = true; bool2 = false; "character encoding" = "value"; d = { e = { f = { }; }; }; dog = { "tater.man" = { type = { name = "pug"; }; }; }; flt1 = 1; flt2 = 3.1415; flt3 = -0.01; flt4 = 5e+22; flt5 = 1e+06; flt6 = -0.02; flt7 = 6.626e-34; flt8 = 9.22462e+06; fruit = [ { name = "apple"; physical = { color = "red"; shape = "round"; }; variety = [ { name = "red delicious"; } { name = "granny smith"; } ]; } { name = "banana"; variety = [ { name = "plantain"; } ]; } ]; g = { h = { i = { }; }; }; hex1 = 3735928559; hex2 = 3735928559; hex3 = 3735928559; int1 = 99; int2 = 42; int3 = 0; int4 = -17; int5 = 1000; int6 = 5349221; int7 = 12345; j = { "ʞ" = { l = { }; }; }; key = "value"; key2 = "value"; ld1 = { _type = "timestamp"; value = "1979-05-27"; }; ldt1 = { _type = "timestamp"; value = "1979-05-27T07:32:00"; }; ldt10 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123456789"; }; ldt11 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123456789"; }; ldt2 = { _type = "timestamp"; value = "1979-05-27T07:32:00.100"; }; ldt3 = { _type = "timestamp"; value = "1979-05-27T07:32:00.120"; }; ldt4 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123"; }; ldt5 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123400"; }; ldt6 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123450"; }; ldt7 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123456"; }; ldt8 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123456700"; }; ldt9 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123456780"; }; lt1 = { _type = "timestamp"; value = "07:32:00"; }; lt10 = { _type = "timestamp"; value = "00:32:00.123456789"; }; lt11 = { _type = "timestamp"; value = "00:32:00.123456789"; }; lt2 = { _type = "timestamp"; value = "00:32:00.100"; }; lt3 = { _type = "timestamp"; value = "00:32:00.120"; }; lt4 = { _type = "timestamp"; value = "00:32:00.123"; }; lt5 = { _type = "timestamp"; value = "00:32:00.123400"; }; lt6 = { _type = "timestamp"; value = "00:32:00.123450"; }; lt7 = { _type = "timestamp"; value = "00:32:00.123456"; }; lt8 = { _type = "timestamp"; value = "00:32:00.123456700"; }; lt9 = { _type = "timestamp"; value = "00:32:00.123456780"; }; name = "Orange"; oct1 = 342391; oct2 = 493; odt1 = { _type = "timestamp"; value = "1979-05-27T07:32:00Z"; }; odt10 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123456Z"; }; odt11 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123456700Z"; }; odt12 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123456780Z"; }; odt13 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123456789Z"; }; odt14 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123456789Z"; }; odt2 = { _type = "timestamp"; value = "1979-05-27T00:32:00-07:00"; }; odt3 = { _type = "timestamp"; value = "1979-05-27T00:32:00.999999-07:00"; }; odt4 = { _type = "timestamp"; value = "1979-05-27T07:32:00Z"; }; odt5 = { _type = "timestamp"; value = 
"1979-05-27T07:32:00.100Z"; }; odt6 = { _type = "timestamp"; value = "1979-05-27T07:32:00.120Z"; }; odt7 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123Z"; }; odt8 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123400Z"; }; odt9 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123450Z"; }; physical = { color = "orange"; shape = "round"; }; products = [ { name = "Hammer"; sku = 738594937; } { } { color = "gray"; name = "Nail"; sku = 284758393; } ]; "quoted \"value\"" = "value"; site = { "google.com" = true; }; str = "I'm a string. \"You can quote me\". Name\tJosé\nLocation\tSF."; table-1 = { key1 = "some string"; key2 = 123; }; table-2 = { key1 = "another string"; key2 = 456; }; x = { y = { z = { w = { animal = { type = { name = "pug"; }; }; name = { first = "Tom"; last = "Preston-Werner"; }; point = { x = 1; y = 2; }; }; }; }; }; "ʎǝʞ" = "value"; } +{ "1234" = "value"; "127.0.0.1" = "value"; a = { b = { c = { }; }; }; arr1 = [ 1 2 3 ]; arr2 = [ "red" "yellow" "green" ]; arr3 = [ [ 1 2 ] [ 3 4 5 ] ]; arr4 = [ "all" "strings" "are the same" "type" ]; arr5 = [ [ 1 2 ] [ "a" "b" "c" ] ]; arr7 = [ 1 2 3 ]; arr8 = [ 1 2 ]; bare-key = "value"; bare_key = "value"; bin1 = 214; bool1 = true; bool2 = false; "character encoding" = "value"; d = { e = { f = { }; }; }; dog = { "tater.man" = { type = { name = "pug"; }; }; }; flt1 = 1; flt2 = 3.1415; flt3 = -0.01; flt4 = 5e+22; flt5 = 1e+06; flt6 = -0.02; flt7 = 6.626e-34; flt8 = 9.22462e+06; fruit = [ { name = "apple"; physical = { color = "red"; shape = "round"; }; variety = [ { name = "red delicious"; } { name = "granny smith"; } ]; } { name = "banana"; variety = [ { name = "plantain"; } ]; } ]; g = { h = { i = { }; }; }; hex1 = 3735928559; hex2 = 3735928559; hex3 = 3735928559; int1 = 99; int2 = 42; int3 = 0; int4 = -17; int5 = 1000; int6 = 5349221; int7 = 12345; j = { "ʞ" = { l = { }; }; }; key = "value"; key2 = "value"; ld1 = { _type = "timestamp"; value = "1979-05-27"; }; ldt1 = { _type = "timestamp"; value = "1979-05-27T07:32:00"; }; ldt2 = { _type = "timestamp"; value = "1979-05-27T00:32:00.999999"; }; lt1 = { _type = "timestamp"; value = "07:32:00"; }; lt2 = { _type = "timestamp"; value = "00:32:00.999999"; }; name = "Orange"; oct1 = 342391; oct2 = 493; odt1 = { _type = "timestamp"; value = "1979-05-27T07:32:00Z"; }; odt2 = { _type = "timestamp"; value = "1979-05-27T00:32:00-07:00"; }; odt3 = { _type = "timestamp"; value = "1979-05-27T00:32:00.999999-07:00"; }; odt4 = { _type = "timestamp"; value = "1979-05-27T07:32:00Z"; }; physical = { color = "orange"; shape = "round"; }; products = [ { name = "Hammer"; sku = 738594937; } { } { color = "gray"; name = "Nail"; sku = 284758393; } ]; "quoted \"value\"" = "value"; site = { "google.com" = true; }; str = "I'm a string. \"You can quote me\". 
Name\tJosé\nLocation\tSF."; table-1 = { key1 = "some string"; key2 = 123; }; table-2 = { key1 = "another string"; key2 = 456; }; x = { y = { z = { w = { animal = { type = { name = "pug"; }; }; name = { first = "Tom"; last = "Preston-Werner"; }; point = { x = 1; y = 2; }; }; }; }; }; "ʎǝʞ" = "value"; } diff --git a/tests/functional/lang/eval-okay-fromTOML-timestamps.nix b/tests/functional/lang/eval-okay-fromTOML-timestamps.nix index d8f3a03e9..74cff9470 100644 --- a/tests/functional/lang/eval-okay-fromTOML-timestamps.nix +++ b/tests/functional/lang/eval-okay-fromTOML-timestamps.nix @@ -55,53 +55,11 @@ builtins.fromTOML '' odt2 = 1979-05-27T00:32:00-07:00 odt3 = 1979-05-27T00:32:00.999999-07:00 odt4 = 1979-05-27 07:32:00Z - # milliseconds - odt5 = 1979-05-27 07:32:00.1Z - odt6 = 1979-05-27 07:32:00.12Z - odt7 = 1979-05-27 07:32:00.123Z - # microseconds - odt8 = 1979-05-27t07:32:00.1234Z - odt9 = 1979-05-27t07:32:00.12345Z - odt10 = 1979-05-27t07:32:00.123456Z - # nanoseconds - odt11 = 1979-05-27 07:32:00.1234567Z - odt12 = 1979-05-27 07:32:00.12345678Z - odt13 = 1979-05-27 07:32:00.123456789Z - # no more precision after nanoseconds - odt14 = 1979-05-27t07:32:00.1234567891Z - ldt1 = 1979-05-27T07:32:00 - # milliseconds - ldt2 = 1979-05-27T07:32:00.1 - ldt3 = 1979-05-27T07:32:00.12 - ldt4 = 1979-05-27T07:32:00.123 - # microseconds - ldt5 = 1979-05-27t00:32:00.1234 - ldt6 = 1979-05-27t00:32:00.12345 - ldt7 = 1979-05-27t00:32:00.123456 - # nanoseconds - ldt8 = 1979-05-27 00:32:00.1234567 - ldt9 = 1979-05-27 00:32:00.12345678 - ldt10 = 1979-05-27 00:32:00.123456789 - # no more precision after nanoseconds - ldt11 = 1979-05-27t00:32:00.1234567891 - + ldt2 = 1979-05-27T00:32:00.999999 ld1 = 1979-05-27 lt1 = 07:32:00 - # milliseconds - lt2 = 00:32:00.1 - lt3 = 00:32:00.12 - lt4 = 00:32:00.123 - # microseconds - lt5 = 00:32:00.1234 - lt6 = 00:32:00.12345 - lt7 = 00:32:00.123456 - # nanoseconds - lt8 = 00:32:00.1234567 - lt9 = 00:32:00.12345678 - lt10 = 00:32:00.123456789 - # no more precision after nanoseconds - lt11 = 00:32:00.1234567891 + lt2 = 00:32:00.999999 arr1 = [ 1, 2, 3 ] arr2 = [ "red", "yellow", "green" ] From c37df9c87c5170b1736337fd43a80ce54550d09c Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 9 Jul 2025 17:10:05 -0400 Subject: [PATCH 164/382] Inline `DerivationGoal::query{,Partial}DerivationOutputMap` The functions are used just once. --- src/libstore/build/derivation-goal.cc | 58 ++++++++----------- .../nix/store/build/derivation-goal.hh | 8 --- 2 files changed, 25 insertions(+), 41 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index b2faa7a6e..7eb5bd09d 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -241,7 +241,17 @@ Goal::Co DerivationGoal::repairClosure() that produced those outputs. */ /* Get the output closure. 
*/ - auto outputs = queryDerivationOutputMap(); + auto outputs = [&] { + for (auto * drvStore : {&worker.evalStore, &worker.store}) + if (drvStore->isValidPath(drvPath)) + return worker.store.queryDerivationOutputMap(drvPath, drvStore); + + OutputPathMap res; + for (auto & [name, output] : drv->outputsAndOptPaths(worker.store)) + res.insert_or_assign(name, *output.second); + return res; + }(); + StorePathSet outputClosure; if (auto mPath = get(outputs, wantedOutput)) { worker.store.computeFSClosure(*mPath, outputClosure); @@ -304,37 +314,6 @@ Goal::Co DerivationGoal::repairClosure() co_return done(BuildResult::AlreadyValid, assertPathValidity()); } -std::map> DerivationGoal::queryPartialDerivationOutputMap() -{ - assert(!drv->type().isImpure()); - - for (auto * drvStore : {&worker.evalStore, &worker.store}) - if (drvStore->isValidPath(drvPath)) - return worker.store.queryPartialDerivationOutputMap(drvPath, drvStore); - - /* In-memory derivation will naturally fall back on this case, where - we do best-effort with static information. */ - std::map> res; - for (auto & [name, output] : drv->outputs) - res.insert_or_assign(name, output.path(worker.store, drv->name, name)); - return res; -} - -OutputPathMap DerivationGoal::queryDerivationOutputMap() -{ - assert(!drv->type().isImpure()); - - for (auto * drvStore : {&worker.evalStore, &worker.store}) - if (drvStore->isValidPath(drvPath)) - return worker.store.queryDerivationOutputMap(drvPath, drvStore); - - // See comment in `DerivationGoal::queryPartialDerivationOutputMap`. - OutputPathMap res; - for (auto & [name, output] : drv->outputsAndOptPaths(worker.store)) - res.insert_or_assign(name, *output.second); - return res; -} - std::pair DerivationGoal::checkPathValidity() { if (drv->type().isImpure()) @@ -344,7 +323,20 @@ std::pair DerivationGoal::checkPathValidity() StringSet wantedOutputsLeft{wantedOutput}; SingleDrvOutputs validOutputs; - for (auto & i : queryPartialDerivationOutputMap()) { + auto partialDerivationOutputMap = [&] { + for (auto * drvStore : {&worker.evalStore, &worker.store}) + if (drvStore->isValidPath(drvPath)) + return worker.store.queryPartialDerivationOutputMap(drvPath, drvStore); + + /* In-memory derivation will naturally fall back on this case, where + we do best-effort with static information. */ + std::map> res; + for (auto & [name, output] : drv->outputs) + res.insert_or_assign(name, output.path(worker.store, drv->name, name)); + return res; + }(); + + for (auto & i : partialDerivationOutputMap) { auto initialOutput = get(initialOutputs, i.first); if (!initialOutput) // this is an invalid output, gets caught with (!wantedOutputsLeft.empty()) diff --git a/src/libstore/include/nix/store/build/derivation-goal.hh b/src/libstore/include/nix/store/build/derivation-goal.hh index d78073a91..5d5c8d131 100644 --- a/src/libstore/include/nix/store/build/derivation-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-goal.hh @@ -78,14 +78,6 @@ struct DerivationGoal : public Goal */ Co haveDerivation(); - /** - * Wrappers around the corresponding Store methods that first consult the - * derivation. This is currently needed because when there is no drv file - * there also is no DB entry. - */ - std::map> queryPartialDerivationOutputMap(); - OutputPathMap queryDerivationOutputMap(); - /** * Update 'initialOutputs' to determine the current status of the * outputs of the derivation. 
Also returns a Boolean denoting From ed5593700260edcd6882e5c713670252e920cbe2 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 9 Jul 2025 17:15:32 -0400 Subject: [PATCH 165/382] Make many members of `DerivationGoal` private --- .../nix/store/build/derivation-goal.hh | 42 ++++++++++--------- 1 file changed, 22 insertions(+), 20 deletions(-) diff --git a/src/libstore/include/nix/store/build/derivation-goal.hh b/src/libstore/include/nix/store/build/derivation-goal.hh index 5d5c8d131..4b004e73b 100644 --- a/src/libstore/include/nix/store/build/derivation-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-goal.hh @@ -43,21 +43,6 @@ struct DerivationGoal : public Goal */ OutputName wantedOutput; - /** - * The derivation stored at drvPath. - */ - std::unique_ptr drv; - - /** - * The remainder is state held during the build. - */ - - std::map initialOutputs; - - BuildMode buildMode; - - std::unique_ptr> mcExpectedBuilds; - DerivationGoal( const StorePath & drvPath, const Derivation & drv, @@ -73,6 +58,28 @@ struct DerivationGoal : public Goal std::string key() override; + JobCategory jobCategory() const override + { + return JobCategory::Administration; + }; + +private: + + /** + * The derivation stored at drvPath. + */ + std::unique_ptr drv; + + /** + * The remainder is state held during the build. + */ + + std::map initialOutputs; + + BuildMode buildMode; + + std::unique_ptr> mcExpectedBuilds; + /** * The states. */ @@ -95,11 +102,6 @@ struct DerivationGoal : public Goal Co repairClosure(); Done done(BuildResult::Status status, SingleDrvOutputs builtOutputs = {}, std::optional ex = {}); - - JobCategory jobCategory() const override - { - return JobCategory::Administration; - }; }; } // namespace nix From 3e7879e6dfb75d5c39058b8c2fd6619db8df9b95 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 12 Aug 2025 10:50:45 -0400 Subject: [PATCH 166/382] Rewrite `StoreConfig::getUri` in terms of new `StoreConfig::getReference` Rather than having store implementations return a free-form URI string, have them return a `StoreReference`. This reflects that fact that this method is supposed to invert `resolveStoreConfig`, which goes from a `StoreReference` to some `StoreConfig` concrete derived class (based on the registry). `StoreConfig::getUri` is kept only as a convenience for the common case that we want to immediately render the `StoreReference`. A few tests were changed to use `local://` not `local`, since `StoreReference` does not encode the `local` and `daemon` shorthands (and instead desugars them to `local://` and `unix://` right away). I think that is fine. `local` and `daemon` still work as input. 
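As an illustrative sketch of the intended round trip (a sketch only, not part of this change; `StoreReference::parse` is assumed here for illustration, and `resolveStoreConfig` is the registry lookup mentioned above, so exact signatures may differ):

    // A store URL parses to a StoreReference, resolves to a concrete
    // StoreConfig via the scheme registry, and renders back out again.
    // `local` and `daemon` desugar to `local://` and `unix://` on parse.
    auto ref = StoreReference::parse("daemon");           // becomes unix://
    auto config = resolveStoreConfig(std::move(ref));
    // getUri() is now only a convenience wrapper around getReference():
    assert(config->getUri() == config->getReference().render());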
--- src/libstore-tests/http-binary-cache-store.cc | 4 +- src/libstore-tests/nix_api_store.cc | 4 +- src/libstore/dummy-store.cc | 9 ++- src/libstore/http-binary-cache-store.cc | 44 ++++++++++++--- .../nix/store/http-binary-cache-store.hh | 8 +-- .../include/nix/store/legacy-ssh-store.hh | 2 +- .../nix/store/local-binary-cache-store.hh | 2 +- .../include/nix/store/local-overlay-store.hh | 5 +- src/libstore/include/nix/store/local-store.hh | 2 +- .../nix/store/s3-binary-cache-store.hh | 2 +- src/libstore/include/nix/store/ssh-store.hh | 2 +- src/libstore/include/nix/store/store-api.hh | 9 ++- .../include/nix/store/uds-remote-store.hh | 2 +- src/libstore/legacy-ssh-store.cc | 11 +++- src/libstore/local-binary-cache-store.cc | 10 +++- src/libstore/local-overlay-store.cc | 10 ++++ src/libstore/local-store.cc | 16 +++--- src/libstore/s3-binary-cache-store.cc | 10 +++- src/libstore/ssh-store.cc | 11 +++- src/libstore/store-api.cc | 4 +- src/libstore/uds-remote-store.cc | 19 ++++--- tests/functional/store-info.sh | 55 ++++++++++++++++++- 22 files changed, 181 insertions(+), 60 deletions(-) diff --git a/src/libstore-tests/http-binary-cache-store.cc b/src/libstore-tests/http-binary-cache-store.cc index f4a3408b5..0e3be4ced 100644 --- a/src/libstore-tests/http-binary-cache-store.cc +++ b/src/libstore-tests/http-binary-cache-store.cc @@ -8,14 +8,14 @@ TEST(HttpBinaryCacheStore, constructConfig) { HttpBinaryCacheStoreConfig config{"http", "foo.bar.baz", {}}; - EXPECT_EQ(config.cacheUri, "http://foo.bar.baz"); + EXPECT_EQ(config.cacheUri.to_string(), "http://foo.bar.baz"); } TEST(HttpBinaryCacheStore, constructConfigNoTrailingSlash) { HttpBinaryCacheStoreConfig config{"https", "foo.bar.baz/a/b/", {}}; - EXPECT_EQ(config.cacheUri, "https://foo.bar.baz/a/b"); + EXPECT_EQ(config.cacheUri.to_string(), "https://foo.bar.baz/a/b"); } } // namespace nix diff --git a/src/libstore-tests/nix_api_store.cc b/src/libstore-tests/nix_api_store.cc index cff889ab9..2310c4395 100644 --- a/src/libstore-tests/nix_api_store.cc +++ b/src/libstore-tests/nix_api_store.cc @@ -24,7 +24,7 @@ TEST_F(nix_api_store_test, nix_store_get_uri) std::string str; auto ret = nix_store_get_uri(ctx, store, OBSERVE_STRING(str)); ASSERT_EQ(NIX_OK, ret); - auto expectedStoreURI = "local?" + auto expectedStoreURI = "local://?" 
+ nix::encodeQuery({ {"log", nixLogDir}, {"state", nixStateDir}, @@ -104,7 +104,7 @@ TEST_F(nix_api_util_context, nix_store_open_dummy) nix_libstore_init(ctx); Store * store = nix_store_open(ctx, "dummy://", nullptr); ASSERT_EQ(NIX_OK, ctx->last_err_code); - ASSERT_STREQ("dummy", store->ptr->config.getUri().c_str()); + ASSERT_STREQ("dummy://", store->ptr->config.getUri().c_str()); std::string str; nix_store_get_version(ctx, store, OBSERVE_STRING(str)); diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc index bc8f5b6f5..d0e298968 100644 --- a/src/libstore/dummy-store.cc +++ b/src/libstore/dummy-store.cc @@ -33,9 +33,14 @@ struct DummyStoreConfig : public std::enable_shared_from_this, ref openStore() const override; - std::string getUri() const override + StoreReference getReference() const override { - return *uriSchemes().begin(); + return { + .variant = + StoreReference::Specified{ + .scheme = *uriSchemes().begin(), + }, + }; } }; diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 31899f629..fc19bc1f8 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -22,13 +22,25 @@ HttpBinaryCacheStoreConfig::HttpBinaryCacheStoreConfig( std::string_view scheme, std::string_view _cacheUri, const Params & params) : StoreConfig(params) , BinaryCacheStoreConfig(params) - , cacheUri( + , cacheUri(parseURL( std::string{scheme} + "://" + (!_cacheUri.empty() ? _cacheUri - : throw UsageError("`%s` Store requires a non-empty authority in Store URL", scheme))) + : throw UsageError("`%s` Store requires a non-empty authority in Store URL", scheme)))) { - while (!cacheUri.empty() && cacheUri.back() == '/') - cacheUri.pop_back(); + while (!cacheUri.path.empty() && cacheUri.path.back() == '/') + cacheUri.path.pop_back(); +} + +StoreReference HttpBinaryCacheStoreConfig::getReference() const +{ + return { + .variant = + StoreReference::Specified{ + .scheme = cacheUri.scheme, + .authority = (cacheUri.authority ? cacheUri.authority->to_string() : "") + cacheUri.path, + }, + .params = cacheUri.query, + }; } std::string HttpBinaryCacheStoreConfig::doc() @@ -65,16 +77,17 @@ public: void init() override { // FIXME: do this lazily? - if (auto cacheInfo = diskCache->upToDateCacheExists(config->cacheUri)) { + if (auto cacheInfo = diskCache->upToDateCacheExists(config->cacheUri.to_string())) { config->wantMassQuery.setDefault(cacheInfo->wantMassQuery); config->priority.setDefault(cacheInfo->priority); } else { try { BinaryCacheStore::init(); } catch (UploadToHTTP &) { - throw Error("'%s' does not appear to be a binary cache", config->cacheUri); + throw Error("'%s' does not appear to be a binary cache", config->cacheUri.to_string()); } - diskCache->createCache(config->cacheUri, config->storeDir, config->wantMassQuery, config->priority); + diskCache->createCache( + config->cacheUri.to_string(), config->storeDir, config->wantMassQuery, config->priority); } } @@ -134,16 +147,29 @@ protected: try { getFileTransfer()->upload(req); } catch (FileTransferError & e) { - throw UploadToHTTP("while uploading to HTTP binary cache at '%s': %s", config->cacheUri, e.msg()); + throw UploadToHTTP( + "while uploading to HTTP binary cache at '%s': %s", config->cacheUri.to_string(), e.msg()); } } FileTransferRequest makeRequest(const std::string & path) { + /* FIXME path is not a path, but a full relative or absolute + URL, e.g. 
we've seen in the wild NARINFO files have a URL + field which is + `nar/15f99rdaf26k39knmzry4xd0d97wp6yfpnfk1z9avakis7ipb9yg.nar?hash=zphkqn2wg8mnvbkixnl2aadkbn0rcnfj` + (note the query param) and that gets passed here. + + What should actually happen is that we have two parsed URLs + (if we support relative URLs), and then we combined them with + a URL `operator/` which would be like + `std::filesystem::path`'s equivalent operator, which properly + combines the the URLs, whether the right is relative or + absolute. */ return FileTransferRequest( hasPrefix(path, "https://") || hasPrefix(path, "http://") || hasPrefix(path, "file://") ? path - : config->cacheUri + "/" + path); + : config->cacheUri.to_string() + "/" + path); } void getFile(const std::string & path, Sink & sink) override diff --git a/src/libstore/include/nix/store/http-binary-cache-store.hh b/src/libstore/include/nix/store/http-binary-cache-store.hh index ef13aa7b6..e0f6ce42f 100644 --- a/src/libstore/include/nix/store/http-binary-cache-store.hh +++ b/src/libstore/include/nix/store/http-binary-cache-store.hh @@ -1,3 +1,4 @@ +#include "nix/util/url.hh" #include "nix/store/binary-cache-store.hh" namespace nix { @@ -11,7 +12,7 @@ struct HttpBinaryCacheStoreConfig : std::enable_shared_from_this openStore() const override; - std::string getUri() const override - { - return cacheUri; - } + StoreReference getReference() const override; }; } // namespace nix diff --git a/src/libstore/include/nix/store/legacy-ssh-store.hh b/src/libstore/include/nix/store/legacy-ssh-store.hh index e53d18559..91e021433 100644 --- a/src/libstore/include/nix/store/legacy-ssh-store.hh +++ b/src/libstore/include/nix/store/legacy-ssh-store.hh @@ -54,7 +54,7 @@ struct LegacySSHStoreConfig : std::enable_shared_from_this ref openStore() const override; - std::string getUri() const override; + StoreReference getReference() const override; }; struct LegacySSHStore : public virtual Store diff --git a/src/libstore/include/nix/store/local-binary-cache-store.hh b/src/libstore/include/nix/store/local-binary-cache-store.hh index 5ca5ca43e..2846a9225 100644 --- a/src/libstore/include/nix/store/local-binary-cache-store.hh +++ b/src/libstore/include/nix/store/local-binary-cache-store.hh @@ -27,7 +27,7 @@ struct LocalBinaryCacheStoreConfig : std::enable_shared_from_this openStore() const override; - std::string getUri() const override; + StoreReference getReference() const override; }; } // namespace nix diff --git a/src/libstore/include/nix/store/local-overlay-store.hh b/src/libstore/include/nix/store/local-overlay-store.hh index 1180f0466..b89d0a1a0 100644 --- a/src/libstore/include/nix/store/local-overlay-store.hh +++ b/src/libstore/include/nix/store/local-overlay-store.hh @@ -88,10 +88,7 @@ struct LocalOverlayStoreConfig : virtual LocalStoreConfig ref openStore() const override; - std::string getUri() const override - { - return "local-overlay://"; - } + StoreReference getReference() const override; protected: /** diff --git a/src/libstore/include/nix/store/local-store.hh b/src/libstore/include/nix/store/local-store.hh index af243d480..3d7e8301a 100644 --- a/src/libstore/include/nix/store/local-store.hh +++ b/src/libstore/include/nix/store/local-store.hh @@ -112,7 +112,7 @@ struct LocalStoreConfig : std::enable_shared_from_this, ref openStore() const override; - std::string getUri() const override; + StoreReference getReference() const override; }; class LocalStore : public virtual IndirectRootStore, public virtual GcStore diff --git 
a/src/libstore/include/nix/store/s3-binary-cache-store.hh b/src/libstore/include/nix/store/s3-binary-cache-store.hh index ec3aae149..2fe66b0ad 100644 --- a/src/libstore/include/nix/store/s3-binary-cache-store.hh +++ b/src/libstore/include/nix/store/s3-binary-cache-store.hh @@ -107,7 +107,7 @@ public: ref openStore() const override; - std::string getUri() const override; + StoreReference getReference() const override; }; struct S3BinaryCacheStore : virtual BinaryCacheStore diff --git a/src/libstore/include/nix/store/ssh-store.hh b/src/libstore/include/nix/store/ssh-store.hh index ff6c3ed69..9584a1a86 100644 --- a/src/libstore/include/nix/store/ssh-store.hh +++ b/src/libstore/include/nix/store/ssh-store.hh @@ -34,7 +34,7 @@ struct SSHStoreConfig : std::enable_shared_from_this, ref openStore() const override; - std::string getUri() const override; + StoreReference getReference() const override; }; struct MountedSSHStoreConfig : virtual SSHStoreConfig, virtual LocalFSStoreConfig diff --git a/src/libstore/include/nix/store/store-api.hh b/src/libstore/include/nix/store/store-api.hh index 8f09fee48..3e32c49a3 100644 --- a/src/libstore/include/nix/store/store-api.hh +++ b/src/libstore/include/nix/store/store-api.hh @@ -199,11 +199,16 @@ struct StoreConfig : public StoreDirConfig virtual ref openStore() const = 0; /** - * Render the config back to a "store URL". It should round-trip + * Render the config back to a `StoreReference`. It should round-trip * with `resolveStoreConfig` (for stores configs that are * registered). */ - virtual std::string getUri() const; + virtual StoreReference getReference() const; + + std::string getUri() const + { + return getReference().render(); + } }; /** diff --git a/src/libstore/include/nix/store/uds-remote-store.hh b/src/libstore/include/nix/store/uds-remote-store.hh index c77a29a8b..37c239796 100644 --- a/src/libstore/include/nix/store/uds-remote-store.hh +++ b/src/libstore/include/nix/store/uds-remote-store.hh @@ -45,7 +45,7 @@ struct UDSRemoteStoreConfig : std::enable_shared_from_this ref openStore() const override; - std::string getUri() const override; + StoreReference getReference() const override; }; struct UDSRemoteStore : virtual IndirectRootStore, virtual RemoteStore diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 9592994a1..0435cfa62 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -88,9 +88,16 @@ ref LegacySSHStore::openConnection() return conn; }; -std::string LegacySSHStoreConfig::getUri() const +StoreReference LegacySSHStoreConfig::getReference() const { - return ParsedURL{.scheme = *uriSchemes().begin(), .authority = authority, .query = getQueryParams()}.to_string(); + return { + .variant = + StoreReference::Specified{ + .scheme = *uriSchemes().begin(), + .authority = authority.to_string(), + }, + .params = getQueryParams(), + }; } std::map LegacySSHStore::queryPathInfosUncached(const StorePathSet & paths) diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc index 645a01b09..b5e43de68 100644 --- a/src/libstore/local-binary-cache-store.cc +++ b/src/libstore/local-binary-cache-store.cc @@ -23,9 +23,15 @@ std::string LocalBinaryCacheStoreConfig::doc() ; } -std::string LocalBinaryCacheStoreConfig::getUri() const +StoreReference LocalBinaryCacheStoreConfig::getReference() const { - return "file://" + binaryCacheDir; + return { + .variant = + StoreReference::Specified{ + .scheme = "file", + .authority = binaryCacheDir, + }, + }; } 
struct LocalBinaryCacheStore : virtual BinaryCacheStore diff --git a/src/libstore/local-overlay-store.cc b/src/libstore/local-overlay-store.cc index 1e8d1429c..2b000b3db 100644 --- a/src/libstore/local-overlay-store.cc +++ b/src/libstore/local-overlay-store.cc @@ -23,6 +23,16 @@ ref LocalOverlayStoreConfig::openStore() const ref{std::dynamic_pointer_cast(shared_from_this())}); } +StoreReference LocalOverlayStoreConfig::getReference() const +{ + return { + .variant = + StoreReference::Specified{ + .scheme = *uriSchemes().begin(), + }, + }; +} + Path LocalOverlayStoreConfig::toUpperPath(const StorePath & path) const { return upperLayer + "/" + path.to_string(); diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index dfffdea6a..f4d1b66ba 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -439,15 +439,15 @@ LocalStore::~LocalStore() } } -std::string LocalStoreConfig::getUri() const +StoreReference LocalStoreConfig::getReference() const { - std::ostringstream oss; - oss << *uriSchemes().begin(); - auto queryParams = getQueryParams(); - if (!queryParams.empty()) - oss << "?"; - oss << encodeQuery(queryParams); - return std::move(oss).str(); + return { + .variant = + StoreReference::Specified{ + .scheme = *uriSchemes().begin(), + }, + .params = getQueryParams(), + }; } int LocalStore::getSchema() diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 5f91a8129..84eb63f7f 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -254,9 +254,15 @@ std::string S3BinaryCacheStoreConfig::doc() ; } -std::string S3BinaryCacheStoreConfig::getUri() const +StoreReference S3BinaryCacheStoreConfig::getReference() const { - return "s3://" + bucketName; + return { + .variant = + StoreReference::Specified{ + .scheme = *uriSchemes().begin(), + .authority = bucketName, + }, + }; } struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index d3420186f..dafe14fea 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -25,9 +25,16 @@ std::string SSHStoreConfig::doc() ; } -std::string SSHStoreConfig::getUri() const +StoreReference SSHStoreConfig::getReference() const { - return ParsedURL{.scheme = *uriSchemes().begin(), .authority = authority, .query = getQueryParams()}.to_string(); + return { + .variant = + StoreReference::Specified{ + .scheme = *uriSchemes().begin(), + .authority = authority.to_string(), + }, + .params = getQueryParams(), + }; } struct SSHStore : virtual RemoteStore diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index a720084a0..a30a07952 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -300,9 +300,9 @@ Store::Store(const Store::Config & config) assertLibStoreInitialized(); } -std::string StoreConfig::getUri() const +StoreReference StoreConfig::getReference() const { - return ""; + return {.variant = StoreReference::Auto{}}; } bool Store::PathInfoCacheValue::isKnownNow() diff --git a/src/libstore/uds-remote-store.cc b/src/libstore/uds-remote-store.cc index e1881c602..1d3ecb415 100644 --- a/src/libstore/uds-remote-store.cc +++ b/src/libstore/uds-remote-store.cc @@ -54,14 +54,19 @@ UDSRemoteStore::UDSRemoteStore(ref config) { } -std::string UDSRemoteStoreConfig::getUri() const +StoreReference UDSRemoteStoreConfig::getReference() const { - return path == settings.nixDaemonSocketFile ? 
// FIXME: Not clear why we return daemon here and not default - // to settings.nixDaemonSocketFile - // - // unix:// with no path also works. Change what we return? - "daemon" - : std::string(*uriSchemes().begin()) + "://" + path; + return { + .variant = + StoreReference::Specified{ + .scheme = *uriSchemes().begin(), + // We return the empty string when the path looks like the + // default path, but we could also just return the path + // verbatim always, to be robust to overall config changes + // at the cost of some verbosity. + .authority = path == settings.nixDaemonSocketFile ? "" : path, + }, + }; } void UDSRemoteStore::Connection::closeWrite() diff --git a/tests/functional/store-info.sh b/tests/functional/store-info.sh index b1e0772b5..7c9257215 100755 --- a/tests/functional/store-info.sh +++ b/tests/functional/store-info.sh @@ -2,12 +2,61 @@ source common.sh +# Different versions of the Nix daemon normalize or don't normalize +# store URLs, plus NIX_REMOTE (per the test suite) might not be using on +# store URL in normal form, so the easiest thing to do is normalize URLs +# after the fact before comparing them for equality. +normalize_nix_store_url () { + local url="$1" + case "$url" in + 'auto' ) + # Need to actually ask Nix in this case + echo "$defaultStore" + ;; + 'local://'* ) + # To not be captured by next pattern + echo "$url" + ;; + local | 'local?'* ) + echo "local://${url#local}" + ;; + daemon | 'daemon?'* ) + echo "unix://${url#daemon}" + ;; + * ) + echo "$url" + ;; + esac +} + STORE_INFO=$(nix store info 2>&1) LEGACY_STORE_INFO=$(nix store ping 2>&1) # alias to nix store info STORE_INFO_JSON=$(nix store info --json) -echo "$STORE_INFO" | grep "Store URL: ${NIX_REMOTE}" -echo "$LEGACY_STORE_INFO" | grep "Store URL: ${NIX_REMOTE}" +defaultStore="$(normalize_nix_store_url "$(echo "$STORE_INFO_JSON" | jq -r ".url")")" + +# Test cases for `normalize_nix_store_url` itself + +# Normalize local store +[[ "$(normalize_nix_store_url "local://")" = "local://" ]] +[[ "$(normalize_nix_store_url "local")" = "local://" ]] +[[ "$(normalize_nix_store_url "local?foo=bar")" = "local://?foo=bar" ]] + +# Normalize unix domain socket remote store +[[ "$(normalize_nix_store_url "unix://")" = "unix://" ]] +[[ "$(normalize_nix_store_url "daemon")" = "unix://" ]] +[[ "$(normalize_nix_store_url "daemon?x=y")" = "unix://?x=y" ]] + +# otherwise unchanged +[[ "$(normalize_nix_store_url "https://site")" = "https://site" ]] + +nixRemoteOrDefault=$(normalize_nix_store_url "${NIX_REMOTE:-"auto"}") + +check_human_readable () { + [[ "$(normalize_nix_store_url "$(echo "$1" | grep 'Store URL:' | sed 's^Store URL: ^^')")" = "${nixRemoteOrDefault}" ]] +} +check_human_readable "$STORE_INFO" +check_human_readable "$LEGACY_STORE_INFO" if [[ -v NIX_DAEMON_PACKAGE ]] && isDaemonNewer "2.7.0pre20220126"; then DAEMON_VERSION=$("$NIX_DAEMON_PACKAGE"/bin/nix daemon --version | cut -d' ' -f3) @@ -21,4 +70,4 @@ expect 127 NIX_REMOTE=unix:"$PWD"/store nix store info || \ TODO_NixOS -[[ "$(echo "$STORE_INFO_JSON" | jq -r ".url")" == "${NIX_REMOTE:-local}" ]] +[[ "$(normalize_nix_store_url "$(echo "$STORE_INFO_JSON" | jq -r ".url")")" == "${nixRemoteOrDefault}" ]] From 4b6edfcfc7a05efbd757837794be749525cfb4ba Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 13 Aug 2025 17:36:37 -0400 Subject: [PATCH 167/382] `DerivationBuildingGoal`: Check outputs beforehand See the comment in the code for details. Some of the code is duplicated for now, but we'll be cleaning that up soon. 
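In outline, the recheck amounts to the following (a condensed restatement of the hunk below, omitting the impure-output special case):

    // Rebuild the per-output bookkeeping from the derivation itself at goal
    // start, instead of relying on the downstream goal to hand it to us.
    for (auto & [outputName, outputHash] : staticOutputHashes(worker.evalStore, *drv))
        initialOutputs.insert({outputName, InitialOutput{.wanted = true, .outputHash = outputHash}});
    checkPathValidity();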
--- .../build/derivation-building-goal.cc | 27 +++++++++++++++++++ src/libstore/build/derivation-goal.cc | 3 --- 2 files changed, 27 insertions(+), 3 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index bc8b35462..559d77ba0 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -151,6 +151,33 @@ std::string showKnownOutputs(Store & store, const Derivation & drv) produced using a substitute. So we have to build instead. */ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() { + /* Recheck at goal start. In particular, whereas before we were + given this information by the downstream goal, that cannot happen + anymore if the downstream goal only cares about one output, but + we care about all outputs. */ + auto outputHashes = staticOutputHashes(worker.evalStore, *drv); + for (auto & [outputName, outputHash] : outputHashes) { + InitialOutput v{ + .wanted = true, // Will be refined later + .outputHash = outputHash}; + + /* TODO we might want to also allow randomizing the paths + for regular CA derivations, e.g. for sake of checking + determinism. */ + if (drv->type().isImpure()) { + v.known = InitialOutputStatus{ + .path = StorePath::random(outputPathName(drv->name, outputName)), + .status = PathStatus::Absent, + }; + } + + initialOutputs.insert({ + outputName, + std::move(v), + }); + } + checkPathValidity(); + Goals waitees; std::map, GoalPtr, value_comparison> inputGoals; diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 7eb5bd09d..6e27e5cfa 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -82,9 +82,6 @@ Goal::Co DerivationGoal::haveDerivation() /* We will finish with it ourselves, as if we were the derivational goal. */ g->preserveException = true; - // TODO move into constructor - g->initialOutputs = initialOutputs; - { Goals waitees; waitees.insert(g); From 14173d761cd18e345a533860596e84d6f062f5aa Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 9 Jul 2025 17:47:28 -0400 Subject: [PATCH 168/382] Simplify `DerivationGoal` by just storing a singular `initialOutput` We know we want exactly want output in `DerivationGoal` now (since recent refactors), so we can start simplifying things to take advantage of this. 
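Schematically, the member change (a summary of the diff below; the map's key type, written `OutputName` here, is the output-name string):

    // before: a map with an entry per output, though only `wantedOutput`
    // was ever consulted
    std::map<OutputName, InitialOutput> initialOutputs;

    // after: state for just the single wanted output
    InitialOutput initialOutput;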
--- src/libstore/build/derivation-goal.cc | 93 ++++++++----------- .../nix/store/build/derivation-goal.hh | 4 +- 2 files changed, 40 insertions(+), 57 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 6e27e5cfa..f9f26ba52 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -24,15 +24,16 @@ namespace nix { -DerivationGoal::DerivationGoal( - const StorePath & drvPath, - const Derivation & drv, - const OutputName & wantedOutput, - Worker & worker, - BuildMode buildMode) +DerivationGoal::DerivationGoal(const StorePath & drvPath, const Derivation & drv, + const OutputName & wantedOutput, Worker & worker, BuildMode buildMode) : Goal(worker, haveDerivation()) , drvPath(drvPath) , wantedOutput(wantedOutput) + , initialOutput{ + .wanted = true, + .outputHash = Hash::dummy, // will be updated + .known = {}, + } , buildMode(buildMode) { this->drv = std::make_unique(drv); @@ -125,9 +126,10 @@ Goal::Co DerivationGoal::haveDerivation() auto outputHashes = staticOutputHashes(worker.evalStore, *drv); for (auto & [outputName, outputHash] : outputHashes) { - InitialOutput v{ - .wanted = true, // Will be refined later - .outputHash = outputHash}; + if (outputName != wantedOutput) + continue; + + InitialOutput v{.wanted = true, .outputHash = outputHash}; /* TODO we might want to also allow randomizing the paths for regular CA derivations, e.g. for sake of checking @@ -139,10 +141,8 @@ Goal::Co DerivationGoal::haveDerivation() }; } - initialOutputs.insert({ - outputName, - std::move(v), - }); + initialOutput = std::move(v); + break; } if (impure) { @@ -167,21 +167,18 @@ Goal::Co DerivationGoal::haveDerivation() /* We are first going to try to create the invalid output paths through substitutes. If that doesn't work, we'll build them. */ - if (settings.useSubstitutes && drvOptions.substitutesAllowed()) - for (auto & [outputName, status] : initialOutputs) { - if (!status.wanted) - continue; - if (!status.known) - waitees.insert(upcast_goal(worker.makeDrvOutputSubstitutionGoal( - DrvOutput{status.outputHash, outputName}, buildMode == bmRepair ? Repair : NoRepair))); - else { - auto * cap = getDerivationCA(*drv); - waitees.insert(upcast_goal(worker.makePathSubstitutionGoal( - status.known->path, - buildMode == bmRepair ? Repair : NoRepair, - cap ? std::optional{*cap} : std::nullopt))); - } + if (settings.useSubstitutes && drvOptions.substitutesAllowed()) { + if (!initialOutput.known) + waitees.insert(upcast_goal(worker.makeDrvOutputSubstitutionGoal( + DrvOutput{initialOutput.outputHash, wantedOutput}, buildMode == bmRepair ? Repair : NoRepair))); + else { + auto * cap = getDerivationCA(*drv); + waitees.insert(upcast_goal(worker.makePathSubstitutionGoal( + initialOutput.known->path, + buildMode == bmRepair ? Repair : NoRepair, + cap ? 
std::optional{*cap} : std::nullopt))); } + } co_await await(std::move(waitees)); @@ -317,7 +314,6 @@ std::pair DerivationGoal::checkPathValidity() return {false, {}}; bool checkHash = buildMode == bmRepair; - StringSet wantedOutputsLeft{wantedOutput}; SingleDrvOutputs validOutputs; auto partialDerivationOutputMap = [&] { @@ -333,17 +329,10 @@ std::pair DerivationGoal::checkPathValidity() return res; }(); - for (auto & i : partialDerivationOutputMap) { - auto initialOutput = get(initialOutputs, i.first); - if (!initialOutput) - // this is an invalid output, gets caught with (!wantedOutputsLeft.empty()) - continue; - auto & info = *initialOutput; - info.wanted = wantedOutput == i.first; - if (info.wanted) - wantedOutputsLeft.erase(i.first); - if (i.second) { - auto outputPath = *i.second; + if (auto * mpath = get(partialDerivationOutputMap, wantedOutput)) { + auto & info = initialOutput; + if (*mpath) { + auto & outputPath = **mpath; info.known = { .path = outputPath, .status = !worker.store.isValidPath(outputPath) ? PathStatus::Absent @@ -351,7 +340,7 @@ std::pair DerivationGoal::checkPathValidity() : PathStatus::Corrupt, }; } - auto drvOutput = DrvOutput{info.outputHash, i.first}; + auto drvOutput = DrvOutput{info.outputHash, wantedOutput}; if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { if (auto real = worker.store.queryRealisation(drvOutput)) { info.known = { @@ -371,25 +360,19 @@ std::pair DerivationGoal::checkPathValidity() } } if (info.known && info.known->isValid()) - validOutputs.emplace(i.first, Realisation{drvOutput, info.known->path}); + validOutputs.emplace(wantedOutput, Realisation{drvOutput, info.known->path}); + } else { + // If we requested all the outputs, we are always fine. + // If we requested specific elements, the loop above removes all the valid + // ones, so any that are left must be invalid. + throw Error( + "derivation '%s' does not have wanted outputs '%s'", worker.store.printStorePath(drvPath), wantedOutput); } - // If we requested all the outputs, we are always fine. - // If we requested specific elements, the loop above removes all the valid - // ones, so any that are left must be invalid. - if (!wantedOutputsLeft.empty()) - throw Error( - "derivation '%s' does not have wanted outputs %s", - worker.store.printStorePath(drvPath), - concatStringsSep(", ", quoteStrings(wantedOutputsLeft))); - bool allValid = true; - for (auto & [_, status] : initialOutputs) { - if (!status.wanted) - continue; - if (!status.known || !status.known->isValid()) { + { + if (!initialOutput.known || !initialOutput.known->isValid()) { allValid = false; - break; } } diff --git a/src/libstore/include/nix/store/build/derivation-goal.hh b/src/libstore/include/nix/store/build/derivation-goal.hh index 4b004e73b..a11aad22c 100644 --- a/src/libstore/include/nix/store/build/derivation-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-goal.hh @@ -74,7 +74,7 @@ private: * The remainder is state held during the build. */ - std::map initialOutputs; + InitialOutput initialOutput; BuildMode buildMode; @@ -86,7 +86,7 @@ private: Co haveDerivation(); /** - * Update 'initialOutputs' to determine the current status of the + * Update 'initialOutput' to determine the current status of the * outputs of the derivation. Also returns a Boolean denoting * whether all outputs are valid and non-corrupt, and a * 'SingleDrvOutputs' structure containing the valid outputs. 
From 1a6f92837a7483b44f4223a9f20a899805df055c Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 13 Aug 2025 22:18:03 -0400 Subject: [PATCH 169/382] Don't use `InitialOutput` in `DerivationGoal` We don't need the `wanted` field. Just inline the other two fields. --- src/libstore/build/derivation-goal.cc | 43 +++++++++---------- .../nix/store/build/derivation-goal.hh | 3 +- 2 files changed, 23 insertions(+), 23 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index f9f26ba52..c65dbda21 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -24,16 +24,17 @@ namespace nix { -DerivationGoal::DerivationGoal(const StorePath & drvPath, const Derivation & drv, - const OutputName & wantedOutput, Worker & worker, BuildMode buildMode) +DerivationGoal::DerivationGoal( + const StorePath & drvPath, + const Derivation & drv, + const OutputName & wantedOutput, + Worker & worker, + BuildMode buildMode) : Goal(worker, haveDerivation()) , drvPath(drvPath) , wantedOutput(wantedOutput) - , initialOutput{ - .wanted = true, - .outputHash = Hash::dummy, // will be updated - .known = {}, - } + , outputHash{Hash::dummy} // will be updated + , outputKnown{} , buildMode(buildMode) { this->drv = std::make_unique(drv); @@ -129,19 +130,18 @@ Goal::Co DerivationGoal::haveDerivation() if (outputName != wantedOutput) continue; - InitialOutput v{.wanted = true, .outputHash = outputHash}; + this->outputHash = outputHash; /* TODO we might want to also allow randomizing the paths for regular CA derivations, e.g. for sake of checking determinism. */ if (impure) { - v.known = InitialOutputStatus{ + outputKnown = InitialOutputStatus{ .path = StorePath::random(outputPathName(drv->name, outputName)), .status = PathStatus::Absent, }; } - initialOutput = std::move(v); break; } @@ -168,13 +168,13 @@ Goal::Co DerivationGoal::haveDerivation() through substitutes. If that doesn't work, we'll build them. */ if (settings.useSubstitutes && drvOptions.substitutesAllowed()) { - if (!initialOutput.known) + if (!outputKnown) waitees.insert(upcast_goal(worker.makeDrvOutputSubstitutionGoal( - DrvOutput{initialOutput.outputHash, wantedOutput}, buildMode == bmRepair ? Repair : NoRepair))); + DrvOutput{outputHash, wantedOutput}, buildMode == bmRepair ? Repair : NoRepair))); else { auto * cap = getDerivationCA(*drv); waitees.insert(upcast_goal(worker.makePathSubstitutionGoal( - initialOutput.known->path, + outputKnown->path, buildMode == bmRepair ? Repair : NoRepair, cap ? std::optional{*cap} : std::nullopt))); } @@ -330,24 +330,23 @@ std::pair DerivationGoal::checkPathValidity() }(); if (auto * mpath = get(partialDerivationOutputMap, wantedOutput)) { - auto & info = initialOutput; if (*mpath) { auto & outputPath = **mpath; - info.known = { + outputKnown = { .path = outputPath, .status = !worker.store.isValidPath(outputPath) ? PathStatus::Absent : !checkHash || worker.pathContentsGood(outputPath) ? 
PathStatus::Valid : PathStatus::Corrupt, }; } - auto drvOutput = DrvOutput{info.outputHash, wantedOutput}; + auto drvOutput = DrvOutput{outputHash, wantedOutput}; if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { if (auto real = worker.store.queryRealisation(drvOutput)) { - info.known = { + outputKnown = { .path = real->outPath, .status = PathStatus::Valid, }; - } else if (info.known && info.known->isValid()) { + } else if (outputKnown && outputKnown->isValid()) { // We know the output because it's a static output of the // derivation, and the output path is valid, but we don't have // its realisation stored (probably because it has been built @@ -355,12 +354,12 @@ std::pair DerivationGoal::checkPathValidity() worker.store.registerDrvOutput( Realisation{ drvOutput, - info.known->path, + outputKnown->path, }); } } - if (info.known && info.known->isValid()) - validOutputs.emplace(wantedOutput, Realisation{drvOutput, info.known->path}); + if (outputKnown && outputKnown->isValid()) + validOutputs.emplace(wantedOutput, Realisation{drvOutput, outputKnown->path}); } else { // If we requested all the outputs, we are always fine. // If we requested specific elements, the loop above removes all the valid @@ -371,7 +370,7 @@ std::pair DerivationGoal::checkPathValidity() bool allValid = true; { - if (!initialOutput.known || !initialOutput.known->isValid()) { + if (!outputKnown || !outputKnown->isValid()) { allValid = false; } } diff --git a/src/libstore/include/nix/store/build/derivation-goal.hh b/src/libstore/include/nix/store/build/derivation-goal.hh index a11aad22c..ca41d71aa 100644 --- a/src/libstore/include/nix/store/build/derivation-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-goal.hh @@ -74,7 +74,8 @@ private: * The remainder is state held during the build. */ - InitialOutput initialOutput; + Hash outputHash; + std::optional outputKnown; BuildMode buildMode; From 26003911478a62a6c5f258ad8c1331c3d0bd876d Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 13 Aug 2025 22:25:41 -0400 Subject: [PATCH 170/382] Simplify `DerivationGoal` loop -> if More taking advantage of single wanted output. Also `auto *` not `auto` for easy reading. --- src/libstore/build/derivation-goal.cc | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index c65dbda21..c791aa5fc 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -126,23 +126,18 @@ Goal::Co DerivationGoal::haveDerivation() experimentalFeatureSettings.require(Xp::ImpureDerivations); auto outputHashes = staticOutputHashes(worker.evalStore, *drv); - for (auto & [outputName, outputHash] : outputHashes) { - if (outputName != wantedOutput) - continue; - - this->outputHash = outputHash; + if (auto * mOutputHash = get(outputHashes, wantedOutput)) { + outputHash = *mOutputHash; /* TODO we might want to also allow randomizing the paths for regular CA derivations, e.g. for sake of checking determinism. 
*/ if (impure) { outputKnown = InitialOutputStatus{ - .path = StorePath::random(outputPathName(drv->name, outputName)), + .path = StorePath::random(outputPathName(drv->name, wantedOutput)), .status = PathStatus::Absent, }; } - - break; } if (impure) { @@ -247,7 +242,7 @@ Goal::Co DerivationGoal::repairClosure() }(); StorePathSet outputClosure; - if (auto mPath = get(outputs, wantedOutput)) { + if (auto * mPath = get(outputs, wantedOutput)) { worker.store.computeFSClosure(*mPath, outputClosure); } From b6ca60cb821c67739f03152e683b3e68dd632e7b Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 13 Aug 2025 23:01:58 -0400 Subject: [PATCH 171/382] `DerivationBuilder::checkPathValidity`: Simplify `allValid` calc Now that the loops is gone, we can just inline this mutation to a single simple expression. --- src/libstore/build/derivation-goal.cc | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index c791aa5fc..3273ab82f 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -363,14 +363,7 @@ std::pair DerivationGoal::checkPathValidity() "derivation '%s' does not have wanted outputs '%s'", worker.store.printStorePath(drvPath), wantedOutput); } - bool allValid = true; - { - if (!outputKnown || !outputKnown->isValid()) { - allValid = false; - } - } - - return {allValid, validOutputs}; + return {outputKnown && outputKnown->isValid(), validOutputs}; } SingleDrvOutputs DerivationGoal::assertPathValidity() From 2324fe351556ba00f2e898acc9dbffdecff9ec62 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 13 Aug 2025 23:19:46 -0400 Subject: [PATCH 172/382] `DerivationBuilder::checkPathValidity`: Big simplify `Store::queryPartialDerivationOutputMap` is nothing but checking statically-known output paths, and then `Store::queryRealisation`, and we were doing both of those things already. Inline that and simplify, again taking advantage of the fact that we only care about one output. --- src/libstore/build/derivation-goal.cc | 76 ++++++++++++--------------- 1 file changed, 34 insertions(+), 42 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 3273ab82f..30eb7ca74 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -308,59 +308,51 @@ std::pair DerivationGoal::checkPathValidity() if (drv->type().isImpure()) return {false, {}}; - bool checkHash = buildMode == bmRepair; SingleDrvOutputs validOutputs; - auto partialDerivationOutputMap = [&] { - for (auto * drvStore : {&worker.evalStore, &worker.store}) - if (drvStore->isValidPath(drvPath)) - return worker.store.queryPartialDerivationOutputMap(drvPath, drvStore); + auto drvOutput = DrvOutput{outputHash, wantedOutput}; - /* In-memory derivation will naturally fall back on this case, where - we do best-effort with static information. */ - std::map> res; - for (auto & [name, output] : drv->outputs) - res.insert_or_assign(name, output.path(worker.store, drv->name, name)); - return res; - }(); + std::optional mRealisation; - if (auto * mpath = get(partialDerivationOutputMap, wantedOutput)) { - if (*mpath) { - auto & outputPath = **mpath; - outputKnown = { - .path = outputPath, - .status = !worker.store.isValidPath(outputPath) ? PathStatus::Absent - : !checkHash || worker.pathContentsGood(outputPath) ? 
PathStatus::Valid - : PathStatus::Corrupt, - }; + if (auto * mOutput = get(drv->outputs, wantedOutput)) { + if (auto mPath = mOutput->path(worker.store, drv->name, wantedOutput)) { + mRealisation = Realisation{drvOutput, std::move(*mPath)}; } - auto drvOutput = DrvOutput{outputHash, wantedOutput}; - if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { - if (auto real = worker.store.queryRealisation(drvOutput)) { - outputKnown = { - .path = real->outPath, - .status = PathStatus::Valid, - }; - } else if (outputKnown && outputKnown->isValid()) { + } else { + throw Error( + "derivation '%s' does not have wanted outputs '%s'", worker.store.printStorePath(drvPath), wantedOutput); + } + + if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { + for (auto * drvStore : {&worker.evalStore, &worker.store}) { + if (auto real = drvStore->queryRealisation(drvOutput)) { + mRealisation = *real; + break; + } + } + } + + if (mRealisation) { + auto & outputPath = mRealisation->outPath; + bool checkHash = buildMode == bmRepair; + outputKnown = { + .path = outputPath, + .status = !worker.store.isValidPath(outputPath) ? PathStatus::Absent + : !checkHash || worker.pathContentsGood(outputPath) ? PathStatus::Valid + : PathStatus::Corrupt, + }; + + if (outputKnown->isValid()) { + if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { // We know the output because it's a static output of the // derivation, and the output path is valid, but we don't have // its realisation stored (probably because it has been built // without the `ca-derivations` experimental flag). - worker.store.registerDrvOutput( - Realisation{ - drvOutput, - outputKnown->path, - }); + worker.store.registerDrvOutput(*mRealisation); } + + validOutputs.emplace(wantedOutput, *mRealisation); } - if (outputKnown && outputKnown->isValid()) - validOutputs.emplace(wantedOutput, Realisation{drvOutput, outputKnown->path}); - } else { - // If we requested all the outputs, we are always fine. - // If we requested specific elements, the loop above removes all the valid - // ones, so any that are left must be invalid. - throw Error( - "derivation '%s' does not have wanted outputs '%s'", worker.store.printStorePath(drvPath), wantedOutput); } return {outputKnown && outputKnown->isValid(), validOutputs}; From 766a52ce87458644591199aeb9c2888447095d20 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 13 Aug 2025 23:46:16 -0400 Subject: [PATCH 173/382] `DerivationOutput`: Remove `outputKnown` state Now that `DerivationGoal::checkPathValidity` is legible, we can see that it only sets `outputKnown`, and doesn't read it. Likewise, with co-routines, we don't have tiny scopes that make local variables difficult. Between these two things, we can simply have `checkPathValidity` return what it finds, rather than mutate some state, and update everyting to use local variables. The same transformation could probably be done to the other derivation goal types (which currently, unfortunately, contain their own `checkPathValidity`s, though they are diverging, and we hope and believe that they continue to diverge). 
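For readers skimming the series, the general shape of the change is roughly the following standalone sketch (made-up types and values, not the code in the diff below): the query returns what it finds as an `std::optional`, and callers keep the result in a local instead of reading a member back afterwards.

    #include <optional>
    #include <string>
    #include <utility>

    enum class PathStatus { Absent, Valid, Corrupt };

    struct Realisation {
        std::string outPath;
    };

    // Returns std::nullopt when nothing is known about the output yet; otherwise
    // the realisation together with its current status. No member state is touched.
    std::optional<std::pair<Realisation, PathStatus>> checkPathValiditySketch(bool known, bool valid)
    {
        if (!known)
            return std::nullopt;
        return std::pair{Realisation{"/nix/store/example"}, valid ? PathStatus::Valid : PathStatus::Corrupt};
    }

    int main()
    {
        auto checkResult = checkPathValiditySketch(true, true);
        return (checkResult && checkResult->second == PathStatus::Valid) ? 0 : 1;
    }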
--- src/libstore/build/derivation-goal.cc | 84 +++++++------------ .../nix/store/build/derivation-goal.hh | 16 ++-- 2 files changed, 40 insertions(+), 60 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 30eb7ca74..9923b0dc1 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -34,7 +34,6 @@ DerivationGoal::DerivationGoal( , drvPath(drvPath) , wantedOutput(wantedOutput) , outputHash{Hash::dummy} // will be updated - , outputKnown{} , buildMode(buildMode) { this->drv = std::make_unique(drv); @@ -98,7 +97,7 @@ Goal::Co DerivationGoal::haveDerivation() /* In checking mode, the builder will not register any outputs. So we want to make sure the ones that we wanted to check are properly there. */ - buildResult.builtOutputs = assertPathValidity(); + buildResult.builtOutputs = {{wantedOutput, assertPathValidity()}}; } for (auto it = buildResult.builtOutputs.begin(); it != buildResult.builtOutputs.end();) { @@ -125,19 +124,8 @@ Goal::Co DerivationGoal::haveDerivation() if (impure) experimentalFeatureSettings.require(Xp::ImpureDerivations); - auto outputHashes = staticOutputHashes(worker.evalStore, *drv); - if (auto * mOutputHash = get(outputHashes, wantedOutput)) { + if (auto * mOutputHash = get(staticOutputHashes(worker.evalStore, *drv), wantedOutput)) { outputHash = *mOutputHash; - - /* TODO we might want to also allow randomizing the paths - for regular CA derivations, e.g. for sake of checking - determinism. */ - if (impure) { - outputKnown = InitialOutputStatus{ - .path = StorePath::random(outputPathName(drv->name, wantedOutput)), - .status = PathStatus::Absent, - }; - } } if (impure) { @@ -147,14 +135,12 @@ Goal::Co DerivationGoal::haveDerivation() } } - { - /* Check what outputs paths are not already valid. */ - auto [allValid, validOutputs] = checkPathValidity(); + /* Check what outputs paths are not already valid. */ + auto checkResult = checkPathValidity(); - /* If they are all valid, then we're done. */ - if (allValid && buildMode == bmNormal) { - co_return done(BuildResult::AlreadyValid, std::move(validOutputs)); - } + /* If they are all valid, then we're done. */ + if (checkResult && checkResult->second == PathStatus::Valid && buildMode == bmNormal) { + co_return done(BuildResult::AlreadyValid, {{wantedOutput, checkResult->first}}); } Goals waitees; @@ -163,13 +149,13 @@ Goal::Co DerivationGoal::haveDerivation() through substitutes. If that doesn't work, we'll build them. */ if (settings.useSubstitutes && drvOptions.substitutesAllowed()) { - if (!outputKnown) + if (!checkResult) waitees.insert(upcast_goal(worker.makeDrvOutputSubstitutionGoal( DrvOutput{outputHash, wantedOutput}, buildMode == bmRepair ? Repair : NoRepair))); else { auto * cap = getDerivationCA(*drv); waitees.insert(upcast_goal(worker.makePathSubstitutionGoal( - outputKnown->path, + checkResult->first.outPath, buildMode == bmRepair ? Repair : NoRepair, cap ? 
std::optional{*cap} : std::nullopt))); } @@ -192,10 +178,12 @@ Goal::Co DerivationGoal::haveDerivation() nrFailed = nrNoSubstituters = 0; - auto [allValid, validOutputs] = checkPathValidity(); + checkResult = checkPathValidity(); + + bool allValid = checkResult && checkResult->second == PathStatus::Valid; if (buildMode == bmNormal && allValid) { - co_return done(BuildResult::Substituted, std::move(validOutputs)); + co_return done(BuildResult::Substituted, {{wantedOutput, checkResult->first}}); } if (buildMode == bmRepair && allValid) { co_return repairClosure(); @@ -300,15 +288,13 @@ Goal::Co DerivationGoal::repairClosure() "some paths in the output closure of derivation '%s' could not be repaired", worker.store.printStorePath(drvPath)); } - co_return done(BuildResult::AlreadyValid, assertPathValidity()); + co_return done(BuildResult::AlreadyValid, {{wantedOutput, assertPathValidity()}}); } -std::pair DerivationGoal::checkPathValidity() +std::optional> DerivationGoal::checkPathValidity() { if (drv->type().isImpure()) - return {false, {}}; - - SingleDrvOutputs validOutputs; + return std::nullopt; auto drvOutput = DrvOutput{outputHash, wantedOutput}; @@ -335,35 +321,29 @@ std::pair DerivationGoal::checkPathValidity() if (mRealisation) { auto & outputPath = mRealisation->outPath; bool checkHash = buildMode == bmRepair; - outputKnown = { - .path = outputPath, - .status = !worker.store.isValidPath(outputPath) ? PathStatus::Absent - : !checkHash || worker.pathContentsGood(outputPath) ? PathStatus::Valid - : PathStatus::Corrupt, - }; + PathStatus status = !worker.store.isValidPath(outputPath) ? PathStatus::Absent + : !checkHash || worker.pathContentsGood(outputPath) ? PathStatus::Valid + : PathStatus::Corrupt; - if (outputKnown->isValid()) { - if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { - // We know the output because it's a static output of the - // derivation, and the output path is valid, but we don't have - // its realisation stored (probably because it has been built - // without the `ca-derivations` experimental flag). - worker.store.registerDrvOutput(*mRealisation); - } - - validOutputs.emplace(wantedOutput, *mRealisation); + if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations) && status == PathStatus::Valid) { + // We know the output because it's a static output of the + // derivation, and the output path is valid, but we don't have + // its realisation stored (probably because it has been built + // without the `ca-derivations` experimental flag). 
+ worker.store.registerDrvOutput(*mRealisation); } - } - return {outputKnown && outputKnown->isValid(), validOutputs}; + return {{*mRealisation, status}}; + } else + return std::nullopt; } -SingleDrvOutputs DerivationGoal::assertPathValidity() +Realisation DerivationGoal::assertPathValidity() { - auto [allValid, validOutputs] = checkPathValidity(); - if (!allValid) + auto checkResult = checkPathValidity(); + if (!(checkResult && checkResult->second == PathStatus::Valid)) throw Error("some outputs are unexpectedly invalid"); - return validOutputs; + return checkResult->first; } Goal::Done DerivationGoal::done(BuildResult::Status status, SingleDrvOutputs builtOutputs, std::optional ex) diff --git a/src/libstore/include/nix/store/build/derivation-goal.hh b/src/libstore/include/nix/store/build/derivation-goal.hh index ca41d71aa..d0c069446 100644 --- a/src/libstore/include/nix/store/build/derivation-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-goal.hh @@ -75,7 +75,6 @@ private: */ Hash outputHash; - std::optional outputKnown; BuildMode buildMode; @@ -87,18 +86,19 @@ private: Co haveDerivation(); /** - * Update 'initialOutput' to determine the current status of the - * outputs of the derivation. Also returns a Boolean denoting - * whether all outputs are valid and non-corrupt, and a - * 'SingleDrvOutputs' structure containing the valid outputs. + * Return `std::nullopt` if the output is unknown, e.g. un unbuilt + * floating content-addressing derivation. Otherwise, returns a pair + * of a `Realisation`, containing among other things the store path + * of the wanted output, and a `PathStatus` with the + * current status of that output. */ - std::pair checkPathValidity(); + std::optional> checkPathValidity(); /** * Aborts if any output is not valid or corrupt, and otherwise - * returns a 'SingleDrvOutputs' structure containing all outputs. + * returns a 'Realisation' for the wanted output. */ - SingleDrvOutputs assertPathValidity(); + Realisation assertPathValidity(); Co repairClosure(); From 7707d0acad199835f441249436d7d4c97b776068 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 14 Aug 2025 00:09:08 -0400 Subject: [PATCH 174/382] Get rid of `filterDrvOutputs` We don't need it any more, because we only used it in the single-wanted-output `DerivationGoal`. 
--- src/libstore/build/derivation-goal.cc | 5 ++--- src/libstore/include/nix/store/realisation.hh | 7 ------- src/libstore/realisation.cc | 12 ------------ 3 files changed, 2 insertions(+), 22 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 9923b0dc1..62b88f42f 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -359,9 +359,8 @@ Goal::Done DerivationGoal::done(BuildResult::Status status, SingleDrvOutputs bui mcExpectedBuilds.reset(); if (buildResult.success()) { - auto wantedBuiltOutputs = filterDrvOutputs(OutputsSpec::Names{wantedOutput}, std::move(builtOutputs)); - assert(!wantedBuiltOutputs.empty()); - buildResult.builtOutputs = std::move(wantedBuiltOutputs); + assert(!builtOutputs.empty()); + buildResult.builtOutputs = std::move(builtOutputs); if (status == BuildResult::Built) worker.doneBuilds++; } else { diff --git a/src/libstore/include/nix/store/realisation.hh b/src/libstore/include/nix/store/realisation.hh index f653d517b..6eb3eecf3 100644 --- a/src/libstore/include/nix/store/realisation.hh +++ b/src/libstore/include/nix/store/realisation.hh @@ -102,13 +102,6 @@ typedef std::map SingleDrvOutputs; */ typedef std::map DrvOutputs; -/** - * Filter a SingleDrvOutputs to include only specific output names - * - * Moves the `outputs` input. - */ -SingleDrvOutputs filterDrvOutputs(const OutputsSpec &, SingleDrvOutputs &&); - struct OpaquePath { StorePath path; diff --git a/src/libstore/realisation.cc b/src/libstore/realisation.cc index 8a6d99ffe..8c3baa73b 100644 --- a/src/libstore/realisation.cc +++ b/src/libstore/realisation.cc @@ -135,18 +135,6 @@ size_t Realisation::checkSignatures(const PublicKeys & publicKeys) const return good; } -SingleDrvOutputs filterDrvOutputs(const OutputsSpec & wanted, SingleDrvOutputs && outputs) -{ - SingleDrvOutputs ret = std::move(outputs); - for (auto it = ret.begin(); it != ret.end();) { - if (!wanted.contains(it->first)) - it = ret.erase(it); - else - ++it; - } - return ret; -} - StorePath RealisedPath::path() const { return std::visit([](auto && arg) { return arg.getPath(); }, raw); From 88275e572384803e0b39f9cb4321712d6f1baf63 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 14 Aug 2025 00:16:26 -0400 Subject: [PATCH 175/382] `DerivationGoal` slight cleanup of some impure drv logic --- src/libstore/build/derivation-goal.cc | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 62b88f42f..d21590c8a 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -119,16 +119,12 @@ Goal::Co DerivationGoal::haveDerivation() worker.store.addTempRoot(*i.second.second); { - bool impure = drv->type().isImpure(); - - if (impure) - experimentalFeatureSettings.require(Xp::ImpureDerivations); - if (auto * mOutputHash = get(staticOutputHashes(worker.evalStore, *drv), wantedOutput)) { outputHash = *mOutputHash; } - if (impure) { + if (drv->type().isImpure()) { + experimentalFeatureSettings.require(Xp::ImpureDerivations); /* We don't yet have any safe way to cache an impure derivation at this step. */ co_return gaveUpOnSubstitution(); From 14441f93827ef30c3376f21c4f7846d7d460d771 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 14 Aug 2025 00:21:37 -0400 Subject: [PATCH 176/382] `DerivationGoal` inline `gaveUpOnSubstitution` lambda We can shuffle around control flow so it's only called once. 
You'll definitely want to review this diff ignoring whitespace. --- src/libstore/build/derivation-goal.cc | 203 +++++++++++++------------- 1 file changed, 98 insertions(+), 105 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index d21590c8a..3db335b04 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -75,121 +75,114 @@ Goal::Co DerivationGoal::haveDerivation() if (!drv->type().hasKnownOutputPaths()) experimentalFeatureSettings.require(Xp::CaDerivations); - /* At least one of the output paths could not be - produced using a substitute. So we have to build instead. */ - auto gaveUpOnSubstitution = [&]() -> Goal::Co { - auto g = worker.makeDerivationBuildingGoal(drvPath, *drv, buildMode); - - /* We will finish with it ourselves, as if we were the derivational goal. */ - g->preserveException = true; - - { - Goals waitees; - waitees.insert(g); - co_await await(std::move(waitees)); - } - - trace("outer build done"); - - buildResult = g->buildResult; - - if (buildMode == bmCheck) { - /* In checking mode, the builder will not register any outputs. - So we want to make sure the ones that we wanted to check are - properly there. */ - buildResult.builtOutputs = {{wantedOutput, assertPathValidity()}}; - } - - for (auto it = buildResult.builtOutputs.begin(); it != buildResult.builtOutputs.end();) { - if (it->first != wantedOutput) { - it = buildResult.builtOutputs.erase(it); - } else { - ++it; - } - } - - if (buildResult.success()) - assert(buildResult.builtOutputs.count(wantedOutput) > 0); - - co_return amDone(g->exitCode, g->ex); - }; - for (auto & i : drv->outputsAndOptPaths(worker.store)) if (i.second.second) worker.store.addTempRoot(*i.second.second); + if (auto * mOutputHash = get(staticOutputHashes(worker.evalStore, *drv), wantedOutput)) { + outputHash = *mOutputHash; + } + + /* We don't yet have any safe way to cache an impure derivation at + this step. */ + if (drv->type().isImpure()) { + experimentalFeatureSettings.require(Xp::ImpureDerivations); + } else { + /* Check what outputs paths are not already valid. */ + auto checkResult = checkPathValidity(); + + /* If they are all valid, then we're done. */ + if (checkResult && checkResult->second == PathStatus::Valid && buildMode == bmNormal) { + co_return done(BuildResult::AlreadyValid, {{wantedOutput, checkResult->first}}); + } + + Goals waitees; + + /* We are first going to try to create the invalid output paths + through substitutes. If that doesn't work, we'll build + them. */ + if (settings.useSubstitutes && drvOptions.substitutesAllowed()) { + if (!checkResult) + waitees.insert(upcast_goal(worker.makeDrvOutputSubstitutionGoal( + DrvOutput{outputHash, wantedOutput}, buildMode == bmRepair ? Repair : NoRepair))); + else { + auto * cap = getDerivationCA(*drv); + waitees.insert(upcast_goal(worker.makePathSubstitutionGoal( + checkResult->first.outPath, + buildMode == bmRepair ? Repair : NoRepair, + cap ? 
std::optional{*cap} : std::nullopt))); + } + } + + co_await await(std::move(waitees)); + + trace("all outputs substituted (maybe)"); + + assert(!drv->type().isImpure()); + + if (nrFailed > 0 && nrFailed > nrNoSubstituters && !settings.tryFallback) { + co_return done( + BuildResult::TransientFailure, + {}, + Error( + "some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ", + worker.store.printStorePath(drvPath))); + } + + nrFailed = nrNoSubstituters = 0; + + checkResult = checkPathValidity(); + + bool allValid = checkResult && checkResult->second == PathStatus::Valid; + + if (buildMode == bmNormal && allValid) { + co_return done(BuildResult::Substituted, {{wantedOutput, checkResult->first}}); + } + if (buildMode == bmRepair && allValid) { + co_return repairClosure(); + } + if (buildMode == bmCheck && !allValid) + throw Error( + "some outputs of '%s' are not valid, so checking is not possible", + worker.store.printStorePath(drvPath)); + } + + /* Give up on substitution for the output we want, actually build this derivation */ + + auto g = worker.makeDerivationBuildingGoal(drvPath, *drv, buildMode); + + /* We will finish with it ourselves, as if we were the derivational goal. */ + g->preserveException = true; + { - if (auto * mOutputHash = get(staticOutputHashes(worker.evalStore, *drv), wantedOutput)) { - outputHash = *mOutputHash; - } + Goals waitees; + waitees.insert(g); + co_await await(std::move(waitees)); + } - if (drv->type().isImpure()) { - experimentalFeatureSettings.require(Xp::ImpureDerivations); - /* We don't yet have any safe way to cache an impure derivation at - this step. */ - co_return gaveUpOnSubstitution(); + trace("outer build done"); + + buildResult = g->buildResult; + + if (buildMode == bmCheck) { + /* In checking mode, the builder will not register any outputs. + So we want to make sure the ones that we wanted to check are + properly there. */ + buildResult.builtOutputs = {{wantedOutput, assertPathValidity()}}; + } + + for (auto it = buildResult.builtOutputs.begin(); it != buildResult.builtOutputs.end();) { + if (it->first != wantedOutput) { + it = buildResult.builtOutputs.erase(it); + } else { + ++it; } } - /* Check what outputs paths are not already valid. */ - auto checkResult = checkPathValidity(); + if (buildResult.success()) + assert(buildResult.builtOutputs.count(wantedOutput) > 0); - /* If they are all valid, then we're done. */ - if (checkResult && checkResult->second == PathStatus::Valid && buildMode == bmNormal) { - co_return done(BuildResult::AlreadyValid, {{wantedOutput, checkResult->first}}); - } - - Goals waitees; - - /* We are first going to try to create the invalid output paths - through substitutes. If that doesn't work, we'll build - them. */ - if (settings.useSubstitutes && drvOptions.substitutesAllowed()) { - if (!checkResult) - waitees.insert(upcast_goal(worker.makeDrvOutputSubstitutionGoal( - DrvOutput{outputHash, wantedOutput}, buildMode == bmRepair ? Repair : NoRepair))); - else { - auto * cap = getDerivationCA(*drv); - waitees.insert(upcast_goal(worker.makePathSubstitutionGoal( - checkResult->first.outPath, - buildMode == bmRepair ? Repair : NoRepair, - cap ? 
std::optional{*cap} : std::nullopt))); - } - } - - co_await await(std::move(waitees)); - - trace("all outputs substituted (maybe)"); - - assert(!drv->type().isImpure()); - - if (nrFailed > 0 && nrFailed > nrNoSubstituters && !settings.tryFallback) { - co_return done( - BuildResult::TransientFailure, - {}, - Error( - "some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ", - worker.store.printStorePath(drvPath))); - } - - nrFailed = nrNoSubstituters = 0; - - checkResult = checkPathValidity(); - - bool allValid = checkResult && checkResult->second == PathStatus::Valid; - - if (buildMode == bmNormal && allValid) { - co_return done(BuildResult::Substituted, {{wantedOutput, checkResult->first}}); - } - if (buildMode == bmRepair && allValid) { - co_return repairClosure(); - } - if (buildMode == bmCheck && !allValid) - throw Error( - "some outputs of '%s' are not valid, so checking is not possible", worker.store.printStorePath(drvPath)); - - /* Nothing to wait for; tail call */ - co_return gaveUpOnSubstitution(); + co_return amDone(g->exitCode, g->ex); } /** From c9402837503712a1af3b5533dc6a76e3475321c7 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 14 Aug 2025 00:26:19 -0400 Subject: [PATCH 177/382] `DerivationBuilder` Move output result filtering logic and assert just into the branch where it is not obviously a no op / meeting the assertion. Add a comment too, while we are at it. --- src/libstore/build/derivation-goal.cc | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 3db335b04..5407e6f60 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -169,18 +169,21 @@ Goal::Co DerivationGoal::haveDerivation() So we want to make sure the ones that we wanted to check are properly there. */ buildResult.builtOutputs = {{wantedOutput, assertPathValidity()}}; - } - - for (auto it = buildResult.builtOutputs.begin(); it != buildResult.builtOutputs.end();) { - if (it->first != wantedOutput) { - it = buildResult.builtOutputs.erase(it); - } else { - ++it; + } else { + /* Otherwise the builder will give us info for out output, but + also for other outputs. Filter down to just our output so as + not to leak info on unrelated things. */ + for (auto it = buildResult.builtOutputs.begin(); it != buildResult.builtOutputs.end();) { + if (it->first != wantedOutput) { + it = buildResult.builtOutputs.erase(it); + } else { + ++it; + } } - } - if (buildResult.success()) - assert(buildResult.builtOutputs.count(wantedOutput) > 0); + if (buildResult.success()) + assert(buildResult.builtOutputs.count(wantedOutput) > 0); + } co_return amDone(g->exitCode, g->ex); } From f155dffe5901172823b08814fd49dce3b3d8d2b4 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 14 Aug 2025 00:30:26 -0400 Subject: [PATCH 178/382] `DerivationGoal::done` Clean up parameter types We don't need to ask all these callers to build these single-entry maps for us. 
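Roughly the pattern being applied, as a simplified sketch with hypothetical names (the real signatures are in the diff below): the single-entry map is built once, inside `done()`, so call sites only hand over the one `Realisation` they have.

    #include <map>
    #include <optional>
    #include <string>
    #include <utility>

    struct Realisation {
        std::string outPath;
    };

    using SingleDrvOutputs = std::map<std::string, Realisation>;

    struct GoalSketch {
        std::string wantedOutput = "out";
        SingleDrvOutputs builtOutputs;

        // Callers pass the one realisation (or nothing on failure); the
        // single-entry map is assembled in exactly one place.
        void done(std::optional<Realisation> builtOutput)
        {
            if (builtOutput)
                builtOutputs = {{wantedOutput, std::move(*builtOutput)}};
        }
    };

    int main()
    {
        GoalSketch g;
        g.done(Realisation{"/nix/store/example"});
        return g.builtOutputs.count("out") == 1 ? 0 : 1;
    }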
--- src/libstore/build/derivation-goal.cc | 13 +++++++------ .../include/nix/store/build/derivation-goal.hh | 8 +++++++- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 5407e6f60..13ea6019e 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -93,7 +93,7 @@ Goal::Co DerivationGoal::haveDerivation() /* If they are all valid, then we're done. */ if (checkResult && checkResult->second == PathStatus::Valid && buildMode == bmNormal) { - co_return done(BuildResult::AlreadyValid, {{wantedOutput, checkResult->first}}); + co_return done(BuildResult::AlreadyValid, checkResult->first); } Goals waitees; @@ -136,7 +136,7 @@ Goal::Co DerivationGoal::haveDerivation() bool allValid = checkResult && checkResult->second == PathStatus::Valid; if (buildMode == bmNormal && allValid) { - co_return done(BuildResult::Substituted, {{wantedOutput, checkResult->first}}); + co_return done(BuildResult::Substituted, checkResult->first); } if (buildMode == bmRepair && allValid) { co_return repairClosure(); @@ -280,7 +280,7 @@ Goal::Co DerivationGoal::repairClosure() "some paths in the output closure of derivation '%s' could not be repaired", worker.store.printStorePath(drvPath)); } - co_return done(BuildResult::AlreadyValid, {{wantedOutput, assertPathValidity()}}); + co_return done(BuildResult::AlreadyValid, assertPathValidity()); } std::optional> DerivationGoal::checkPathValidity() @@ -338,7 +338,8 @@ Realisation DerivationGoal::assertPathValidity() return checkResult->first; } -Goal::Done DerivationGoal::done(BuildResult::Status status, SingleDrvOutputs builtOutputs, std::optional ex) +Goal::Done +DerivationGoal::done(BuildResult::Status status, std::optional builtOutput, std::optional ex) { buildResult.status = status; if (ex) @@ -351,8 +352,8 @@ Goal::Done DerivationGoal::done(BuildResult::Status status, SingleDrvOutputs bui mcExpectedBuilds.reset(); if (buildResult.success()) { - assert(!builtOutputs.empty()); - buildResult.builtOutputs = std::move(builtOutputs); + assert(builtOutput); + buildResult.builtOutputs = {{wantedOutput, std::move(*builtOutput)}}; if (status == BuildResult::Built) worker.doneBuilds++; } else { diff --git a/src/libstore/include/nix/store/build/derivation-goal.hh b/src/libstore/include/nix/store/build/derivation-goal.hh index d0c069446..8417ea64f 100644 --- a/src/libstore/include/nix/store/build/derivation-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-goal.hh @@ -102,7 +102,13 @@ private: Co repairClosure(); - Done done(BuildResult::Status status, SingleDrvOutputs builtOutputs = {}, std::optional ex = {}); + /** + * @param builtOutput Must be set if `status` is successful. + */ + Done done( + BuildResult::Status status, + std::optional builtOutput = std::nullopt, + std::optional ex = {}); }; } // namespace nix From 4a2de1dbabd28b432b46e4b448e2ca2f46daefeb Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 14 Aug 2025 00:39:00 -0400 Subject: [PATCH 179/382] `DerivationGoal` Make some fields immutable We can set both during construction, yay! 
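As an aside on the idiom used below (a minimal, hypothetical example, not this patch's code): a `const` member whose value needs a little computation can be initialized with an immediately-invoked lambda in the member-initializer list, so there is never a "will be updated later" window.

    #include <string>

    struct Example {
        // Computed once during construction, immutable afterwards.
        const std::string greeting;

        explicit Example(bool formal)
            : greeting{[&] { return formal ? std::string("Good day") : std::string("hi"); }()}
        {
        }
    };

    int main()
    {
        Example e{true};
        return e.greeting.empty() ? 1 : 0;
    }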
--- src/libstore/build/derivation-goal.cc | 12 +++++++----- .../include/nix/store/build/derivation-goal.hh | 8 ++++---- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 13ea6019e..30a247777 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -33,7 +33,13 @@ DerivationGoal::DerivationGoal( : Goal(worker, haveDerivation()) , drvPath(drvPath) , wantedOutput(wantedOutput) - , outputHash{Hash::dummy} // will be updated + , outputHash{[&] { + if (auto * mOutputHash = get(staticOutputHashes(worker.evalStore, drv), wantedOutput)) + return *mOutputHash; + else + throw Error( + "derivation '%s' does not have output '%s'", worker.store.printStorePath(drvPath), wantedOutput); + }()} , buildMode(buildMode) { this->drv = std::make_unique(drv); @@ -79,10 +85,6 @@ Goal::Co DerivationGoal::haveDerivation() if (i.second.second) worker.store.addTempRoot(*i.second.second); - if (auto * mOutputHash = get(staticOutputHashes(worker.evalStore, *drv), wantedOutput)) { - outputHash = *mOutputHash; - } - /* We don't yet have any safe way to cache an impure derivation at this step. */ if (drv->type().isImpure()) { diff --git a/src/libstore/include/nix/store/build/derivation-goal.hh b/src/libstore/include/nix/store/build/derivation-goal.hh index 8417ea64f..589c3fd58 100644 --- a/src/libstore/include/nix/store/build/derivation-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-goal.hh @@ -70,14 +70,14 @@ private: */ std::unique_ptr drv; + const Hash outputHash; + + const BuildMode buildMode; + /** * The remainder is state held during the build. */ - Hash outputHash; - - BuildMode buildMode; - std::unique_ptr> mcExpectedBuilds; /** From 48d15ed1fb8d6edf099d543bb6840fb853c47db1 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 14 Aug 2025 14:54:20 +0300 Subject: [PATCH 180/382] ci/check_secrets: Remove deprecated set-output --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 70712610c..bae883f96 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -146,17 +146,17 @@ jobs: check_secrets: permissions: contents: none - name: Check Docker secrets present for installer tests + name: Check presence of secrets runs-on: ubuntu-24.04 outputs: docker: ${{ steps.secret.outputs.docker }} steps: - - name: Check for secrets + - name: Check for DockerHub secrets id: secret env: _DOCKER_SECRETS: ${{ secrets.DOCKERHUB_USERNAME }}${{ secrets.DOCKERHUB_TOKEN }} run: | - echo "::set-output name=docker::${{ env._DOCKER_SECRETS != '' }}" + echo "docker=${{ env._DOCKER_SECRETS != '' }}" >> $GITHUB_OUTPUT docker_push_image: needs: [tests, vm_tests, check_secrets] From 26dbda6302fc0767153fdccb81f15319cca349f1 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 14 Aug 2025 14:54:45 +0300 Subject: [PATCH 181/382] ci/docker_push_image: Remove dead step This step is now part of the check_secrets job and the output is completely unused. 
--- .github/workflows/ci.yml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bae883f96..2f47f13e1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -169,12 +169,6 @@ jobs: github.ref_name == 'master' runs-on: ubuntu-24.04 steps: - - name: Check for secrets - id: secret - env: - _DOCKER_SECRETS: ${{ secrets.DOCKERHUB_USERNAME }}${{ secrets.DOCKERHUB_TOKEN }} - run: | - echo "::set-output name=docker::${{ env._DOCKER_SECRETS != '' }}" - uses: actions/checkout@v5 with: fetch-depth: 0 From e6f3a193d875cdc193fd6c02a04bfe331bb47187 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 14 Aug 2025 15:52:24 +0300 Subject: [PATCH 182/382] libstore: Fix makeCopyPathMessage after config getUri refactor --- src/libstore/store-api.cc | 40 +++++++++++++++++++++++++++------------ 1 file changed, 28 insertions(+), 12 deletions(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index a30a07952..298b23e5e 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -785,12 +785,24 @@ const Store::Stats & Store::getStats() return stats; } -static std::string makeCopyPathMessage(std::string_view srcUri, std::string_view dstUri, std::string_view storePath) +static std::string +makeCopyPathMessage(const StoreReference & src, const StoreReference & dst, std::string_view storePath) { - return srcUri == "local" || srcUri == "daemon" ? fmt("copying path '%s' to '%s'", storePath, dstUri) - : dstUri == "local" || dstUri == "daemon" - ? fmt("copying path '%s' from '%s'", storePath, srcUri) - : fmt("copying path '%s' from '%s' to '%s'", storePath, srcUri, dstUri); + auto isShorthand = [](const StoreReference & ref) { + if (const auto * specified = std::get_if(&ref.variant)) { + const auto & scheme = specified->scheme; + return (scheme == "local" || scheme == "unix") && specified->authority.empty() && ref.params.empty(); + } + return false; + }; + + if (isShorthand(src)) + return fmt("copying path '%s' to '%s'", storePath, dst.render()); + + if (isShorthand(dst)) + return fmt("copying path '%s' from '%s'", storePath, src.render()); + + return fmt("copying path '%s' from '%s' to '%s'", storePath, src.render(), dst.render()); } void copyStorePath( @@ -801,11 +813,15 @@ void copyStorePath( if (!repair && dstStore.isValidPath(storePath)) return; - auto srcUri = srcStore.config.getUri(); - auto dstUri = dstStore.config.getUri(); + auto srcRef = srcStore.config.getReference(); + auto dstRef = dstStore.config.getReference(); auto storePathS = srcStore.printStorePath(storePath); Activity act( - *logger, lvlInfo, actCopyPath, makeCopyPathMessage(srcUri, dstUri, storePathS), {storePathS, srcUri, dstUri}); + *logger, + lvlInfo, + actCopyPath, + makeCopyPathMessage(srcRef, dstRef, storePathS), + {storePathS, srcRef.render(), dstRef.render()}); PushActivity pact(act.id); auto info = srcStore.queryPathInfo(storePath); @@ -957,15 +973,15 @@ std::map copyPaths( // We can reasonably assume that the copy will happen whenever we // read the path, so log something about that at that point uint64_t total = 0; - auto srcUri = srcStore.config.getUri(); - auto dstUri = dstStore.config.getUri(); + auto srcRef = srcStore.config.getReference(); + auto dstRef = dstStore.config.getReference(); auto storePathS = srcStore.printStorePath(missingPath); Activity act( *logger, lvlInfo, actCopyPath, - makeCopyPathMessage(srcUri, dstUri, storePathS), - {storePathS, srcUri, dstUri}); + makeCopyPathMessage(srcRef, dstRef, storePathS), + 
{storePathS, srcRef.render(), dstRef.render()}); PushActivity pact(act.id); LambdaSink progressSink([&](std::string_view data) { From 1b7ffa53af168aab255c9ee4c1ca5a192c269738 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 14 Aug 2025 16:47:05 +0300 Subject: [PATCH 183/382] treewide: Remove getUri and replace with getHumanReadableURI where appropriate The problem with old code was that it used getUri for both the `diskCache` as well as logging. This is really bad because it mixes the textual human readable representation with the caching. Also using getUri for the cache key is really problematic for the S3 store, since it doesn't include the `endpoint` in the cache key, so it's totally broken. This starts separating the logging / cache concerns by introducing a `getHumanReadableURI` that should only be used for logging. The caching logic now instead uses `getReference().render(/*withParams=*/false)` exclusively. This would need to be fixed in follow-ups, because that's really fragile and broken for some store types (but it was already broken before). --- src/libcmd/repl.cc | 5 +- src/libstore-c/nix_api_store.cc | 2 +- src/libstore-tests/legacy-ssh-store.cc | 10 ++- src/libstore-tests/nix_api_store.cc | 2 +- src/libstore-tests/ssh-store.cc | 4 +- src/libstore/binary-cache-store.cc | 12 ++-- .../build/drv-output-substitution-goal.cc | 2 +- src/libstore/build/substitution-goal.cc | 8 ++- src/libstore/http-binary-cache-store.cc | 9 +-- src/libstore/include/nix/store/store-api.hh | 17 ++++- src/libstore/include/nix/store/store-cast.hh | 2 +- .../include/nix/store/store-reference.hh | 4 +- src/libstore/remote-store.cc | 4 +- src/libstore/s3-binary-cache-store.cc | 10 ++- src/libstore/store-api.cc | 67 +++++++++++-------- src/libstore/store-reference.cc | 4 +- src/nix/config-check.cc | 6 +- src/nix/log.cc | 5 +- src/nix/run.cc | 4 +- src/nix/store-info.cc | 4 +- 20 files changed, 107 insertions(+), 74 deletions(-) diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 863de05ed..01d786deb 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -574,14 +574,15 @@ ProcessLineResult NixRepl::processLine(std::string line) for (auto & sub : subs) { auto * logSubP = dynamic_cast(&*sub); if (!logSubP) { - printInfo("Skipped '%s' which does not support retrieving build logs", sub->config.getUri()); + printInfo( + "Skipped '%s' which does not support retrieving build logs", sub->config.getHumanReadableURI()); continue; } auto & logSub = *logSubP; auto log = logSub.getBuildLog(drvPath); if (log) { - printInfo("got build log for '%s' from '%s'", drvPathRaw, logSub.config.getUri()); + printInfo("got build log for '%s' from '%s'", drvPathRaw, logSub.config.getHumanReadableURI()); logger->writeToStdout(*log); foundLog = true; break; diff --git a/src/libstore-c/nix_api_store.cc b/src/libstore-c/nix_api_store.cc index 705d8153f..4f91f5332 100644 --- a/src/libstore-c/nix_api_store.cc +++ b/src/libstore-c/nix_api_store.cc @@ -62,7 +62,7 @@ nix_err nix_store_get_uri(nix_c_context * context, Store * store, nix_get_string if (context) context->last_err_code = NIX_OK; try { - auto res = store->ptr->config.getUri(); + auto res = store->ptr->config.getReference().render(/*withParams=*/true); return call_nix_get_string_callback(res, callback, user_data); } NIXC_CATCH_ERRS diff --git a/src/libstore-tests/legacy-ssh-store.cc b/src/libstore-tests/legacy-ssh-store.cc index 04c3763ec..d60ecc424 100644 --- a/src/libstore-tests/legacy-ssh-store.cc +++ b/src/libstore-tests/legacy-ssh-store.cc @@ -6,9 +6,7 @@ 
namespace nix { TEST(LegacySSHStore, constructConfig) { - initLibStore(/*loadConfig=*/false); - - auto config = make_ref( + LegacySSHStoreConfig config( "ssh", "me@localhost:2222", StoreConfig::Params{ @@ -20,13 +18,13 @@ TEST(LegacySSHStore, constructConfig) }); EXPECT_EQ( - config->remoteProgram.get(), + config.remoteProgram.get(), (Strings{ "foo", "bar", })); - auto store = config->openStore(); - EXPECT_EQ(store->config.getUri(), "ssh://me@localhost:2222?remote-program=foo%20bar"); + EXPECT_EQ(config.getReference().render(/*withParams=*/true), "ssh://me@localhost:2222?remote-program=foo%20bar"); + EXPECT_EQ(config.getReference().render(/*withParams=*/false), "ssh://me@localhost:2222"); } } // namespace nix diff --git a/src/libstore-tests/nix_api_store.cc b/src/libstore-tests/nix_api_store.cc index 2310c4395..c7146f977 100644 --- a/src/libstore-tests/nix_api_store.cc +++ b/src/libstore-tests/nix_api_store.cc @@ -104,7 +104,7 @@ TEST_F(nix_api_util_context, nix_store_open_dummy) nix_libstore_init(ctx); Store * store = nix_store_open(ctx, "dummy://", nullptr); ASSERT_EQ(NIX_OK, ctx->last_err_code); - ASSERT_STREQ("dummy://", store->ptr->config.getUri().c_str()); + ASSERT_STREQ("dummy://", store->ptr->config.getReference().render(/*withParams=*/true).c_str()); std::string str; nix_store_get_version(ctx, store, OBSERVE_STRING(str)); diff --git a/src/libstore-tests/ssh-store.cc b/src/libstore-tests/ssh-store.cc index 335e4ae85..a156da52b 100644 --- a/src/libstore-tests/ssh-store.cc +++ b/src/libstore-tests/ssh-store.cc @@ -27,9 +27,9 @@ TEST(SSHStore, constructConfig) "bar", })); - EXPECT_EQ(config.getUri(), "ssh-ng://me@localhost:2222?remote-program=foo%20bar"); + EXPECT_EQ(config.getReference().render(/*withParams=*/true), "ssh-ng://me@localhost:2222?remote-program=foo%20bar"); config.resetOverridden(); - EXPECT_EQ(config.getUri(), "ssh-ng://me@localhost:2222"); + EXPECT_EQ(config.getReference().render(/*withParams=*/true), "ssh-ng://me@localhost:2222"); } TEST(MountedSSHStore, constructConfig) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index c55239413..0a44b0cf0 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -59,7 +59,7 @@ void BinaryCacheStore::init() if (value != storeDir) throw Error( "binary cache '%s' is for Nix stores with prefix '%s', not '%s'", - config.getUri(), + config.getHumanReadableURI(), value, storeDir); } else if (name == "WantMassQuery") { @@ -133,7 +133,9 @@ void BinaryCacheStore::writeNarInfo(ref narInfo) if (diskCache) diskCache->upsertNarInfo( - config.getUri(), std::string(narInfo->path.hashPart()), std::shared_ptr(narInfo)); + config.getReference().render(/*FIXME withParams=*/false), + std::string(narInfo->path.hashPart()), + std::shared_ptr(narInfo)); } ref BinaryCacheStore::addToStoreCommon( @@ -431,7 +433,7 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink) void BinaryCacheStore::queryPathInfoUncached( const StorePath & storePath, Callback> callback) noexcept { - auto uri = config.getUri(); + auto uri = config.getReference().render(/*FIXME withParams=*/false); auto storePathS = printStorePath(storePath); auto act = std::make_shared( *logger, @@ -531,7 +533,7 @@ void BinaryCacheStore::queryRealisationUncached( void BinaryCacheStore::registerDrvOutput(const Realisation & info) { if (diskCache) - diskCache->upsertRealisation(config.getUri(), info); + diskCache->upsertRealisation(config.getReference().render(/*FIXME withParams=*/false), info); auto 
filePath = realisationsPrefix + "/" + info.id.to_string() + ".doi"; upsertFile(filePath, info.toJSON().dump(), "application/json"); } @@ -559,7 +561,7 @@ std::optional BinaryCacheStore::getBuildLogExact(const StorePath & { auto logPath = "log/" + std::string(baseNameOf(printStorePath(path))); - debug("fetching build log from binary cache '%s/%s'", config.getUri(), logPath); + debug("fetching build log from binary cache '%s/%s'", config.getHumanReadableURI(), logPath); return getFile(logPath); } diff --git a/src/libstore/build/drv-output-substitution-goal.cc b/src/libstore/build/drv-output-substitution-goal.cc index 3f4b787f7..222cd8618 100644 --- a/src/libstore/build/drv-output-substitution-goal.cc +++ b/src/libstore/build/drv-output-substitution-goal.cc @@ -98,7 +98,7 @@ Goal::Co DrvOutputSubstitutionGoal::init() "substituter '%s' has an incompatible realisation for '%s', ignoring.\n" "Local: %s\n" "Remote: %s", - sub->config.getUri(), + sub->config.getHumanReadableURI(), depId.to_string(), worker.store.printStorePath(localOutputInfo->outPath), worker.store.printStorePath(depPath)); diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index e46ad2007..3c0e96152 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -101,7 +101,7 @@ Goal::Co PathSubstitutionGoal::init() } else { printError( "asked '%s' for '%s' but got '%s'", - sub->config.getUri(), + sub->config.getHumanReadableURI(), worker.store.printStorePath(storePath), sub->printStorePath(info->path)); continue; @@ -127,7 +127,7 @@ Goal::Co PathSubstitutionGoal::init() warn( "ignoring substitute for '%s' from '%s', as it's not signed by any of the keys in 'trusted-public-keys'", worker.store.printStorePath(storePath), - sub->config.getUri()); + sub->config.getHumanReadableURI()); continue; } @@ -218,7 +218,9 @@ Goal::Co PathSubstitutionGoal::tryToRun( Finally updateStats([this]() { outPipe.writeSide.close(); }); Activity act( - *logger, actSubstitute, Logger::Fields{worker.store.printStorePath(storePath), sub->config.getUri()}); + *logger, + actSubstitute, + Logger::Fields{worker.store.printStorePath(storePath), sub->config.getHumanReadableURI()}); PushActivity pact(act.id); copyStorePath(*sub, worker.store, subPath, repair, sub->config.isTrusted ? 
NoCheckSigs : CheckSigs); diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index fc19bc1f8..2777b8827 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -98,7 +98,7 @@ protected: auto state(_state.lock()); if (state->enabled && settings.tryFallback) { int t = 60; - printError("disabling binary cache '%s' for %s seconds", config->getUri(), t); + printError("disabling binary cache '%s' for %s seconds", config->getHumanReadableURI(), t); state->enabled = false; state->disabledUntil = std::chrono::steady_clock::now() + std::chrono::seconds(t); } @@ -111,10 +111,10 @@ protected: return; if (std::chrono::steady_clock::now() > state->disabledUntil) { state->enabled = true; - debug("re-enabling binary cache '%s'", config->getUri()); + debug("re-enabling binary cache '%s'", config->getHumanReadableURI()); return; } - throw SubstituterDisabled("substituter '%s' is disabled", config->getUri()); + throw SubstituterDisabled("substituter '%s' is disabled", config->getHumanReadableURI()); } bool fileExists(const std::string & path) override @@ -180,7 +180,8 @@ protected: getFileTransfer()->download(std::move(request), sink); } catch (FileTransferError & e) { if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden) - throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache '%s'", path, config->getUri()); + throw NoSuchBinaryCacheFile( + "file '%s' does not exist in binary cache '%s'", path, config->getHumanReadableURI()); maybeDisable(); throw; } diff --git a/src/libstore/include/nix/store/store-api.hh b/src/libstore/include/nix/store/store-api.hh index 3e32c49a3..ba80c18f2 100644 --- a/src/libstore/include/nix/store/store-api.hh +++ b/src/libstore/include/nix/store/store-api.hh @@ -205,9 +205,20 @@ struct StoreConfig : public StoreDirConfig */ virtual StoreReference getReference() const; - std::string getUri() const + /** + * Get a textual representation of the store reference. + * + * @warning This is only suitable for logging or error messages. + * This will not roundtrip when parsed as a StoreReference. + * Must NOT be used as a cache key or otherwise be relied upon to + * be stable. + * + * Can be implemented by subclasses to make the URI more legible, + * e.g. when some query parameters are necessary to make sense of the URI. 
+ */ + virtual std::string getHumanReadableURI() const { - return getReference().render(); + return getReference().render(/*withParams=*/false); } }; @@ -878,7 +889,7 @@ protected: */ [[noreturn]] void unsupported(const std::string & op) { - throw Unsupported("operation '%s' is not supported by store '%s'", op, config.getUri()); + throw Unsupported("operation '%s' is not supported by store '%s'", op, config.getHumanReadableURI()); } }; diff --git a/src/libstore/include/nix/store/store-cast.hh b/src/libstore/include/nix/store/store-cast.hh index 0d7257602..1e0b14914 100644 --- a/src/libstore/include/nix/store/store-cast.hh +++ b/src/libstore/include/nix/store/store-cast.hh @@ -17,7 +17,7 @@ T & require(Store & store) { auto * castedStore = dynamic_cast(&store); if (!castedStore) - throw UsageError("%s not supported by store '%s'", T::operationName, store.config.getUri()); + throw UsageError("%s not supported by store '%s'", T::operationName, store.config.getHumanReadableURI()); return *castedStore; } diff --git a/src/libstore/include/nix/store/store-reference.hh b/src/libstore/include/nix/store/store-reference.hh index fff3b5c5c..5cf1e9a11 100644 --- a/src/libstore/include/nix/store/store-reference.hh +++ b/src/libstore/include/nix/store/store-reference.hh @@ -73,9 +73,9 @@ struct StoreReference bool operator==(const StoreReference & rhs) const = default; /** - * Render the whole store reference as a URI, including parameters. + * Render the whole store reference as a URI, optionally including parameters. */ - std::string render() const; + std::string render(bool withParams = true) const; /** * Parse a URI into a store reference. diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 3eff339e1..d3446093d 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -53,7 +53,7 @@ RemoteStore::RemoteStore(const Config & config) ref RemoteStore::openConnectionWrapper() { if (failed) - throw Error("opening a connection to remote store '%s' previously failed", config.getUri()); + throw Error("opening a connection to remote store '%s' previously failed", config.getHumanReadableURI()); try { return openConnection(); } catch (...) { @@ -95,7 +95,7 @@ void RemoteStore::initConnection(Connection & conn) if (ex) std::rethrow_exception(ex); } catch (Error & e) { - throw Error("cannot open connection to remote store '%s': %s", config.getUri(), e.what()); + throw Error("cannot open connection to remote store '%s': %s", config.getHumanReadableURI(), e.what()); } setOptions(conn); diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 84eb63f7f..4ad09aff2 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -282,12 +282,15 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore void init() override { - if (auto cacheInfo = diskCache->upToDateCacheExists(config->getUri())) { + /* FIXME: The URI (when used as a cache key) must have several parameters rendered (e.g. the endpoint). + This must be represented as a separate opaque string (probably a URI) that has the right query parameters. 
*/ + auto cacheUri = config->getReference().render(/*withParams=*/false); + if (auto cacheInfo = diskCache->upToDateCacheExists(cacheUri)) { config->wantMassQuery.setDefault(cacheInfo->wantMassQuery); config->priority.setDefault(cacheInfo->priority); } else { BinaryCacheStore::init(); - diskCache->createCache(config->getUri(), config->storeDir, config->wantMassQuery, config->priority); + diskCache->createCache(cacheUri, config->storeDir, config->wantMassQuery, config->priority); } } @@ -525,7 +528,8 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStore sink(*res.data); } else - throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache '%s'", path, config->getUri()); + throw NoSuchBinaryCacheFile( + "file '%s' does not exist in binary cache '%s'", path, config->getHumanReadableURI()); } StorePathSet queryAllValidPaths() override diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 298b23e5e..493137361 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -395,11 +395,14 @@ void Store::querySubstitutablePathInfos(const StorePathCAMap & paths, Substituta "replaced path '%s' with '%s' for substituter '%s'", printStorePath(path.first), sub->printStorePath(subPath), - sub->config.getUri()); + sub->config.getHumanReadableURI()); } else if (sub->storeDir != storeDir) continue; - debug("checking substituter '%s' for path '%s'", sub->config.getUri(), sub->printStorePath(subPath)); + debug( + "checking substituter '%s' for path '%s'", + sub->config.getHumanReadableURI(), + sub->printStorePath(subPath)); try { auto info = sub->queryPathInfo(subPath); @@ -439,7 +442,8 @@ bool Store::isValidPath(const StorePath & storePath) } if (diskCache) { - auto res = diskCache->lookupNarInfo(config.getUri(), std::string(storePath.hashPart())); + auto res = diskCache->lookupNarInfo( + config.getReference().render(/*FIXME withParams=*/false), std::string(storePath.hashPart())); if (res.first != NarInfoDiskCache::oUnknown) { stats.narInfoReadAverted++; auto state_(state.lock()); @@ -455,7 +459,8 @@ bool Store::isValidPath(const StorePath & storePath) if (diskCache && !valid) // FIXME: handle valid = true case. 
- diskCache->upsertNarInfo(config.getUri(), std::string(storePath.hashPart()), 0); + diskCache->upsertNarInfo( + config.getReference().render(/*FIXME withParams=*/false), std::string(storePath.hashPart()), 0); return valid; } @@ -509,7 +514,7 @@ std::optional> Store::queryPathInfoFromClie } if (diskCache) { - auto res = diskCache->lookupNarInfo(config.getUri(), hashPart); + auto res = diskCache->lookupNarInfo(config.getReference().render(/*FIXME withParams=*/false), hashPart); if (res.first != NarInfoDiskCache::oUnknown) { stats.narInfoReadAverted++; { @@ -554,7 +559,7 @@ void Store::queryPathInfo(const StorePath & storePath, CallbackupsertNarInfo(config.getUri(), hashPart, info); + diskCache->upsertNarInfo(config.getReference().render(/*FIXME withParams=*/false), hashPart, info); { auto state_(state.lock()); @@ -578,7 +583,8 @@ void Store::queryRealisation(const DrvOutput & id, CallbacklookupRealisation(config.getUri(), id); + auto [cacheOutcome, maybeCachedRealisation] = + diskCache->lookupRealisation(config.getReference().render(/*FIXME: withParams=*/false), id); switch (cacheOutcome) { case NarInfoDiskCache::oValid: debug("Returning a cached realisation for %s", id.to_string()); @@ -604,9 +610,11 @@ void Store::queryRealisation(const DrvOutput & id, CallbackupsertRealisation(config.getUri(), *info); + diskCache->upsertRealisation( + config.getReference().render(/*FIXME withParams=*/false), *info); else - diskCache->upsertAbsentRealisation(config.getUri(), id); + diskCache->upsertAbsentRealisation( + config.getReference().render(/*FIXME withParams=*/false), id); } (*callbackPtr)(std::shared_ptr(info)); @@ -786,23 +794,26 @@ const Store::Stats & Store::getStats() } static std::string -makeCopyPathMessage(const StoreReference & src, const StoreReference & dst, std::string_view storePath) +makeCopyPathMessage(const StoreConfig & srcCfg, const StoreConfig & dstCfg, std::string_view storePath) { + auto src = srcCfg.getReference(); + auto dst = dstCfg.getReference(); + auto isShorthand = [](const StoreReference & ref) { - if (const auto * specified = std::get_if(&ref.variant)) { - const auto & scheme = specified->scheme; - return (scheme == "local" || scheme == "unix") && specified->authority.empty() && ref.params.empty(); - } - return false; + /* At this point StoreReference **must** be resolved. 
*/ + const auto & specified = std::get(ref.variant); + const auto & scheme = specified.scheme; + return (scheme == "local" || scheme == "unix") && specified.authority.empty() && ref.params.empty(); }; if (isShorthand(src)) - return fmt("copying path '%s' to '%s'", storePath, dst.render()); + return fmt("copying path '%s' to '%s'", storePath, dstCfg.getHumanReadableURI()); if (isShorthand(dst)) - return fmt("copying path '%s' from '%s'", storePath, src.render()); + return fmt("copying path '%s' from '%s'", storePath, srcCfg.getHumanReadableURI()); - return fmt("copying path '%s' from '%s' to '%s'", storePath, src.render(), dst.render()); + return fmt( + "copying path '%s' from '%s' to '%s'", storePath, srcCfg.getHumanReadableURI(), dstCfg.getHumanReadableURI()); } void copyStorePath( @@ -813,15 +824,15 @@ void copyStorePath( if (!repair && dstStore.isValidPath(storePath)) return; - auto srcRef = srcStore.config.getReference(); - auto dstRef = dstStore.config.getReference(); + const auto & srcCfg = srcStore.config; + const auto & dstCfg = dstStore.config; auto storePathS = srcStore.printStorePath(storePath); Activity act( *logger, lvlInfo, actCopyPath, - makeCopyPathMessage(srcRef, dstRef, storePathS), - {storePathS, srcRef.render(), dstRef.render()}); + makeCopyPathMessage(srcCfg, dstCfg, storePathS), + {storePathS, srcCfg.getHumanReadableURI(), dstCfg.getHumanReadableURI()}); PushActivity pact(act.id); auto info = srcStore.queryPathInfo(storePath); @@ -857,7 +868,7 @@ void copyStorePath( throw EndOfFile( "NAR for '%s' fetched from '%s' is incomplete", srcStore.printStorePath(storePath), - srcStore.config.getUri()); + srcStore.config.getHumanReadableURI()); }); dstStore.addToStore(*info, *source, repair, checkSigs); @@ -955,7 +966,7 @@ std::map copyPaths( "replaced path '%s' to '%s' for substituter '%s'", srcStore.printStorePath(storePathForSrc), dstStore.printStorePath(storePathForDst), - dstStore.config.getUri()); + dstStore.config.getHumanReadableURI()); } return storePathForDst; }; @@ -973,15 +984,15 @@ std::map copyPaths( // We can reasonably assume that the copy will happen whenever we // read the path, so log something about that at that point uint64_t total = 0; - auto srcRef = srcStore.config.getReference(); - auto dstRef = dstStore.config.getReference(); + const auto & srcCfg = srcStore.config; + const auto & dstCfg = dstStore.config; auto storePathS = srcStore.printStorePath(missingPath); Activity act( *logger, lvlInfo, actCopyPath, - makeCopyPathMessage(srcRef, dstRef, storePathS), - {storePathS, srcRef.render(), dstRef.render()}); + makeCopyPathMessage(srcCfg, dstCfg, storePathS), + {storePathS, srcCfg.getHumanReadableURI(), dstCfg.getHumanReadableURI()}); PushActivity pact(act.id); LambdaSink progressSink([&](std::string_view data) { diff --git a/src/libstore/store-reference.cc b/src/libstore/store-reference.cc index 13feeae3e..2b8305072 100644 --- a/src/libstore/store-reference.cc +++ b/src/libstore/store-reference.cc @@ -18,7 +18,7 @@ static bool isNonUriPath(const std::string & spec) && spec.find("/") != std::string::npos; } -std::string StoreReference::render() const +std::string StoreReference::render(bool withParams) const { std::string res; @@ -33,7 +33,7 @@ std::string StoreReference::render() const }, variant); - if (!params.empty()) { + if (withParams && !params.empty()) { res += "?"; res += encodeQuery(params); } diff --git a/src/nix/config-check.cc b/src/nix/config-check.cc index 685795487..dc6453e27 100644 --- a/src/nix/config-check.cc +++ 
b/src/nix/config-check.cc @@ -71,7 +71,7 @@ struct CmdConfigCheck : StoreCommand void run(ref store) override { - logger->log("Running checks against store uri: " + store->config.getUri()); + logger->log("Running checks against store uri: " + store->config.getHumanReadableURI()); if (store.dynamic_pointer_cast()) { success &= checkNixInPath(); @@ -171,9 +171,9 @@ struct CmdConfigCheck : StoreCommand { if (auto trustedMay = store->isTrustedClient()) { std::string_view trusted = trustedMay.value() ? "trusted" : "not trusted"; - checkInfo(fmt("You are %s by store uri: %s", trusted, store->config.getUri())); + checkInfo(fmt("You are %s by store uri: %s", trusted, store->config.getHumanReadableURI())); } else { - checkInfo(fmt("Store uri: %s doesn't have a notion of trusted user", store->config.getUri())); + checkInfo(fmt("Store uri: %s doesn't have a notion of trusted user", store->config.getHumanReadableURI())); } } }; diff --git a/src/nix/log.cc b/src/nix/log.cc index 2b697c609..cabe611fa 100644 --- a/src/nix/log.cc +++ b/src/nix/log.cc @@ -48,7 +48,8 @@ struct CmdLog : InstallableCommand for (auto & sub : subs) { auto * logSubP = dynamic_cast(&*sub); if (!logSubP) { - printInfo("Skipped '%s' which does not support retrieving build logs", sub->config.getUri()); + printInfo( + "Skipped '%s' which does not support retrieving build logs", sub->config.getHumanReadableURI()); continue; } auto & logSub = *logSubP; @@ -57,7 +58,7 @@ struct CmdLog : InstallableCommand if (!log) continue; logger->stop(); - printInfo("got build log for '%s' from '%s'", installable->what(), logSub.config.getUri()); + printInfo("got build log for '%s' from '%s'", installable->what(), logSub.config.getHumanReadableURI()); writeFull(getStandardOutput(), *log); return; } diff --git a/src/nix/run.cc b/src/nix/run.cc index cd7784cee..c3d416a6e 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -77,7 +77,9 @@ void execProgramInStore( auto store2 = store.dynamic_pointer_cast(); if (!store2) - throw Error("store '%s' is not a local store so it does not support command execution", store->config.getUri()); + throw Error( + "store '%s' is not a local store so it does not support command execution", + store->config.getHumanReadableURI()); if (store->storeDir != store2->getRealStoreDir()) { Strings helperArgs = { diff --git a/src/nix/store-info.cc b/src/nix/store-info.cc index 92fcef663..4526d9cda 100644 --- a/src/nix/store-info.cc +++ b/src/nix/store-info.cc @@ -24,7 +24,7 @@ struct CmdInfoStore : StoreCommand, MixJSON void run(ref store) override { if (!json) { - notice("Store URL: %s", store->config.getUri()); + notice("Store URL: %s", store->config.getReference().render(/*withParams=*/true)); store->connect(); if (auto version = store->getVersion()) notice("Version: %s", *version); @@ -34,7 +34,7 @@ struct CmdInfoStore : StoreCommand, MixJSON nlohmann::json res; Finally printRes([&]() { printJSON(res); }); - res["url"] = store->config.getUri(); + res["url"] = store->config.getReference().render(/*withParams=*/true); store->connect(); if (auto version = store->getVersion()) res["version"] = *version; From e74ef417db4cc956e56dc0188a8331b8f1ca577d Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 15 Aug 2025 00:55:03 +0300 Subject: [PATCH 184/382] libstore: Fix makeCopyPathMessage Old code completely ignored query parameters and it seems ok to keep that behavior. 
There's a lot of code out there that parses nix's log output, such as
nix-output-monitor, and it can't parse messages like:

> copying path '/nix/store/wha2hi4yhkjmccqhivxavbfspsg1wrsj-source' from 'https://cache.nixos.org' to 'local://'...

Let's not break these tools without a good reason. This goes in line
with what other code does by ignoring parameters in logs.

The issue is just in detecting the shorthand notations for the store
reference - not in printing the URL in logs.

By default the daemon opens a local store with ?path-info-cache-size=0,
so that leads to the erroneous 'local://'.
---
 src/libstore/store-api.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 493137361..2c4d0302c 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -803,7 +803,7 @@ makeCopyPathMessage(const StoreConfig & srcCfg, const StoreConfig & dstCfg, std:
         /* At this point StoreReference **must** be resolved. */
         const auto & specified = std::get(ref.variant);
         const auto & scheme = specified.scheme;
-        return (scheme == "local" || scheme == "unix") && specified.authority.empty() && ref.params.empty();
+        return (scheme == "local" || scheme == "unix") && specified.authority.empty();
     };
 
     if (isShorthand(src))

From 677b1c0f8f9dc3f96a8729eef43c6c54bf82920e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=B6rg=20Thalheim?=
Date: Wed, 13 Aug 2025 23:01:01 +0200
Subject: [PATCH 185/382] prepare merge queues for nix

---
 .github/workflows/ci.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 2f47f13e1..e7e103b63 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -2,6 +2,7 @@ name: "CI"
 
 on:
   pull_request:
+  merge_group:
   push:
   workflow_dispatch:
     inputs:

From cea85e79ee04d428717d76d9f2cab3d5372fa718 Mon Sep 17 00:00:00 2001
From: Sergei Zimmerman
Date: Fri, 15 Aug 2025 16:39:28 +0300
Subject: [PATCH 186/382] libstore: Fix dangling pointers in DerivationGoal constructors

This leads to a use-after-free, because staticOutputHashes returns a
temporary object that dies before we can do a `return *mOutputHash`.

This is most likely the cause for random failures in Hydra [1].

[1]: https://hydra.nixos.org/build/305091330/nixlog/2
---
 src/libstore/build/derivation-goal.cc | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc
index 30a247777..e8523569d 100644
--- a/src/libstore/build/derivation-goal.cc
+++ b/src/libstore/build/derivation-goal.cc
@@ -34,11 +34,10 @@ DerivationGoal::DerivationGoal(
     , drvPath(drvPath)
     , wantedOutput(wantedOutput)
     , outputHash{[&] {
-        if (auto * mOutputHash = get(staticOutputHashes(worker.evalStore, drv), wantedOutput))
+        auto outputHashes = staticOutputHashes(worker.evalStore, drv);
+        if (auto * mOutputHash = get(outputHashes, wantedOutput))
             return *mOutputHash;
-        else
-            throw Error(
-                "derivation '%s' does not have output '%s'", worker.store.printStorePath(drvPath), wantedOutput);
+        throw Error("derivation '%s' does not have output '%s'", worker.store.printStorePath(drvPath), wantedOutput);
     }()}
     , buildMode(buildMode)
 {

From 408c09a1207e1f6bb7367322ceb25d187334673f Mon Sep 17 00:00:00 2001
From: Sergei Zimmerman
Date: Fri, 15 Aug 2025 17:29:17 +0300
Subject: [PATCH 187/382] libutil: Delete footgun overloads of get and getOr

To avoid mistakes like the one in
cea85e79ee04d428717d76d9f2cab3d5372fa718. These overloads are just
asking for trouble.
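To illustrate the footgun, here is a minimal sketch (not part of this
patch) of the call pattern these deleted overloads reject. It assumes
the `get` helper from `nix/util/util.hh` is in scope and uses a
hypothetical `makeOutputs` function standing in for anything that, like
`staticOutputHashes`, returns a map by value:

```cpp
#include <map>
#include <string>

#include "nix/util/util.hh" // assumed include path for nix::get

// Hypothetical stand-in for a function such as staticOutputHashes() that
// returns its result by value, i.e. as a temporary.
static std::map<std::string, int> makeOutputs()
{
    return {{"out", 1}};
}

int main()
{
    // Dangerous pattern: the temporary map returned by makeOutputs() dies at
    // the end of the full expression, so the returned pointer would dangle.
    // With the rvalue-reference overload deleted, this no longer compiles:
    //
    //     auto * bad = nix::get(makeOutputs(), "out"); // error: use of deleted function
    //
    // Safe pattern: keep the container alive in a named variable first.
    auto outputs = makeOutputs();
    auto * good = nix::get(outputs, "out");
    return good ? 0 : 1;
}
```

With the rvalue overloads deleted, the unsafe call is rejected at
compile time instead of producing a dangling pointer at run time.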
---
 src/libutil/include/nix/util/util.hh | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/src/libutil/include/nix/util/util.hh b/src/libutil/include/nix/util/util.hh
index 56041a112..765da8698 100644
--- a/src/libutil/include/nix/util/util.hh
+++ b/src/libutil/include/nix/util/util.hh
@@ -214,6 +214,10 @@ typename T::mapped_type * get(T & map, const typename T::key_type & key)
     return &i->second;
 }
 
+/** Deleted because this is use-after-free liability. Just don't pass temporaries to this overload set. */
+template
+typename T::mapped_type * get(T && map, const typename T::key_type & key) = delete;
+
 /**
  * Get a value for the specified key from an associate container, or a default value if the key isn't present.
  */
@@ -227,6 +231,11 @@ getOr(T & map, const typename T::key_type & key, const typename T::mapped_type &
     return i->second;
 }
 
+/** Deleted because this is use-after-free liability. Just don't pass temporaries to this overload set. */
+template
+const typename T::mapped_type &
+getOr(T && map, const typename T::key_type & key, const typename T::mapped_type & defaultValue) = delete;
+
 /**
  * Remove and return the first item from a container.
  */

From 64c2ee3f457c8621df464f158e54d5c29b281517 Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Fri, 15 Aug 2025 13:48:50 -0400
Subject: [PATCH 188/382] Simplify "Store dir" superclass

We can cut out some gratuitous inheritance as follows:

- `MixStoreDirMethods` -> `StoreDirConfig`

- `StoreDirConfig` deleted because no longer needed. It is just folded
  into `StoreConfig`.

- `StoreDirConfigBase` -> `StoreConfigBase`: same trick still needed,
  but now it is for `StoreConfig`, not `StoreDirConfig`

Here's how we got here:

1. I once factored out `StoreDirConfig` in #6236.

2. I factored out `MixStoreDirMethods` in #13154.

But I didn't realize at point (2) that we didn't need `StoreDirConfig`
anymore: all uses of `StoreDirConfig` could instead be uses of
`MixStoreDirMethods`. Now I am doing that, and renaming
`MixStoreDirMethods` to just `StoreDirConfig` to reduce churn.
---
 src/libstore/include/nix/store/store-api.hh   | 32 +++++++++++--
 .../include/nix/store/store-dir-config.hh     | 48 +++----------------
 src/libstore/store-api.cc                     | 14 ++++--
 src/libstore/store-dir-config.cc              | 32 +++++--------
 4 files changed, 59 insertions(+), 67 deletions(-)

diff --git a/src/libstore/include/nix/store/store-api.hh b/src/libstore/include/nix/store/store-api.hh
index ba80c18f2..528375851 100644
--- a/src/libstore/include/nix/store/store-api.hh
+++ b/src/libstore/include/nix/store/store-api.hh
@@ -81,6 +81,25 @@ struct MissingPaths
     uint64_t narSize{0};
 };
 
+/**
+ * Need to make this a separate class so I can get the right
+ * initialization order in the constructor for `StoreConfig`.
+ */
+struct StoreConfigBase : Config
+{
+    using Config::Config;
+
+    const PathSetting storeDir_{
+        this,
+        settings.nixStore,
+        "store",
+        R"(
+          Logical location of the Nix store, usually
+          `/nix/store`. Note that you can only copy store paths
+          between stores if they have the same `store` setting.
+        )"};
+};
+
 /**
  * About the class hierarchy of the store types:
  *
@@ -107,10 +126,17 @@ struct MissingPaths
  * ```
  * cpp static RegisterStoreImplementation regStore;
  * ```
+ *
+ * @note The order of `StoreConfigBase` and then `StoreConfig` is
+ * very important. This ensures that `StoreConfigBase::storeDir_`
+ * is initialized before we have our one chance (because references are
+ * immutable) to initialize `StoreConfig::storeDir`.
  */
-struct StoreConfig : public StoreDirConfig
+struct StoreConfig : public StoreConfigBase, public StoreDirConfig
 {
-    using StoreDirConfig::StoreDirConfig;
+    using Params = StoreReference::Params;
+
+    StoreConfig(const Params & params);
 
     StoreConfig() = delete;
 
@@ -233,7 +259,7 @@ struct StoreConfig : public StoreDirConfig
  * underlying resource, which could be an external process (daemon
  * server), file system state, etc.
  */
-class Store : public std::enable_shared_from_this, public MixStoreDirMethods
+class Store : public std::enable_shared_from_this, public StoreDirConfig
 {
 public:
 
diff --git a/src/libstore/include/nix/store/store-dir-config.hh b/src/libstore/include/nix/store/store-dir-config.hh
index bc2944b0b..2dfd601f1 100644
--- a/src/libstore/include/nix/store/store-dir-config.hh
+++ b/src/libstore/include/nix/store/store-dir-config.hh
@@ -18,15 +18,17 @@ MakeError(BadStorePath, Error);
 MakeError(BadStorePathName, BadStorePath);
 
 /**
- * @todo This should just be part of `StoreDirConfig`. However, it would
- * be a huge amount of churn if `Store` didn't have these methods
+ * @todo This should just be inherited by `StoreConfig`. However, it
+ * would be a huge amount of churn if `Store` didn't have these methods
  * anymore, forcing a bunch of code to go from `store.method(...)` to
  * `store.config.method(...)`.
  *
- * So we instead pull out the methods into their own mix-in, so can put
- * them directly on the Store too.
+ * @todo this should not have "config" in its name, because it no longer
+ * uses the configuration system for `storeDir` --- in fact, `storeDir`
+ * isn't even owned, but a mere reference. But doing that rename would
+ * cause a bunch of churn.
  */
-struct MixStoreDirMethods
+struct StoreDirConfig
 {
     const Path & storeDir;
 
@@ -96,40 +98,4 @@ struct MixStoreDirMethods
     PathFilter & filter = defaultPathFilter) const;
 };
 
-/**
- * Need to make this a separate class so I can get the right
- * initialization order in the constructor for `StoreDirConfig`.
- */
-struct StoreDirConfigBase : Config
-{
-    using Config::Config;
-
-    const PathSetting storeDir_{
-        this,
-        settings.nixStore,
-        "store",
-        R"(
-          Logical location of the Nix store, usually
-          `/nix/store`. Note that you can only copy store paths
-          between stores if they have the same `store` setting.
-        )"};
-};
-
-/**
- * The order of `StoreDirConfigBase` and then `MixStoreDirMethods` is
- * very important. This ensures that `StoreDirConfigBase::storeDir_`
- * is initialized before we have our one chance (because references are
- * immutable) to initialize `MixStoreDirMethods::storeDir`.
- */ -struct StoreDirConfig : StoreDirConfigBase, MixStoreDirMethods -{ - using Params = StringMap; - - StoreDirConfig(const Params & params); - - StoreDirConfig() = delete; - - virtual ~StoreDirConfig() = default; -}; - } // namespace nix diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 2c4d0302c..dd8c39557 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -27,12 +27,18 @@ using json = nlohmann::json; namespace nix { -bool MixStoreDirMethods::isInStore(PathView path) const +StoreConfig::StoreConfig(const Params & params) + : StoreConfigBase(params) + , StoreDirConfig{storeDir_} +{ +} + +bool StoreDirConfig::isInStore(PathView path) const { return isInDir(path, storeDir); } -std::pair MixStoreDirMethods::toStorePath(PathView path) const +std::pair StoreDirConfig::toStorePath(PathView path) const { if (!isInStore(path)) throw Error("path '%1%' is not in the Nix store", path); @@ -293,7 +299,7 @@ StringSet Store::Config::getDefaultSystemFeatures() } Store::Store(const Store::Config & config) - : MixStoreDirMethods{config} + : StoreDirConfig{config} , config{config} , state({(size_t) config.pathInfoCacheSize}) { @@ -1082,7 +1088,7 @@ decodeValidPathInfo(const Store & store, std::istream & str, std::optional(std::move(info)); } -std::string MixStoreDirMethods::showPaths(const StorePathSet & paths) const +std::string StoreDirConfig::showPaths(const StorePathSet & paths) const { std::string s; for (auto & i : paths) { diff --git a/src/libstore/store-dir-config.cc b/src/libstore/store-dir-config.cc index 62f08d819..8c756ff58 100644 --- a/src/libstore/store-dir-config.cc +++ b/src/libstore/store-dir-config.cc @@ -6,7 +6,7 @@ namespace nix { -StorePath MixStoreDirMethods::parseStorePath(std::string_view path) const +StorePath StoreDirConfig::parseStorePath(std::string_view path) const { // On Windows, `/nix/store` is not a canonical path. More broadly it // is unclear whether this function should be using the native @@ -25,7 +25,7 @@ StorePath MixStoreDirMethods::parseStorePath(std::string_view path) const return StorePath(baseNameOf(p)); } -std::optional MixStoreDirMethods::maybeParseStorePath(std::string_view path) const +std::optional StoreDirConfig::maybeParseStorePath(std::string_view path) const { try { return parseStorePath(path); @@ -34,12 +34,12 @@ std::optional MixStoreDirMethods::maybeParseStorePath(std::string_vie } } -bool MixStoreDirMethods::isStorePath(std::string_view path) const +bool StoreDirConfig::isStorePath(std::string_view path) const { return (bool) maybeParseStorePath(path); } -StorePathSet MixStoreDirMethods::parseStorePathSet(const PathSet & paths) const +StorePathSet StoreDirConfig::parseStorePathSet(const PathSet & paths) const { StorePathSet res; for (auto & i : paths) @@ -47,12 +47,12 @@ StorePathSet MixStoreDirMethods::parseStorePathSet(const PathSet & paths) const return res; } -std::string MixStoreDirMethods::printStorePath(const StorePath & path) const +std::string StoreDirConfig::printStorePath(const StorePath & path) const { return (storeDir + "/").append(path.to_string()); } -PathSet MixStoreDirMethods::printStorePathSet(const StorePathSet & paths) const +PathSet StoreDirConfig::printStorePathSet(const StorePathSet & paths) const { PathSet res; for (auto & i : paths) @@ -69,7 +69,7 @@ also update the user-visible behavior, please update the specification to match. 
*/ -StorePath MixStoreDirMethods::makeStorePath(std::string_view type, std::string_view hash, std::string_view name) const +StorePath StoreDirConfig::makeStorePath(std::string_view type, std::string_view hash, std::string_view name) const { /* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */ auto s = std::string(type) + ":" + std::string(hash) + ":" + storeDir + ":" + std::string(name); @@ -77,12 +77,12 @@ StorePath MixStoreDirMethods::makeStorePath(std::string_view type, std::string_v return StorePath(h, name); } -StorePath MixStoreDirMethods::makeStorePath(std::string_view type, const Hash & hash, std::string_view name) const +StorePath StoreDirConfig::makeStorePath(std::string_view type, const Hash & hash, std::string_view name) const { return makeStorePath(type, hash.to_string(HashFormat::Base16, true), name); } -StorePath MixStoreDirMethods::makeOutputPath(std::string_view id, const Hash & hash, std::string_view name) const +StorePath StoreDirConfig::makeOutputPath(std::string_view id, const Hash & hash, std::string_view name) const { return makeStorePath("output:" + std::string{id}, hash, outputPathName(name, id)); } @@ -90,7 +90,7 @@ StorePath MixStoreDirMethods::makeOutputPath(std::string_view id, const Hash & h /* Stuff the references (if any) into the type. This is a bit hacky, but we can't put them in, say, (per the grammar above) since that would be ambiguous. */ -static std::string makeType(const MixStoreDirMethods & store, std::string && type, const StoreReferences & references) +static std::string makeType(const StoreDirConfig & store, std::string && type, const StoreReferences & references) { for (auto & i : references.others) { type += ":"; @@ -101,7 +101,7 @@ static std::string makeType(const MixStoreDirMethods & store, std::string && typ return std::move(type); } -StorePath MixStoreDirMethods::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const +StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const { if (info.method == FileIngestionMethod::Git && !(info.hash.algo == HashAlgorithm::SHA1 || info.hash.algo == HashAlgorithm::SHA256)) { @@ -126,7 +126,7 @@ StorePath MixStoreDirMethods::makeFixedOutputPath(std::string_view name, const F } StorePath -MixStoreDirMethods::makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const +StoreDirConfig::makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const { // New template return std::visit( @@ -148,7 +148,7 @@ MixStoreDirMethods::makeFixedOutputPathFromCA(std::string_view name, const Conte ca.raw); } -std::pair MixStoreDirMethods::computeStorePath( +std::pair StoreDirConfig::computeStorePath( std::string_view name, const SourcePath & path, ContentAddressMethod method, @@ -173,10 +173,4 @@ std::pair MixStoreDirMethods::computeStorePath( }; } -StoreDirConfig::StoreDirConfig(const Params & params) - : StoreDirConfigBase(params) - , MixStoreDirMethods{storeDir_} -{ -} - } // namespace nix From 0ef818dd927f538769ee68ac3d91347f273842ef Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 23 May 2025 15:29:55 -0400 Subject: [PATCH 189/382] More flexible typing for `get` in `util.hh` This is good for e.g. `std::string_view` and `StringMap`. 
Needed by #11139 Co-authored-by: Sergei Zimmerman <145775305+xokdvium@users.noreply.github.com> --- src/libutil/include/nix/util/util.hh | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/src/libutil/include/nix/util/util.hh b/src/libutil/include/nix/util/util.hh index 765da8698..dd6294c2a 100644 --- a/src/libutil/include/nix/util/util.hh +++ b/src/libutil/include/nix/util/util.hh @@ -196,8 +196,8 @@ std::pair getLine(std::string_view s); /** * Get a value for the specified key from an associate container. */ -template -const typename T::mapped_type * get(const T & map, const typename T::key_type & key) +template +const typename T::mapped_type * get(const T & map, K & key) { auto i = map.find(key); if (i == map.end()) @@ -205,8 +205,8 @@ const typename T::mapped_type * get(const T & map, const typename T::key_type & return &i->second; } -template -typename T::mapped_type * get(T & map, const typename T::key_type & key) +template +typename T::mapped_type * get(T & map, K & key) { auto i = map.find(key); if (i == map.end()) @@ -221,9 +221,8 @@ typename T::mapped_type * get(T && map, const typename T::key_type & key) = dele /** * Get a value for the specified key from an associate container, or a default value if the key isn't present. */ -template -const typename T::mapped_type & -getOr(T & map, const typename T::key_type & key, const typename T::mapped_type & defaultValue) +template +const typename T::mapped_type & getOr(T & map, K & key, const typename T::mapped_type & defaultValue) { auto i = map.find(key); if (i == map.end()) From 79fb9b0d3c83df87deee6b1a721b43f1adaf05f2 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 15 Aug 2025 14:55:47 -0400 Subject: [PATCH 190/382] Make a few more things use `StoreDirConfig` instead of `Store` --- .../include/nix/store/tests/protocol.hh | 10 +++++++--- src/libstore-tests/common-protocol.cc | 4 ++-- src/libstore-tests/serve-protocol.cc | 2 +- src/libstore-tests/worker-protocol.cc | 2 +- src/libstore/build/derivation-building-goal.cc | 5 +++-- .../nix/store/build/derivation-building-goal.hh | 3 --- .../nix/store/build/derivation-building-misc.hh | 5 +++-- src/libstore/include/nix/store/nar-info.hh | 12 ++++++------ src/libstore/include/nix/store/path-info.hh | 16 +++++++++------- src/libstore/nar-info.cc | 8 ++++---- src/libstore/path-info.cc | 16 +++++++++------- 11 files changed, 45 insertions(+), 38 deletions(-) diff --git a/src/libstore-test-support/include/nix/store/tests/protocol.hh b/src/libstore-test-support/include/nix/store/tests/protocol.hh index 3d7a9b073..5b57c6585 100644 --- a/src/libstore-test-support/include/nix/store/tests/protocol.hh +++ b/src/libstore-test-support/include/nix/store/tests/protocol.hh @@ -10,7 +10,7 @@ namespace nix { template -class ProtoTest : public CharacterizationTest, public LibStoreTest +class ProtoTest : public CharacterizationTest { std::filesystem::path unitTestData = getUnitTestData() / protocolDir; @@ -18,6 +18,10 @@ class ProtoTest : public CharacterizationTest, public LibStoreTest { return unitTestData / (std::string{testStem + ".bin"}); } + +public: + Path storeDir = "/nix/store"; + StoreDirConfig store{storeDir}; }; template @@ -34,7 +38,7 @@ public: T got = ({ StringSource from{encoded}; Proto::template Serialise::read( - *LibStoreTest::store, + this->store, typename Proto::ReadConn{ .from = from, .version = version, @@ -54,7 +58,7 @@ public: CharacterizationTest::writeTest(testStem, [&]() { StringSink to; Proto::template Serialise::write( - *LibStoreTest::store, + 
this->store, typename Proto::WriteConn{ .to = to, .version = version, diff --git a/src/libstore-tests/common-protocol.cc b/src/libstore-tests/common-protocol.cc index 2b039180c..35fca165d 100644 --- a/src/libstore-tests/common-protocol.cc +++ b/src/libstore-tests/common-protocol.cc @@ -25,7 +25,7 @@ public: CharacterizationTest::readTest(testStem, [&](const auto & encoded) { T got = ({ StringSource from{encoded}; - CommonProto::Serialise::read(*store, CommonProto::ReadConn{.from = from}); + CommonProto::Serialise::read(store, CommonProto::ReadConn{.from = from}); }); ASSERT_EQ(got, expected); @@ -40,7 +40,7 @@ public: { CharacterizationTest::writeTest(testStem, [&]() -> std::string { StringSink to; - CommonProto::Serialise::write(*store, CommonProto::WriteConn{.to = to}, decoded); + CommonProto::Serialise::write(store, CommonProto::WriteConn{.to = to}, decoded); return to.s; }); } diff --git a/src/libstore-tests/serve-protocol.cc b/src/libstore-tests/serve-protocol.cc index 62acb061d..01d6058cb 100644 --- a/src/libstore-tests/serve-protocol.cc +++ b/src/libstore-tests/serve-protocol.cc @@ -275,7 +275,7 @@ VERSIONED_CHARACTERIZATION_TEST( }), ({ ValidPathInfo info{ - *LibStoreTest::store, + store, "foo", FixedOutputInfo{ .method = FileIngestionMethod::NixArchive, diff --git a/src/libstore-tests/worker-protocol.cc b/src/libstore-tests/worker-protocol.cc index 28190cc9d..a761c96dd 100644 --- a/src/libstore-tests/worker-protocol.cc +++ b/src/libstore-tests/worker-protocol.cc @@ -516,7 +516,7 @@ VERSIONED_CHARACTERIZATION_TEST( }), ({ ValidPathInfo info{ - *LibStoreTest::store, + store, "foo", FixedOutputInfo{ .method = FileIngestionMethod::NixArchive, diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 559d77ba0..21dacff7d 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -132,7 +132,7 @@ struct value_comparison } }; -std::string showKnownOutputs(Store & store, const Derivation & drv) +std::string showKnownOutputs(const StoreDirConfig & store, const Derivation & drv) { std::string msg; StorePathSet expectedOutputPaths; @@ -743,7 +743,8 @@ Goal::Co DerivationBuildingGoal::tryToBuild() #endif } -void runPostBuildHook(Store & store, Logger & logger, const StorePath & drvPath, const StorePathSet & outputPaths) +void runPostBuildHook( + const StoreDirConfig & store, Logger & logger, const StorePath & drvPath, const StorePathSet & outputPaths) { auto hook = settings.postBuildHook; if (hook == "") diff --git a/src/libstore/include/nix/store/build/derivation-building-goal.hh b/src/libstore/include/nix/store/build/derivation-building-goal.hh index 029288998..b16a1300c 100644 --- a/src/libstore/include/nix/store/build/derivation-building-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-building-goal.hh @@ -21,9 +21,6 @@ struct DerivationBuilder; typedef enum { rpAccept, rpDecline, rpPostpone } HookReply; -/** Used internally */ -void runPostBuildHook(Store & store, Logger & logger, const StorePath & drvPath, const StorePathSet & outputPaths); - /** * A goal for building a derivation. 
Substitution, (or any other method of * obtaining the outputs) will not be attempted, so it is the calling goal's diff --git a/src/libstore/include/nix/store/build/derivation-building-misc.hh b/src/libstore/include/nix/store/build/derivation-building-misc.hh index 46577919b..131eaf89a 100644 --- a/src/libstore/include/nix/store/build/derivation-building-misc.hh +++ b/src/libstore/include/nix/store/build/derivation-building-misc.hh @@ -50,11 +50,12 @@ struct InitialOutput std::optional known; }; -void runPostBuildHook(Store & store, Logger & logger, const StorePath & drvPath, const StorePathSet & outputPaths); +void runPostBuildHook( + const StoreDirConfig & store, Logger & logger, const StorePath & drvPath, const StorePathSet & outputPaths); /** * Format the known outputs of a derivation for use in error messages. */ -std::string showKnownOutputs(Store & store, const Derivation & drv); +std::string showKnownOutputs(const StoreDirConfig & store, const Derivation & drv); } // namespace nix diff --git a/src/libstore/include/nix/store/nar-info.hh b/src/libstore/include/nix/store/nar-info.hh index 805d46248..39d75b0a9 100644 --- a/src/libstore/include/nix/store/nar-info.hh +++ b/src/libstore/include/nix/store/nar-info.hh @@ -7,7 +7,7 @@ namespace nix { -class Store; +struct StoreDirConfig; struct NarInfo : ValidPathInfo { @@ -18,7 +18,7 @@ struct NarInfo : ValidPathInfo NarInfo() = delete; - NarInfo(const Store & store, std::string name, ContentAddressWithReferences ca, Hash narHash) + NarInfo(const StoreDirConfig & store, std::string name, ContentAddressWithReferences ca, Hash narHash) : ValidPathInfo(store, std::move(name), std::move(ca), narHash) { } @@ -33,16 +33,16 @@ struct NarInfo : ValidPathInfo { } - NarInfo(const Store & store, const std::string & s, const std::string & whence); + NarInfo(const StoreDirConfig & store, const std::string & s, const std::string & whence); bool operator==(const NarInfo &) const = default; // TODO libc++ 16 (used by darwin) missing `std::optional::operator <=>`, can't do yet // auto operator <=>(const NarInfo &) const = default; - std::string to_string(const Store & store) const; + std::string to_string(const StoreDirConfig & store) const; - nlohmann::json toJSON(const Store & store, bool includeImpureInfo, HashFormat hashFormat) const override; - static NarInfo fromJSON(const Store & store, const StorePath & path, const nlohmann::json & json); + nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const override; + static NarInfo fromJSON(const StoreDirConfig & store, const StorePath & path, const nlohmann::json & json); }; } // namespace nix diff --git a/src/libstore/include/nix/store/path-info.hh b/src/libstore/include/nix/store/path-info.hh index 91244361b..9f341198c 100644 --- a/src/libstore/include/nix/store/path-info.hh +++ b/src/libstore/include/nix/store/path-info.hh @@ -12,6 +12,7 @@ namespace nix { class Store; +struct StoreDirConfig; struct SubstitutablePathInfo { @@ -116,8 +117,8 @@ struct UnkeyedValidPathInfo * @param includeImpureInfo If true, variable elements such as the * registration time are included. 
*/ - virtual nlohmann::json toJSON(const Store & store, bool includeImpureInfo, HashFormat hashFormat) const; - static UnkeyedValidPathInfo fromJSON(const Store & store, const nlohmann::json & json); + virtual nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const; + static UnkeyedValidPathInfo fromJSON(const StoreDirConfig & store, const nlohmann::json & json); }; struct ValidPathInfo : UnkeyedValidPathInfo @@ -135,7 +136,7 @@ struct ValidPathInfo : UnkeyedValidPathInfo * speaking superfluous, but might prevent endless/excessive data * attacks. */ - std::string fingerprint(const Store & store) const; + std::string fingerprint(const StoreDirConfig & store) const; void sign(const Store & store, const Signer & signer); void sign(const Store & store, const std::vector> & signers); @@ -150,7 +151,7 @@ struct ValidPathInfo : UnkeyedValidPathInfo /** * @return true iff the path is verifiably content-addressed. */ - bool isContentAddressed(const Store & store) const; + bool isContentAddressed(const StoreDirConfig & store) const; static const size_t maxSigs = std::numeric_limits::max(); @@ -159,12 +160,12 @@ struct ValidPathInfo : UnkeyedValidPathInfo * produced by one of the specified keys, or maxSigs if the path * is content-addressed. */ - size_t checkSignatures(const Store & store, const PublicKeys & publicKeys) const; + size_t checkSignatures(const StoreDirConfig & store, const PublicKeys & publicKeys) const; /** * Verify a single signature. */ - bool checkSignature(const Store & store, const PublicKeys & publicKeys, const std::string & sig) const; + bool checkSignature(const StoreDirConfig & store, const PublicKeys & publicKeys, const std::string & sig) const; /** * References as store path basenames, including a self reference if it has one. 
@@ -178,7 +179,8 @@ struct ValidPathInfo : UnkeyedValidPathInfo : UnkeyedValidPathInfo(info) , path(path) {}; - ValidPathInfo(const Store & store, std::string_view name, ContentAddressWithReferences && ca, Hash narHash); + ValidPathInfo( + const StoreDirConfig & store, std::string_view name, ContentAddressWithReferences && ca, Hash narHash); }; static_assert(std::is_move_assignable_v); diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc index 783ec7d34..1e7c48287 100644 --- a/src/libstore/nar-info.cc +++ b/src/libstore/nar-info.cc @@ -6,7 +6,7 @@ namespace nix { -NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence) +NarInfo::NarInfo(const StoreDirConfig & store, const std::string & s, const std::string & whence) : ValidPathInfo(StorePath(StorePath::dummy), Hash(Hash::dummy)) // FIXME: hack { unsigned line = 1; @@ -102,7 +102,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & } } -std::string NarInfo::to_string(const Store & store) const +std::string NarInfo::to_string(const StoreDirConfig & store) const { std::string res; res += "StorePath: " + store.printStorePath(path) + "\n"; @@ -130,7 +130,7 @@ std::string NarInfo::to_string(const Store & store) const return res; } -nlohmann::json NarInfo::toJSON(const Store & store, bool includeImpureInfo, HashFormat hashFormat) const +nlohmann::json NarInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const { using nlohmann::json; @@ -150,7 +150,7 @@ nlohmann::json NarInfo::toJSON(const Store & store, bool includeImpureInfo, Hash return jsonObject; } -NarInfo NarInfo::fromJSON(const Store & store, const StorePath & path, const nlohmann::json & json) +NarInfo NarInfo::fromJSON(const StoreDirConfig & store, const StorePath & path, const nlohmann::json & json) { using nlohmann::detail::value_t; diff --git a/src/libstore/path-info.cc b/src/libstore/path-info.cc index ad4123e8f..e3de5949d 100644 --- a/src/libstore/path-info.cc +++ b/src/libstore/path-info.cc @@ -22,7 +22,7 @@ GENERATE_CMP_EXT( me->sigs, me->ca); -std::string ValidPathInfo::fingerprint(const Store & store) const +std::string ValidPathInfo::fingerprint(const StoreDirConfig & store) const { if (narSize == 0) throw Error( @@ -81,7 +81,7 @@ std::optional ValidPathInfo::contentAddressWithRef } } -bool ValidPathInfo::isContentAddressed(const Store & store) const +bool ValidPathInfo::isContentAddressed(const StoreDirConfig & store) const { auto fullCaOpt = contentAddressWithReferences(); @@ -98,7 +98,7 @@ bool ValidPathInfo::isContentAddressed(const Store & store) const return res; } -size_t ValidPathInfo::checkSignatures(const Store & store, const PublicKeys & publicKeys) const +size_t ValidPathInfo::checkSignatures(const StoreDirConfig & store, const PublicKeys & publicKeys) const { if (isContentAddressed(store)) return maxSigs; @@ -110,7 +110,8 @@ size_t ValidPathInfo::checkSignatures(const Store & store, const PublicKeys & pu return good; } -bool ValidPathInfo::checkSignature(const Store & store, const PublicKeys & publicKeys, const std::string & sig) const +bool ValidPathInfo::checkSignature( + const StoreDirConfig & store, const PublicKeys & publicKeys, const std::string & sig) const { return verifyDetached(fingerprint(store), sig, publicKeys); } @@ -124,7 +125,7 @@ Strings ValidPathInfo::shortRefs() const } ValidPathInfo::ValidPathInfo( - const Store & store, std::string_view name, ContentAddressWithReferences && ca, Hash narHash) + const StoreDirConfig & store, 
std::string_view name, ContentAddressWithReferences && ca, Hash narHash) : UnkeyedValidPathInfo(narHash) , path(store.makeFixedOutputPathFromCA(name, ca)) { @@ -144,7 +145,8 @@ ValidPathInfo::ValidPathInfo( std::move(ca).raw); } -nlohmann::json UnkeyedValidPathInfo::toJSON(const Store & store, bool includeImpureInfo, HashFormat hashFormat) const +nlohmann::json +UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const { using nlohmann::json; @@ -176,7 +178,7 @@ nlohmann::json UnkeyedValidPathInfo::toJSON(const Store & store, bool includeImp return jsonObject; } -UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const Store & store, const nlohmann::json & _json) +UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store, const nlohmann::json & _json) { UnkeyedValidPathInfo res{ Hash(Hash::dummy), From 14e355d87d57ed8a2d66f84f0bb29fb621672b5b Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 14 Aug 2025 10:26:23 -0400 Subject: [PATCH 191/382] Remove `InitialOutput::wanted` No derivation goal type has a notion of variable wanted outputs any more. They either want them all, or they just care about a single output, in which case we would just store this information for the one output in question. --- src/libstore/build/derivation-building-goal.cc | 7 +------ .../include/nix/store/build/derivation-building-misc.hh | 1 - src/libstore/unix/build/derivation-builder.cc | 4 ++-- 3 files changed, 3 insertions(+), 9 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 559d77ba0..a4aace163 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -157,9 +157,7 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() we care about all outputs. */ auto outputHashes = staticOutputHashes(worker.evalStore, *drv); for (auto & [outputName, outputHash] : outputHashes) { - InitialOutput v{ - .wanted = true, // Will be refined later - .outputHash = outputHash}; + InitialOutput v{.outputHash = outputHash}; /* TODO we might want to also allow randomizing the paths for regular CA derivations, e.g. 
for sake of checking @@ -1202,7 +1200,6 @@ std::pair DerivationBuildingGoal::checkPathValidity() // this is an invalid output, gets caught with (!wantedOutputsLeft.empty()) continue; auto & info = *initialOutput; - info.wanted = true; if (i.second) { auto outputPath = *i.second; info.known = { @@ -1237,8 +1234,6 @@ std::pair DerivationBuildingGoal::checkPathValidity() bool allValid = true; for (auto & [_, status] : initialOutputs) { - if (!status.wanted) - continue; if (!status.known || !status.known->isValid()) { allValid = false; break; diff --git a/src/libstore/include/nix/store/build/derivation-building-misc.hh b/src/libstore/include/nix/store/build/derivation-building-misc.hh index 46577919b..9c85e5714 100644 --- a/src/libstore/include/nix/store/build/derivation-building-misc.hh +++ b/src/libstore/include/nix/store/build/derivation-building-misc.hh @@ -45,7 +45,6 @@ struct InitialOutputStatus struct InitialOutput { - bool wanted; Hash outputHash; std::optional known; }; diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index ed493b8f4..190352985 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -1481,8 +1481,8 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() auto & initialInfo = *initialOutput; /* Don't register if already valid, and not checking */ - initialInfo.wanted = buildMode == bmCheck || !(initialInfo.known && initialInfo.known->isValid()); - if (!initialInfo.wanted) { + bool wanted = buildMode == bmCheck || !(initialInfo.known && initialInfo.known->isValid()); + if (!wanted) { outputReferencesIfUnregistered.insert_or_assign( outputName, AlreadyRegistered{.path = initialInfo.known->path}); continue; From 4bc9ae67c70d73fdffaa240202a74ad759b5855c Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 14 Aug 2025 10:44:13 -0400 Subject: [PATCH 192/382] Give `DerivationBuilder` a `LocalStore` not `Store` This is just more honest, since we downcasted it to `LocalStore` in many places. We had the downcast before because it wasn't needed in the hook case, just the local building case, but now that `DerivationBuilder` is separated and just does the building case, we have formalized the boundary where the single downcast should occur. --- .../build/derivation-building-goal.cc | 5 ++- .../unix/build/chroot-derivation-builder.cc | 4 +- .../unix/build/darwin-derivation-builder.cc | 2 +- src/libstore/unix/build/derivation-builder.cc | 41 +++++++------------ .../unix/build/linux-derivation-builder.cc | 4 +- .../nix/store/build/derivation-builder.hh | 2 +- 6 files changed, 24 insertions(+), 34 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index a4aace163..2b7622b3c 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -673,10 +673,13 @@ Goal::Co DerivationBuildingGoal::tryToBuild() } }; + auto * localStoreP = dynamic_cast(&worker.store); + assert(localStoreP); + /* If we have to wait and retry (see below), then `builder` will already be created, so we don't need to create it again. 
*/ builder = makeDerivationBuilder( - worker.store, + *localStoreP, std::make_unique(*this, builder), DerivationBuilderParams{ drvPath, diff --git a/src/libstore/unix/build/chroot-derivation-builder.cc b/src/libstore/unix/build/chroot-derivation-builder.cc index ccf4f8e20..669e3ffb7 100644 --- a/src/libstore/unix/build/chroot-derivation-builder.cc +++ b/src/libstore/unix/build/chroot-derivation-builder.cc @@ -5,7 +5,7 @@ namespace nix { struct ChrootDerivationBuilder : virtual DerivationBuilderImpl { ChrootDerivationBuilder( - Store & store, std::unique_ptr miscMethods, DerivationBuilderParams params) + LocalStore & store, std::unique_ptr miscMethods, DerivationBuilderParams params) : DerivationBuilderImpl{store, std::move(miscMethods), std::move(params)} { } @@ -178,7 +178,7 @@ struct ChrootDerivationBuilder : virtual DerivationBuilderImpl continue; if (buildMode != bmCheck && status.known->isValid()) continue; - auto p = store.toRealPath(status.known->path); + auto p = store.Store::toRealPath(status.known->path); if (pathExists(chrootRootDir + p)) std::filesystem::rename((chrootRootDir + p), p); } diff --git a/src/libstore/unix/build/darwin-derivation-builder.cc b/src/libstore/unix/build/darwin-derivation-builder.cc index d25325126..5889ecf8f 100644 --- a/src/libstore/unix/build/darwin-derivation-builder.cc +++ b/src/libstore/unix/build/darwin-derivation-builder.cc @@ -21,7 +21,7 @@ struct DarwinDerivationBuilder : DerivationBuilderImpl bool useSandbox; DarwinDerivationBuilder( - Store & store, + LocalStore & store, std::unique_ptr miscMethods, DerivationBuilderParams params, bool useSandbox) diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 190352985..7520933c4 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -61,14 +61,14 @@ class DerivationBuilderImpl : public DerivationBuilder, public DerivationBuilder { protected: - Store & store; + LocalStore & store; std::unique_ptr miscMethods; public: DerivationBuilderImpl( - Store & store, std::unique_ptr miscMethods, DerivationBuilderParams params) + LocalStore & store, std::unique_ptr miscMethods, DerivationBuilderParams params) : DerivationBuilderParams{std::move(params)} , store{store} , miscMethods{std::move(miscMethods)} @@ -424,13 +424,6 @@ void handleDiffHook( const Path DerivationBuilderImpl::homeDir = "/homeless-shelter"; -static LocalStore & getLocalStore(Store & store) -{ - auto p = dynamic_cast(&store); - assert(p); - return *p; -} - void DerivationBuilderImpl::killSandbox(bool getStats) { if (buildUser) { @@ -631,10 +624,9 @@ bool DerivationBuilderImpl::decideWhetherDiskFull() so, we don't mark this build as a permanent failure. 
*/ #if HAVE_STATVFS { - auto & localStore = getLocalStore(store); uint64_t required = 8ULL * 1024 * 1024; // FIXME: make configurable struct statvfs st; - if (statvfs(localStore.config->realStoreDir.get().c_str(), &st) == 0 + if (statvfs(store.config->realStoreDir.get().c_str(), &st) == 0 && (uint64_t) st.f_bavail * st.f_bsize < required) diskFull = true; if (statvfs(tmpDir.c_str(), &st) == 0 && (uint64_t) st.f_bavail * st.f_bsize < required) @@ -712,7 +704,7 @@ void DerivationBuilderImpl::startBuilder() Magenta(drv.platform), concatStringsSep(", ", drvOptions.getRequiredSystemFeatures(drv)), Magenta(settings.thisSystem), - concatStringsSep(", ", store.config.systemFeatures)); + concatStringsSep(", ", store.Store::config.systemFeatures)); // since aarch64-darwin has Rosetta 2, this user can actually run x86_64-darwin on their hardware - we should // tell them to run the command to install Darwin 2 @@ -724,7 +716,7 @@ void DerivationBuilderImpl::startBuilder() throw BuildError(msg); } - auto buildDir = getLocalStore(store).config->getBuildDir(); + auto buildDir = store.config->getBuildDir(); createDirs(buildDir); @@ -1173,7 +1165,7 @@ void DerivationBuilderImpl::startDaemon() auto store = makeRestrictedStore( [&] { - auto config = make_ref(*getLocalStore(this->store).config); + auto config = make_ref(*this->store.config); config->pathInfoCacheSize = 0; config->stateDir = "/no-such-path"; config->logDir = "/no-such-path"; @@ -1839,8 +1831,6 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() } } - auto & localStore = getLocalStore(store); - if (buildMode == bmCheck) { if (!store.isValidPath(newInfo.path)) @@ -1876,8 +1866,8 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() /* Since we verified the build, it's now ultimately trusted. */ if (!oldInfo.ultimate) { oldInfo.ultimate = true; - localStore.signPathInfo(oldInfo); - localStore.registerValidPaths({{oldInfo.path, oldInfo}}); + store.signPathInfo(oldInfo); + store.registerValidPaths({{oldInfo.path, oldInfo}}); } continue; @@ -1891,12 +1881,12 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() debug("unreferenced input: '%1%'", store.printStorePath(i)); } - localStore.optimisePath(actualPath, NoRepair); // FIXME: combine with scanForReferences() + store.optimisePath(actualPath, NoRepair); // FIXME: combine with scanForReferences() miscMethods->markContentsGood(newInfo.path); newInfo.deriver = drvPath; newInfo.ultimate = true; - localStore.signPathInfo(newInfo); + store.signPathInfo(newInfo); finish(newInfo.path); @@ -1904,7 +1894,7 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() isn't statically known so that we can safely unlock the path before the next iteration */ if (newInfo.ca) - localStore.registerValidPaths({{newInfo.path, newInfo}}); + store.registerValidPaths({{newInfo.path, newInfo}}); infos.emplace(outputName, std::move(newInfo)); } @@ -1925,13 +1915,11 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() paths referenced by each of them. If there are cycles in the outputs, this will fail. 
*/ { - auto & localStore = getLocalStore(store); - ValidPathInfos infos2; for (auto & [outputName, newInfo] : infos) { infos2.insert_or_assign(newInfo.path, newInfo); } - localStore.registerValidPaths(infos2); + store.registerValidPaths(infos2); } /* In case of a fixed-output derivation hash mismatch, throw an @@ -2164,7 +2152,7 @@ StorePath DerivationBuilderImpl::makeFallbackPath(const StorePath & path) namespace nix { std::unique_ptr makeDerivationBuilder( - Store & store, std::unique_ptr miscMethods, DerivationBuilderParams params) + LocalStore & store, std::unique_ptr miscMethods, DerivationBuilderParams params) { bool useSandbox = false; @@ -2191,8 +2179,7 @@ std::unique_ptr makeDerivationBuilder( useSandbox = params.drv.type().isSandboxed() && !params.drvOptions.noChroot; } - auto & localStore = getLocalStore(store); - if (localStore.storeDir != localStore.config->realStoreDir.get()) { + if (store.storeDir != store.config->realStoreDir.get()) { #ifdef __linux__ useSandbox = true; #else diff --git a/src/libstore/unix/build/linux-derivation-builder.cc b/src/libstore/unix/build/linux-derivation-builder.cc index 3e67cdd42..0d9dc4a85 100644 --- a/src/libstore/unix/build/linux-derivation-builder.cc +++ b/src/libstore/unix/build/linux-derivation-builder.cc @@ -191,7 +191,7 @@ struct ChrootLinuxDerivationBuilder : ChrootDerivationBuilder, LinuxDerivationBu std::optional cgroup; ChrootLinuxDerivationBuilder( - Store & store, std::unique_ptr miscMethods, DerivationBuilderParams params) + LocalStore & store, std::unique_ptr miscMethods, DerivationBuilderParams params) : DerivationBuilderImpl{store, std::move(miscMethods), std::move(params)} , ChrootDerivationBuilder{store, std::move(miscMethods), std::move(params)} , LinuxDerivationBuilder{store, std::move(miscMethods), std::move(params)} @@ -492,7 +492,7 @@ struct ChrootLinuxDerivationBuilder : ChrootDerivationBuilder, LinuxDerivationBu createDirs(chrootRootDir + "/dev/shm"); createDirs(chrootRootDir + "/dev/pts"); ss.push_back("/dev/full"); - if (store.config.systemFeatures.get().count("kvm") && pathExists("/dev/kvm")) + if (store.Store::config.systemFeatures.get().count("kvm") && pathExists("/dev/kvm")) ss.push_back("/dev/kvm"); ss.push_back("/dev/null"); ss.push_back("/dev/random"); diff --git a/src/libstore/unix/include/nix/store/build/derivation-builder.hh b/src/libstore/unix/include/nix/store/build/derivation-builder.hh index 465b45197..8a36a6a8f 100644 --- a/src/libstore/unix/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/unix/include/nix/store/build/derivation-builder.hh @@ -179,6 +179,6 @@ struct DerivationBuilder : RestrictionContext }; std::unique_ptr makeDerivationBuilder( - Store & store, std::unique_ptr miscMethods, DerivationBuilderParams params); + LocalStore & store, std::unique_ptr miscMethods, DerivationBuilderParams params); } // namespace nix From 870bb68d388eb0df7f0f591ebca7a45748c2aa67 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 15 Aug 2025 00:43:36 -0400 Subject: [PATCH 193/382] `DerivationBuilder::registerOutputs`: Inline `checkSuffix` It is a simple constant that is only used once. 
---
 src/libstore/unix/build/derivation-builder.cc | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc
index 7520933c4..c6027ed61 100644
--- a/src/libstore/unix/build/derivation-builder.cc
+++ b/src/libstore/unix/build/derivation-builder.cc
@@ -1422,8 +1422,6 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs()
        outputs to allow hard links between outputs. */
     InodesSeen inodesSeen;
 
-    Path checkSuffix = ".check";
-
     std::exception_ptr delayedException;
 
     /* The paths that can be referenced are the input closures, the
@@ -1839,7 +1837,7 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs()
             if (newInfo.narHash != oldInfo.narHash) {
                 miscMethods->noteCheckMismatch();
                 if (settings.runDiffHook || settings.keepFailed) {
-                    auto dst = store.toRealPath(finalDestPath + checkSuffix);
+                    auto dst = store.toRealPath(finalDestPath + ".check");
                     deletePath(dst);
                     movePath(actualPath, dst);
 

From 9ccbe23056bb7cd81a762fcefb093f8a97dac8f0 Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Fri, 15 Aug 2025 01:19:22 -0400
Subject: [PATCH 194/382] `DerivationBuilder` Change missing `initialOutput`
 `if..throw` to `assert`

Since this goal has no (goal-wide) notion of "wanted outputs" (we're
building the derivation, and thus making all outputs), we should have
`initialOutputs` for all outputs, and if we're missing one that's an
internal error caused by a bug in Nix.

Concretely, `DerivationBuildingGoal::gaveUpOnSubstitution` now clearly
does create `initialOutputs` for all outputs, whereas a few commits ago
that was not obvious, so I feel confident in saying that this invariant
should be upheld, and in fact is upheld.

`scratchOutputs` is initialized for every initial output, so the same
change to it follows for the same reasons.
---
 src/libstore/unix/build/derivation-builder.cc | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc
index c6027ed61..3ea208924 100644
--- a/src/libstore/unix/build/derivation-builder.cc
+++ b/src/libstore/unix/build/derivation-builder.cc
@@ -1456,18 +1456,14 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs()
     std::map outputStats;
     for (auto & [outputName, _] : drv.outputs) {
         auto scratchOutput = get(scratchOutputs, outputName);
-        if (!scratchOutput)
-            throw BuildError(
-                "builder for '%s' has no scratch output for '%s'", store.printStorePath(drvPath), outputName);
+        assert(scratchOutput);
         auto actualPath = realPathInSandbox(store.printStorePath(*scratchOutput));
 
        outputsToSort.insert(outputName);
 
        /* Updated wanted info to remove the outputs we definitely don't need to register */
         auto initialOutput = get(initialOutputs, outputName);
-        if (!initialOutput)
-            throw BuildError(
-                "builder for '%s' has no initial output for '%s'", store.printStorePath(drvPath), outputName);
+        assert(initialOutput);
         auto & initialInfo = *initialOutput;
 
         /* Don't register if already valid, and not checking */

From 048cfb9cd65088ece06aafeaf76d4f5a51d37484 Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Fri, 15 Aug 2025 16:55:23 -0400
Subject: [PATCH 195/382] Delete dangling declaration

There is no definition associated with this.
--- .../include/nix/store/build/derivation-building-goal.hh | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/libstore/include/nix/store/build/derivation-building-goal.hh b/src/libstore/include/nix/store/build/derivation-building-goal.hh index b16a1300c..95949649c 100644 --- a/src/libstore/include/nix/store/build/derivation-building-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-building-goal.hh @@ -174,8 +174,6 @@ struct DerivationBuildingGoal : public Goal void appendLogTailErrorMsg(std::string & msg); - StorePathSet exportReferences(const StorePathSet & storePaths); - JobCategory jobCategory() const override { return JobCategory::Build; From 0348030ae2acc8a98cf46545d5b58f203d6fd905 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sat, 16 Aug 2025 18:27:39 +0300 Subject: [PATCH 196/382] dev-shell: Add gdb It's about time we added debuggers to the dev-shell. Having it in build inputs does some magic so pretty printers for standard library types work better. --- packaging/dev-shell.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/packaging/dev-shell.nix b/packaging/dev-shell.nix index f10a9d56e..27d819ec4 100644 --- a/packaging/dev-shell.nix +++ b/packaging/dev-shell.nix @@ -119,6 +119,7 @@ pkgs.nixComponents2.nix-util.overrideAttrs ( modular.pre-commit.settings.package (pkgs.writeScriptBin "pre-commit-hooks-install" modular.pre-commit.settings.installationScript) pkgs.buildPackages.nixfmt-rfc-style + pkgs.buildPackages.gdb ] ++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) ( lib.hiPrio pkgs.buildPackages.clang-tools From dc1b2012afb9dbe2a2832da87ac9a2465e38ecbf Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sat, 16 Aug 2025 23:00:31 +0300 Subject: [PATCH 197/382] libutil: Fix handling of unescaped spaces, quotes and shevrons in queries and fragments Turns out we didn't have tests for some of the important behavior introduced for flake reference fragments and url queries [1]. This is rather important and is relied upon by existing tooling. This fixes up these exact cases before handing off the URL to the Boost.URL parser. To the best of my knowledge this implements the same behavior as prior regex-based parser did [2]: > fragmentRegex = "(?:" + pcharRegex + "|[/? \"^])*"; > queryRegex = "(?:" + pcharRegex + "|[/? 
\"])*"; [1]: 9c0a09f09fbb930483b26f60f8552fbe5236b777 [2]: https://github.com/NixOS/nix/blob/2.30.2/src/libutil/include/nix/util/url-parts.hh --- src/libflake-tests/flakeref.cc | 14 +++++ src/libutil-tests/url.cc | 20 +++++++ src/libutil/include/nix/util/url.hh | 4 +- src/libutil/url.cc | 91 +++++++++++++++++++---------- 4 files changed, 96 insertions(+), 33 deletions(-) diff --git a/src/libflake-tests/flakeref.cc b/src/libflake-tests/flakeref.cc index 2f8deb123..404d7590a 100644 --- a/src/libflake-tests/flakeref.cc +++ b/src/libflake-tests/flakeref.cc @@ -74,6 +74,20 @@ TEST(parseFlakeRef, GitArchiveInput) auto flakeref = parseFlakeRef(fetchSettings, s); ASSERT_EQ(flakeref.to_string(), "github:foo/bar/branch%23"); } + + { + auto s = "github:foo/bar?ref=branch#\"name.with.dot\""; // unescaped quotes `"` + auto [flakeref, fragment] = parseFlakeRefWithFragment(fetchSettings, s); + ASSERT_EQ(fragment, "\"name.with.dot\""); + ASSERT_EQ(flakeref.to_string(), "github:foo/bar/branch"); + } + + { + auto s = "github:foo/bar#\"name.with.dot\""; // unescaped quotes `"` + auto [flakeref, fragment] = parseFlakeRefWithFragment(fetchSettings, s); + ASSERT_EQ(fragment, "\"name.with.dot\""); + ASSERT_EQ(flakeref.to_string(), "github:foo/bar"); + } } TEST(to_string, doesntReencodeUrl) diff --git a/src/libutil-tests/url.cc b/src/libutil-tests/url.cc index fb27689de..0dfb5f463 100644 --- a/src/libutil-tests/url.cc +++ b/src/libutil-tests/url.cc @@ -212,6 +212,26 @@ TEST(parseURL, parsedUrlsIsEqualToItself) ASSERT_TRUE(url == url); } +TEST(parseURL, parsedUrlsWithUnescapedChars) +{ + /* Test for back-compat. Behavior is rather questionable, but + * is ingrained pretty deep into how URL parsing is shared between + * flakes and libstore. + * 1. Unescaped spaces, quotes and shevron (^) in fragment. + * 2. Unescaped spaces and quotes in query. + */ + auto s = "http://www.example.org/file.tar.gz?query \"= 123\"#shevron^quote\"space "; + auto url = parseURL(s); + + ASSERT_EQ(url.fragment, "shevron^quote\"space "); + + auto query = StringMap{ + {"query \"", " 123\""}, + }; + + ASSERT_EQ(url.query, query); +} + TEST(parseURL, parseFTPUrl) { auto s = "ftp://ftp.nixos.org/downloads/nixos.iso"; diff --git a/src/libutil/include/nix/util/url.hh b/src/libutil/include/nix/util/url.hh index 0a6194b19..0aa1eac9f 100644 --- a/src/libutil/include/nix/util/url.hh +++ b/src/libutil/include/nix/util/url.hh @@ -104,8 +104,8 @@ std::string encodeQuery(const StringMap & query); * Parse a Nix URL into a ParsedURL. * * Nix URI is mostly compliant with RFC3986, but with some deviations: - * - Literal spaces are allowed and don't have to be percent encoded. - * This is mostly done for backward compatibility. + * - Fragments can contain unescaped (not URL encoded) '^', '"' or space literals. + * - Queries may contain unescaped '"' or spaces. * * @note IPv6 ZoneId literals (RFC4007) are represented in URIs according to RFC6874. * diff --git a/src/libutil/url.cc b/src/libutil/url.cc index 8f902552f..75f62d445 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -11,28 +11,6 @@ namespace nix { std::regex refRegex(refRegexS, std::regex::ECMAScript); std::regex revRegex(revRegexS, std::regex::ECMAScript); -/** - * Drop trailing shevron for output installable syntax. - * - * FIXME: parseURL shouldn't really be used for parsing the OutputSpec, but it does - * get used. That code should actually use ExtendedOutputsSpec::parseOpt. 
- */ -static std::string_view dropShevronSuffix(std::string_view url) -{ - auto shevron = url.rfind("^"); - if (shevron == std::string_view::npos) - return url; - return url.substr(0, shevron); -} - -/** - * Percent encode spaces in the url. - */ -static std::string percentEncodeSpaces(std::string_view url) -{ - return replaceStrings(std::string(url), " ", percentEncode(" ")); -} - ParsedURL::Authority ParsedURL::Authority::parse(std::string_view encodedAuthority) { auto parsed = boost::urls::parse_authority(encodedAuthority); @@ -108,14 +86,65 @@ std::string ParsedURL::Authority::to_string() const return std::move(oss).str(); } +/** + * Additional characters that don't need URL encoding in the fragment. + */ +static constexpr boost::urls::grammar::lut_chars extraAllowedCharsInFragment = " \"^"; + +/** + * Additional characters that don't need URL encoding in the query. + */ +static constexpr boost::urls::grammar::lut_chars extraAllowedCharsInQuery = " \""; + +static std::string percentEncodeCharSet(std::string_view s, auto charSet) +{ + std::string res; + for (auto c : s) { + if (charSet(c)) + res += percentEncode(std::string_view{&c, &c + 1}); + else + res += c; + } + return res; +} + ParsedURL parseURL(const std::string & url) try { - /* Drop the shevron suffix used for the flakerefs. Shevron character is reserved and - shouldn't appear in normal URIs. */ - auto unparsedView = dropShevronSuffix(url); - /* For back-compat literal spaces are allowed. */ - auto withFixedSpaces = percentEncodeSpaces(unparsedView); - auto urlView = boost::urls::url_view(withFixedSpaces); + auto unparsedView = url; + + /* Account for several non-standard properties of nix urls (for back-compat): + * - Allow unescaped spaces ' ' and '"' characters in queries. + * - Allow '"', ' ' and '^' characters in the fragment component. + * We could write our own grammar for this, but fixing it up here seems + * more concise, since the deviation is rather minor. + */ + std::string fixedEncodedUrl = [&]() { + std::string fixed; + std::string_view view = url; + + if (auto beforeQuery = splitPrefixTo(view, '?')) { + fixed += *beforeQuery; + fixed += '?'; + auto fragmentStart = view.find('#'); + auto queryView = view.substr(0, fragmentStart); + auto fixedQuery = percentEncodeCharSet(queryView, extraAllowedCharsInQuery); + fixed += fixedQuery; + view.remove_prefix(std::min(fragmentStart, view.size())); + } + + if (auto beforeFragment = splitPrefixTo(view, '#')) { + fixed += *beforeFragment; + fixed += '#'; + auto fixedFragment = percentEncodeCharSet(view, extraAllowedCharsInFragment); + fixed += fixedFragment; + return fixed; + } + + fixed += view; + return fixed; + }(); + + auto urlView = boost::urls::url_view(fixedEncodedUrl); if (!urlView.has_scheme()) throw BadURL("'%s' doesn't have a scheme", url); @@ -176,12 +205,12 @@ std::string percentEncode(std::string_view s, std::string_view keep) StringMap decodeQuery(const std::string & query) try { - /* For back-compat literal spaces are allowed. */ - auto withFixedSpaces = percentEncodeSpaces(query); + /* For back-compat unescaped characters are allowed. 
*/ + auto fixedEncodedQuery = percentEncodeCharSet(query, extraAllowedCharsInQuery); StringMap result; - auto encodedQuery = boost::urls::params_encoded_view(withFixedSpaces); + auto encodedQuery = boost::urls::params_encoded_view(fixedEncodedQuery); for (auto && [key, value, value_specified] : encodedQuery) { if (!value_specified) { warn("dubious URI query '%s' is missing equal sign '%s', ignoring", std::string_view(key), "="); From 695f3bc7e3d69cd798ac63488eabc633688c2dca Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sat, 16 Aug 2025 23:31:57 +0300 Subject: [PATCH 198/382] tests/nixos: Fix daemon store reference in authorization test Recent store reference cleanups have started canonicalizing daemon -> unix://. --- tests/nixos/authorization.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/nixos/authorization.nix b/tests/nixos/authorization.nix index 6540e9fa3..ee3be7504 100644 --- a/tests/nixos/authorization.nix +++ b/tests/nixos/authorization.nix @@ -84,7 +84,7 @@ su --login mallory -c ' nix-store --generate-binary-cache-key cache1.example.org sk1 pk1 (! nix store sign --key-file sk1 ${pathFour} 2>&1)' | tee diag 1>&2 - grep -F "cannot open connection to remote store 'daemon'" diag + grep -F "cannot open connection to remote store 'unix://'" diag """) machine.succeed(""" From 3c0a5e0a5101a21626394985f394cd1a273a6f94 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 17 Aug 2025 00:56:41 +0300 Subject: [PATCH 199/382] nix-cli: Improve compile times (Before) **** Time summary: Compilation (61 times): Parsing (frontend): 143.2 s Codegen & opts (backend): 105.8 s (After) **** Time summary: Compilation (61 times): Parsing (frontend): 113.8 s Codegen & opts (backend): 111.7 s --- src/nix/pch/precompiled-headers.hh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/nix/pch/precompiled-headers.hh b/src/nix/pch/precompiled-headers.hh index 5b320c861..ab5856ea8 100644 --- a/src/nix/pch/precompiled-headers.hh +++ b/src/nix/pch/precompiled-headers.hh @@ -1,3 +1,6 @@ #include "nix/cmd/command.hh" #include "nix/expr/eval.hh" #include "nix/main/shared.hh" +#include "nix/store/derivations.hh" + +#include From d6973e105cdd5a39150c9d95d851e2d3b9db67c4 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 17 Aug 2025 00:56:54 +0300 Subject: [PATCH 200/382] libexpr-tests: Improve compile times with PCH (Before) **** Time summary: Compilation (14 times): Parsing (frontend): 60.1 s Codegen & opts (backend): 18.0 s (After) **** Time summary: Compilation (15 times): Parsing (frontend): 16.6 s Codegen & opts (backend): 17.4 s --- src/libexpr-tests/meson.build | 1 + src/libexpr-tests/pch/precompiled-headers.hh | 4 ++++ 2 files changed, 5 insertions(+) create mode 100644 src/libexpr-tests/pch/precompiled-headers.hh diff --git a/src/libexpr-tests/meson.build b/src/libexpr-tests/meson.build index 1f3973681..cc203d60e 100644 --- a/src/libexpr-tests/meson.build +++ b/src/libexpr-tests/meson.build @@ -75,6 +75,7 @@ this_exe = executable( # TODO: -lrapidcheck, see ../libutil-support/build.meson link_args : linker_export_flags + [ '-lrapidcheck' ], install : true, + cpp_pch : do_pch ? 
[ 'pch/precompiled-headers.hh' ] : [], ) test( diff --git a/src/libexpr-tests/pch/precompiled-headers.hh b/src/libexpr-tests/pch/precompiled-headers.hh new file mode 100644 index 000000000..63bf023a2 --- /dev/null +++ b/src/libexpr-tests/pch/precompiled-headers.hh @@ -0,0 +1,4 @@ +#include "nix/expr/tests/libexpr.hh" + +#include +#include From f13aeaf2f1e43032454bc53d2832f01e114a9817 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 17 Aug 2025 00:57:06 +0300 Subject: [PATCH 201/382] libutil-tests: Improve compile times with PCH (Before) **** Time summary: Compilation (30 times): Parsing (frontend): 98.2 s Codegen & opts (backend): 27.7 s (After) **** Time summary: Compilation (31 times): Parsing (frontend): 34.9 s Codegen & opts (backend): 28.1 s --- src/libutil-tests/meson.build | 1 + src/libutil-tests/pch/precompiled-headers.hh | 5 +++++ 2 files changed, 6 insertions(+) create mode 100644 src/libutil-tests/pch/precompiled-headers.hh diff --git a/src/libutil-tests/meson.build b/src/libutil-tests/meson.build index e4a060d3e..6f04b46fd 100644 --- a/src/libutil-tests/meson.build +++ b/src/libutil-tests/meson.build @@ -89,6 +89,7 @@ this_exe = executable( link_args : linker_export_flags + [ '-lrapidcheck' ], # get main from gtest install : true, + cpp_pch : do_pch ? [ 'pch/precompiled-headers.hh' ] : [], ) test( diff --git a/src/libutil-tests/pch/precompiled-headers.hh b/src/libutil-tests/pch/precompiled-headers.hh new file mode 100644 index 000000000..0a078fcc4 --- /dev/null +++ b/src/libutil-tests/pch/precompiled-headers.hh @@ -0,0 +1,5 @@ +#include "nix/util/util.hh" + +#include +#include +#include From 347ed8d9ba83b7cd45dd371c996fd9c111df91a2 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 17 Aug 2025 00:57:14 +0300 Subject: [PATCH 202/382] libfetchers: Improve compile times with PCH (Before) **** Time summary: Compilation (17 times): Parsing (frontend): 72.3 s Codegen & opts (backend): 47.8 s (After) Compilation (19 times): Parsing (frontend): 27.5 s Codegen & opts (backend): 43.8 s --- src/libfetchers/git-lfs-fetch.cc | 2 +- src/libfetchers/meson.build | 1 + src/libfetchers/pch/precompiled-headers.hh | 3 +++ 3 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 src/libfetchers/pch/precompiled-headers.hh diff --git a/src/libfetchers/git-lfs-fetch.cc b/src/libfetchers/git-lfs-fetch.cc index 35230ae88..a68cdf832 100644 --- a/src/libfetchers/git-lfs-fetch.cc +++ b/src/libfetchers/git-lfs-fetch.cc @@ -292,7 +292,7 @@ void Fetch::fetch( auto authIt = headerIt->find("Authorization"); if (authIt == headerIt->end()) return std::nullopt; - return *authIt; + return std::string(*authIt); }(); const uint64_t size = obj.at("size"); sizeCallback(size); diff --git a/src/libfetchers/meson.build b/src/libfetchers/meson.build index 0b53ef44d..3da58cb70 100644 --- a/src/libfetchers/meson.build +++ b/src/libfetchers/meson.build @@ -66,6 +66,7 @@ this_library = library( link_args : linker_export_flags, prelink : true, # For C++ static initializers install : true, + cpp_pch : do_pch ? 
[ 'pch/precompiled-headers.hh' ] : [], ) install_headers(headers, subdir : 'nix/fetchers', preserve_path : true) diff --git a/src/libfetchers/pch/precompiled-headers.hh b/src/libfetchers/pch/precompiled-headers.hh new file mode 100644 index 000000000..db7f22a2e --- /dev/null +++ b/src/libfetchers/pch/precompiled-headers.hh @@ -0,0 +1,3 @@ +#include "nix/fetchers/fetchers.hh" +#include "nix/store/store-api.hh" +#include "nix/util/json-utils.hh" From 8be1cc6e96a95a634c73caf2bf545373c992b128 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 17 Aug 2025 01:08:31 +0300 Subject: [PATCH 203/382] libstore-tests: Improve compile times with PCH (Before) **** Time summary: Compilation (25 times): Parsing (frontend): 145.5 s Codegen & opts (backend): 65.9 s (After) **** Time summary: Compilation (26 times): Parsing (frontend): 39.9 s Codegen & opts (backend): 55.1 s --- src/libstore-tests/meson.build | 2 ++ src/libstore-tests/pch/precompiled-headers.hh | 9 +++++++++ 2 files changed, 11 insertions(+) create mode 100644 src/libstore-tests/pch/precompiled-headers.hh diff --git a/src/libstore-tests/meson.build b/src/libstore-tests/meson.build index e5995bcb1..87f6a234a 100644 --- a/src/libstore-tests/meson.build +++ b/src/libstore-tests/meson.build @@ -94,6 +94,7 @@ this_exe = executable( link_args : linker_export_flags + [ '-lrapidcheck' ], # get main from gtest install : true, + cpp_pch : do_pch ? [ 'pch/precompiled-headers.hh' ] : [], ) test( @@ -127,6 +128,7 @@ if get_option('benchmarks') include_directories : include_dirs, link_args : linker_export_flags, install : true, + cpp_pch : do_pch ? [ 'pch/precompiled-headers.hh' ] : [], cpp_args : [ '-DNIX_UNIT_TEST_DATA="' + meson.current_source_dir() + '/data"', ], diff --git a/src/libstore-tests/pch/precompiled-headers.hh b/src/libstore-tests/pch/precompiled-headers.hh new file mode 100644 index 000000000..3eaa45261 --- /dev/null +++ b/src/libstore-tests/pch/precompiled-headers.hh @@ -0,0 +1,9 @@ +#include "nix/store/store-api.hh" +#include "nix/store/tests/libstore.hh" +#include "nix/util/util.hh" + +#include +#include +#include + +#include From ccc0a31f3973c79acd4144ca1beae727f4521f96 Mon Sep 17 00:00:00 2001 From: Manse Date: Sun, 17 Aug 2025 00:39:41 -0300 Subject: [PATCH 204/382] fix doc code example Fixed the Python shebang example in the `nix shell` documentation. --- src/nix/shell.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix/shell.md b/src/nix/shell.md index 677151a85..1e44c2478 100644 --- a/src/nix/shell.md +++ b/src/nix/shell.md @@ -83,7 +83,7 @@ import prettytable # Print a simple table. t = prettytable.PrettyTable(["N", "N^2"]) for n in range(1, 10): t.add_row([n, n * n]) -print t +print(t) ``` Similarly, the following is a Perl script that specifies that it From e244e323080fad9ad0b58c5158aaf76d46e82b1f Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 17 Aug 2025 16:52:43 +0300 Subject: [PATCH 205/382] libstore-tests: Obfuscate derivation paths in drv files for benchmarks Otherwise these might form valid store path references, and the nix repo could then not be fetched as a fixed-output derivation (FOD). 
--- src/libstore-tests/data/derivation/firefox.drv | 2 +- src/libstore-tests/data/derivation/hello.drv | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libstore-tests/data/derivation/firefox.drv b/src/libstore-tests/data/derivation/firefox.drv index 98ff69c81..d746d21e8 100644 --- a/src/libstore-tests/data/derivation/firefox.drv +++ b/src/libstore-tests/data/derivation/firefox.drv @@ -1 +1 @@ -Derive([("out","/nix/store/jycqnr8rdfy035ckiwmar4yql406jjh6-firefox-140.0.4","","")],[("/nix/store/01qkwh6g0xqlrchziwa8zm3c8p6s5ylc-krb5-1.21.3.drv",["lib"]),("/nix/store/03ixv9yqpmi015vv0m4gza1d2ac8wql8-libglvnd-1.7.0.drv",["out"]),("/nix/store/2g73b2yd3qhzp774i9svckdnspyrfrn8-firefox-unwrapped-140.0.4.drv",["out"]),("/nix/store/2wf9wi34rvwyq7dxk0xlk41ccll282z0-make-shell-wrapper-hook.drv",["out"]),("/nix/store/3gr22851nw1nj8zmy34961yb3fbr2iz5-libva-2.22.0.drv",["out"]),("/nix/store/7gzk0w5125wqhgp8sh26l3k15lklm82c-policies.json.drv",["out"]),("/nix/store/b3jvr65plcm9nm4wn8ssw2ymnv2zshmq-extract-binary-wrapper-cmd.drv",["out"]),("/nix/store/ck7ckwkvqv49hz5rl8yp52c4v6p2f6vd-pipewire-1.4.6.drv",["out"]),("/nix/store/fm2b1q5m1wrvy237j6x9xcvhxw95wjfp-alsa-lib-1.2.14.drv",["out"]),("/nix/store/fvch7vyklf8gr8i53p9ybv2azyh6q30y-ffmpeg-7.1.1.drv",["lib"]),("/nix/store/ggzpx6yx9xd5h241ri730acdfpdirxjq-mesa-libgbm-25.1.0.drv",["out"]),("/nix/store/hmwbmap5c8zp3kl20bda0qk1dg4f408d-vulkan-loader-1.4.313.0.drv",["out"]),("/nix/store/hy8x576wmvz2fk9a778r05lqdb0dcbl4-speech-dispatcher-0.12.1.drv",["out"]),("/nix/store/knjlgd5nfd7dw45axa6z7w7d7rb11801-pciutils-3.14.0.drv",["out"]),("/nix/store/l7y4nybngpffyndkw8rwx2g3c8dw4yyj-xdg-utils-1.2.1.drv",["out"]),("/nix/store/lh0mbixp7jiz7z8cxfypm8b17lf8z9la-cups-2.4.12.drv",["lib"]),("/nix/store/lmfgw4dz59sdvplfh3zazqmin8d7gh9d-libnotify-0.8.6.drv",["out"]),("/nix/store/m4zp8p8y4wzxg2lhgq43kbgr2xl5l9px-firefox.desktop.drv",["out"]),("/nix/store/n8hy5gvnrw0kzz6cjdhky3rxr0q024sh-jq-1.8.1.drv",["dev"]),("/nix/store/nz0xp44xmbqcp2sfhsgicsswrrb2cpl7-libXScrnSaver-1.2.4.drv",["out"]),("/nix/store/p2nvbwxxiarbqaq26rqrjqkyw8dl0cf4-lndir-1.0.5.drv",["out"]),("/nix/store/p306lljvhw8906r1lxyyk7jn3bckdypn-sndio-1.10.0.drv",["out"]),("/nix/store/pbqbdcp76r373256ajlbp3fankpw5pdh-libpulseaudio-17.0.drv",["out"]),("/nix/store/q27q075ckyjkpqxx8cgvpghcwr0nzz5v-systemd-minimal-libs-257.6.drv",["out"]),("/nix/store/s4b8yadif84kiv8gyr9nxdi6zbg69b4g-bash-5.2p37.drv",["out"]),("/nix/store/sc2pgkzc1s6zp5dp8j7wsd4msilsnijn-stdenv-linux.drv",["out"]),("/nix/store/sy2kn7hwfw0nyvxq7rvzrmvm2ypxs9x6-gtk+3-3.24.49.drv",["dev"]),("/nix/store/v3sc4j11fhsah2ij0wkziw6nd3il9dyy-libjack2-1.9.22.drv",["out"]),("/nix/store/xy07yjv0x8bcdc3lzyq45lrbbz08hg5p-adwaita-icon-theme-48.0.drv",["out"]),("/nix/store/z8v9j1cf6faasgqrw3jw74xjyjiqwk82-libcanberra-0.30.drv",["out"])],["/nix/store/shkw4qm9qcw5sc5n1k5jznc83ny02r39-default-builder.sh","/nix/store/vj1c3wf9c11a0qs6p3ymfvrnsdgsdcbq-source-stdenv.sh"],"x86_64-linux","/nix/store/p79bgyzmmmddi554ckwzbqlavbkw07zh-bash-5.2p37/bin/bash",["-e","/nix/store/vj1c3wf9c11a0qs6p3ymfvrnsdgsdcbq-source-stdenv.sh","/nix/store/shkw4qm9qcw5sc5n1k5jznc83ny02r39-default-builder.sh"],[("__json","{\"NIX_MAIN_PROGRAM\":\"firefox\",\"buildCommand\":\"if [ ! 
-x \\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4/bin/firefox\\\" ]\\nthen\\n echo \\\"cannot find executable file \\\\`/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4/bin/firefox'\\\"\\n exit 1\\nfi\\n\\n#########################\\n# #\\n# EXTRA PREF CHANGES #\\n# #\\n#########################\\n# Link the runtime. The executable itself has to be copied,\\n# because it will resolve paths relative to its true location.\\n# Any symbolic links have to be replicated as well.\\ncd \\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4\\\"\\nfind . -type d -exec mkdir -p \\\"$out\\\"/{} \\\\;\\n\\nfind . -type f \\\\( -not -name \\\"firefox\\\" \\\\) -exec ln -sT \\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4\\\"/{} \\\"$out\\\"/{} \\\\;\\n\\nfind . -type f \\\\( -name \\\"firefox\\\" -o -name \\\"firefox-bin\\\" \\\\) -print0 | while read -d $'\\\\0' f; do\\n cp -P --no-preserve=mode,ownership --remove-destination \\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4/$f\\\" \\\"$out/$f\\\"\\n chmod a+rwx \\\"$out/$f\\\"\\ndone\\n\\n# fix links and absolute references\\n\\nfind . -type l -print0 | while read -d $'\\\\0' l; do\\n target=\\\"$(readlink \\\"$l\\\")\\\"\\n target=${target/#\\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4\\\"/\\\"$out\\\"}\\n ln -sfT \\\"$target\\\" \\\"$out/$l\\\"\\ndone\\n\\ncd \\\"$out\\\"\\n\\n\\n# create the wrapper\\n\\nexecutablePrefix=\\\"$out/bin\\\"\\nexecutablePath=\\\"$out/bin/firefox\\\"\\noldWrapperArgs=()\\n\\nif [[ -L $executablePath ]]; then\\n # Symbolic link: wrap the link's target.\\n oldExe=\\\"$(readlink -v --canonicalize-existing \\\"$executablePath\\\")\\\"\\n rm \\\"$executablePath\\\"\\nelif wrapperCmd=$(/nix/store/qczbm5rh1vfkql4jznp1p5lv9nyjz99r-extract-binary-wrapper-cmd \\\"$executablePath\\\"); [[ $wrapperCmd ]]; then\\n # If the executable is a binary wrapper, we need to update its target to\\n # point to $out, but we can't just edit the binary in-place because of length\\n # issues. So we extract the command used to create the wrapper and add the\\n # arguments to our wrapper.\\n parseMakeCWrapperCall() {\\n shift # makeCWrapper\\n oldExe=$1; shift\\n oldWrapperArgs=(\\\"$@\\\")\\n }\\n eval \\\"parseMakeCWrapperCall ${wrapperCmd//\\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4\\\"/\\\"$out\\\"}\\\"\\n rm \\\"$executablePath\\\"\\nelse\\n if read -rn2 shebang < \\\"$executablePath\\\" && [[ $shebang == '#!' 
]]; then\\n # Shell wrapper: patch in place to point to $out.\\n sed -i \\\"s@/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4@$out@g\\\" \\\"$executablePath\\\"\\n fi\\n # Suffix the executable with -old, because -wrapped might already be used by the old wrapper.\\n oldExe=\\\"$executablePrefix/.firefox\\\"-old\\n mv \\\"$executablePath\\\" \\\"$oldExe\\\"\\nfi\\nappendToVar makeWrapperArgs --prefix XDG_DATA_DIRS : \\\"$GSETTINGS_SCHEMAS_PATH\\\"\\nconcatTo makeWrapperArgs oldWrapperArgs\\n\\nmakeWrapper \\\"$oldExe\\\" \\\"$out/bin/firefox\\\" \\\"${makeWrapperArgs[@]}\\\"\\n\\n#############################\\n# #\\n# END EXTRA PREF CHANGES #\\n# #\\n#############################\\nif [ -e \\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4/share/icons\\\" ]; then\\n mkdir -p \\\"$out/share\\\"\\n ln -s \\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4/share/icons\\\" \\\"$out/share/icons\\\"\\nelse\\n for res in 16 32 48 64 128; do\\n mkdir -p \\\"$out/share/icons/hicolor/${res}x${res}/apps\\\"\\n icon=$( find \\\"/nix/store/9ycwpmlavjsjgr0svaqdy0mmjg949nzq-firefox-unwrapped-140.0.4/lib/\\\" -name \\\"default${res}.png\\\" )\\n if [ -e \\\"$icon\\\" ]; then ln -s \\\"$icon\\\" \\\\\\n \\\"$out/share/icons/hicolor/${res}x${res}/apps/firefox.png\\\"\\n fi\\n done\\nfi\\n\\ninstall -m 644 -D -t $out/share/applications $desktopItem/share/applications/*\\n\\nmkdir -p $out/lib/mozilla/native-messaging-hosts\\nfor ext in ; do\\n ln -sLt $out/lib/mozilla/native-messaging-hosts $ext/lib/mozilla/native-messaging-hosts/*\\ndone\\n\\nmkdir -p $out/lib/mozilla/pkcs11-modules\\nfor ext in ; do\\n ln -sLt $out/lib/mozilla/pkcs11-modules $ext/lib/mozilla/pkcs11-modules/*\\ndone\\n\\n\\n#########################\\n# #\\n# EXTRA PREF CHANGES #\\n# #\\n#########################\\n# user customization\\nlibDir=\\\"$out/lib/firefox\\\"\\n\\n# creating policies.json\\nmkdir -p \\\"$libDir/distribution\\\"\\n\\nPOL_PATH=\\\"$libDir/distribution/policies.json\\\"\\nrm -f \\\"$POL_PATH\\\"\\ncat /nix/store/s9r3kncxydp3s94cari78f2dl68w1k3j-policies.json >> \\\"$POL_PATH\\\"\\n\\nextraPoliciesFiles=()\\nfor extraPoliciesFile in \\\"${extraPoliciesFiles[@]}\\\"; do\\n jq -s '.[0] * .[1]' $extraPoliciesFile \\\"$POL_PATH\\\" > .tmp.json\\n mv .tmp.json \\\"$POL_PATH\\\"\\ndone\\n\\n# preparing for autoconfig\\nprefsDir=\\\"$out/lib/firefox/defaults/pref\\\"\\nmkdir -p \\\"$prefsDir\\\"\\n\\necho 'pref(\\\"general.config.filename\\\", \\\"mozilla.cfg\\\");' > \\\"$prefsDir/autoconfig.js\\\"\\necho 'pref(\\\"general.config.obscure_value\\\", 0);' >> \\\"$prefsDir/autoconfig.js\\\"\\n\\ncat > \\\"$libDir/mozilla.cfg\\\" << EOF\\n// First line must be a comment\\n\\n// Disables addon signature checking\\n// to be able to install addons that do not have an extid\\n// Security is maintained because only user whitelisted addons\\n// with a checksum can be installed\\n\\n\\nEOF\\n\\nextraPrefsFiles=()\\nfor extraPrefsFile in \\\"${extraPrefsFiles[@]}\\\"; do\\n cat \\\"$extraPrefsFile\\\" >> \\\"$libDir/mozilla.cfg\\\"\\ndone\\n\\ncat >> \\\"$libDir/mozilla.cfg\\\" << EOF\\n\\nEOF\\n\\nmkdir -p \\\"$libDir/distribution/extensions\\\"\\n\\n#############################\\n# #\\n# END EXTRA PREF CHANGES #\\n# 
#\\n#############################\\n\",\"buildInputs\":[\"/nix/store/09050l3isnxqyjbsn4qfbq190i6a85bx-gtk+3-3.24.49-dev\"],\"builder\":\"/nix/store/p79bgyzmmmddi554ckwzbqlavbkw07zh-bash-5.2p37/bin/bash\",\"cmakeFlags\":[],\"configureFlags\":[],\"depsBuildBuild\":[],\"depsBuildBuildPropagated\":[],\"depsBuildTarget\":[],\"depsBuildTargetPropagated\":[],\"depsHostHost\":[],\"depsHostHostPropagated\":[],\"depsTargetTarget\":[],\"depsTargetTargetPropagated\":[],\"desktopItem\":\"/nix/store/w8xccv3flpwzgwan7k0l4rrv9m8ipffa-firefox.desktop\",\"doCheck\":false,\"doInstallCheck\":false,\"env\":{\"NIX_MAIN_PROGRAM\":\"firefox\"},\"gtk_modules\":[\"/nix/store/6pwfsghvp9fa6bpwryk7bwbjd4f5vdxy-libcanberra-0.30/lib/gtk-3.0/\"],\"libs\":\"/nix/store/pb6fwczgq5d08yppb0mxsbvvzi2wl71g-systemd-minimal-libs-257.6/lib:/nix/store/ib8prgicm88f9xbg7cgbk72n3s69c0rx-libva-2.22.0/lib:/nix/store/d6a8ckgb953nqr2qamidqzz1i7v473pm-mesa-libgbm-25.1.0/lib:/nix/store/1mx1hccld2shxc3acmr32kydiw5kb0l3-libnotify-0.8.6/lib:/nix/store/3ia435d0b41k0gz1hmg5yj134fh1j70x-libXScrnSaver-1.2.4/lib:/nix/store/wj5sc0i81fb6hcz802gmsgdsjll79wfc-cups-2.4.12-lib/lib:/nix/store/h0zj2k3q2iqs6b2qdjqrg29l2kaksgkz-pciutils-3.14.0/lib:/nix/store/0jgicjfcml2v3plj470ggf8q88xkxq4d-vulkan-loader-1.4.313.0/lib:/nix/store/z67zjqlvbgz80slzmmibmyv31k68l2r6-speech-dispatcher-0.12.1/lib:/nix/store/gh2fi51xdj78cj9j9za5jfrrj8qgx90c-pipewire-1.4.6/lib:/nix/store/r5a9sknnr626v8whd46h4fm7i6v5yl8l-ffmpeg-7.1.1-lib/lib:/nix/store/7razjlx084wqwcaa359mvrcjd4lx1kn2-krb5-1.21.3-lib/lib:/nix/store/iyy1g70fhkz3hsrckbmbqgxik1j9779c-libglvnd-1.7.0/lib:/nix/store/vfiyznjv206ysafzl3ibnr7cr1lhq83q-libpulseaudio-17.0/lib:/nix/store/7p8rmrv6hy8lx90a52nk94fdm4av51pl-alsa-lib-1.2.14/lib:/nix/store/a5m2h89rn29n7374pdm34d7a120c1j6f-sndio-1.10.0/lib:/nix/store/nwsvsihqzmgabv47kqqrsj94nlmraajp-libjack2-1.9.22/lib:/nix/store/6pwfsghvp9fa6bpwryk7bwbjd4f5vdxy-libcanberra-0.30/lib:/nix/store/pb6fwczgq5d08yppb0mxsbvvzi2wl71g-systemd-minimal-libs-257.6/lib64:/nix/store/ib8prgicm88f9xbg7cgbk72n3s69c0rx-libva-2.22.0/lib64:/nix/store/d6a8ckgb953nqr2qamidqzz1i7v473pm-mesa-libgbm-25.1.0/lib64:/nix/store/1mx1hccld2shxc3acmr32kydiw5kb0l3-libnotify-0.8.6/lib64:/nix/store/3ia435d0b41k0gz1hmg5yj134fh1j70x-libXScrnSaver-1.2.4/lib64:/nix/store/wj5sc0i81fb6hcz802gmsgdsjll79wfc-cups-2.4.12-lib/lib64:/nix/store/h0zj2k3q2iqs6b2qdjqrg29l2kaksgkz-pciutils-3.14.0/lib64:/nix/store/0jgicjfcml2v3plj470ggf8q88xkxq4d-vulkan-loader-1.4.313.0/lib64:/nix/store/z67zjqlvbgz80slzmmibmyv31k68l2r6-speech-dispatcher-0.12.1/lib64:/nix/store/gh2fi51xdj78cj9j9za5jfrrj8qgx90c-pipewire-1.4.6/lib64:/nix/store/r5a9sknnr626v8whd46h4fm7i6v5yl8l-ffmpeg-7.1.1-lib/lib64:/nix/store/7razjlx084wqwcaa359mvrcjd4lx1kn2-krb5-1.21.3-lib/lib64:/nix/store/iyy1g70fhkz3hsrckbmbqgxik1j9779c-libglvnd-1.7.0/lib64:/nix/store/vfiyznjv206ysafzl3ibnr7cr1lhq83q-libpulseaudio-17.0/lib64:/nix/store/7p8rmrv6hy8lx90a52nk94fdm4av51pl-alsa-lib-1.2.14/lib64:/nix/store/a5m2h89rn29n7374pdm34d7a120c1j6f-sndio-1.10.0/lib64:/nix/store/nwsvsihqzmgabv47kqqrsj94nlmraajp-libjack2-1.9.22/lib64:/nix/store/6pwfsghvp9fa6bpwryk7bwbjd4f5vdxy-libcanberra-0.30/lib64\",\"makeWrapperArgs\":[\"--prefix\",\"LD_LIBRARY_PATH\",\":\",\"/nix/store/pb6fwczgq5d08yppb0mxsbvvzi2wl71g-systemd-minimal-libs-257.6/lib:/nix/store/ib8prgicm88f9xbg7cgbk72n3s69c0rx-libva-2.22.0/lib:/nix/store/d6a8ckgb953nqr2qamidqzz1i7v473pm-mesa-libgbm-25.1.0/lib:/nix/store/1mx1hccld2shxc3acmr32kydiw5kb0l3-libnotify-0.8.6/lib:/nix/store/3ia435d0b41k0gz1hmg5yj134fh1j70x-libXScrnSaver-1.2.4/lib:/nix/store/wj5sc0
i81fb6hcz802gmsgdsjll79wfc-cups-2.4.12-lib/lib:/nix/store/h0zj2k3q2iqs6b2qdjqrg29l2kaksgkz-pciutils-3.14.0/lib:/nix/store/0jgicjfcml2v3plj470ggf8q88xkxq4d-vulkan-loader-1.4.313.0/lib:/nix/store/z67zjqlvbgz80slzmmibmyv31k68l2r6-speech-dispatcher-0.12.1/lib:/nix/store/gh2fi51xdj78cj9j9za5jfrrj8qgx90c-pipewire-1.4.6/lib:/nix/store/r5a9sknnr626v8whd46h4fm7i6v5yl8l-ffmpeg-7.1.1-lib/lib:/nix/store/7razjlx084wqwcaa359mvrcjd4lx1kn2-krb5-1.21.3-lib/lib:/nix/store/iyy1g70fhkz3hsrckbmbqgxik1j9779c-libglvnd-1.7.0/lib:/nix/store/vfiyznjv206ysafzl3ibnr7cr1lhq83q-libpulseaudio-17.0/lib:/nix/store/7p8rmrv6hy8lx90a52nk94fdm4av51pl-alsa-lib-1.2.14/lib:/nix/store/a5m2h89rn29n7374pdm34d7a120c1j6f-sndio-1.10.0/lib:/nix/store/nwsvsihqzmgabv47kqqrsj94nlmraajp-libjack2-1.9.22/lib:/nix/store/6pwfsghvp9fa6bpwryk7bwbjd4f5vdxy-libcanberra-0.30/lib:/nix/store/pb6fwczgq5d08yppb0mxsbvvzi2wl71g-systemd-minimal-libs-257.6/lib64:/nix/store/ib8prgicm88f9xbg7cgbk72n3s69c0rx-libva-2.22.0/lib64:/nix/store/d6a8ckgb953nqr2qamidqzz1i7v473pm-mesa-libgbm-25.1.0/lib64:/nix/store/1mx1hccld2shxc3acmr32kydiw5kb0l3-libnotify-0.8.6/lib64:/nix/store/3ia435d0b41k0gz1hmg5yj134fh1j70x-libXScrnSaver-1.2.4/lib64:/nix/store/wj5sc0i81fb6hcz802gmsgdsjll79wfc-cups-2.4.12-lib/lib64:/nix/store/h0zj2k3q2iqs6b2qdjqrg29l2kaksgkz-pciutils-3.14.0/lib64:/nix/store/0jgicjfcml2v3plj470ggf8q88xkxq4d-vulkan-loader-1.4.313.0/lib64:/nix/store/z67zjqlvbgz80slzmmibmyv31k68l2r6-speech-dispatcher-0.12.1/lib64:/nix/store/gh2fi51xdj78cj9j9za5jfrrj8qgx90c-pipewire-1.4.6/lib64:/nix/store/r5a9sknnr626v8whd46h4fm7i6v5yl8l-ffmpeg-7.1.1-lib/lib64:/nix/store/7razjlx084wqwcaa359mvrcjd4lx1kn2-krb5-1.21.3-lib/lib64:/nix/store/iyy1g70fhkz3hsrckbmbqgxik1j9779c-libglvnd-1.7.0/lib64:/nix/store/vfiyznjv206ysafzl3ibnr7cr1lhq83q-libpulseaudio-17.0/lib64:/nix/store/7p8rmrv6hy8lx90a52nk94fdm4av51pl-alsa-lib-1.2.14/lib64:/nix/store/a5m2h89rn29n7374pdm34d7a120c1j6f-sndio-1.10.0/lib64:/nix/store/nwsvsihqzmgabv47kqqrsj94nlmraajp-libjack2-1.9.22/lib64:/nix/store/6pwfsghvp9fa6bpwryk7bwbjd4f5vdxy-libcanberra-0.30/lib64\",\"--suffix\",\"PATH\",\":\",\"/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9/bin\",\"--set\",\"MOZ_APP_LAUNCHER\",\"firefox\",\"--set\",\"MOZ_LEGACY_PROFILES\",\"1\",\"--set\",\"MOZ_ALLOW_DOWNGRADE\",\"1\",\"--suffix\",\"GTK_PATH\",\":\",\"/nix/store/6pwfsghvp9fa6bpwryk7bwbjd4f5vdxy-libcanberra-0.30/lib/gtk-3.0/\",\"--suffix\",\"XDG_DATA_DIRS\",\":\",\"/nix/store/1w8x293926aq2vcyys36aw49fy5p8cm5-adwaita-icon-theme-48.0/share\",\"--set-default\",\"MOZ_ENABLE_WAYLAND\",\"1\",\"--suffix\",\"PATH\",\":\",\"/nix/store/yrzm7cya8nf8xnpi8xlfwx16plqkzhgh-xdg-utils-1.2.1/bin\",\"--set\",\"MOZ_SYSTEM_DIR\",\"/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9/lib/mozilla\"],\"mesonFlags\":[],\"name\":\"firefox-140.0.4\",\"nativeBuildInputs\":[\"/nix/store/lnd6p3anjxgwawlhlpzvvl40d4yc2jd4-make-shell-wrapper-hook\",\"/nix/store/kn8zagv6mk3ykmax5fqh4h18raqhxbh6-lndir-1.0.5\",\"/nix/store/x0kva02y0iyh7l0qvnx3l8ci7ll1r5si-jq-1.8.1-dev\"],\"outputChecks\":{\"out\":{\"disallowedRequisites\":[\"/nix/store/7nlf5v84s4p2yhx327j8495yik60qnzh-gcc-wrapper-14.3.0\"]}},\"outputs\":[\"out\"],\"patches\":[],\"pname\":\"firefox\",\"preferLocalBuild\":true,\"propagatedBuildInputs\":[],\"propagatedNativeBuildInputs\":[],\"stdenv\":\"/nix/store/a13rl87yjhzqrbkc4gb0mrwz2mfkivcf-stdenv-linux\",\"strictDeps\":false,\"system\":\"x86_64-linux\",\"version\":\"140.0.4\"}"),("out","/nix/store/jycqnr8rdfy035ckiwmar4yql406jjh6-firefox-140.0.4")]) \ No newline at end of file 
+Derive([("out","/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-firefox-140.0.4","","")],[("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-krb5-1.21.3.drv",["lib"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libglvnd-1.7.0.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-firefox-unwrapped-140.0.4.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-make-shell-wrapper-hook.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libva-2.22.0.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-policies.json.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-extract-binary-wrapper-cmd.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pipewire-1.4.6.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-alsa-lib-1.2.14.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-ffmpeg-7.1.1.drv",["lib"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-mesa-libgbm-25.1.0.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-vulkan-loader-1.4.313.0.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-speech-dispatcher-0.12.1.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pciutils-3.14.0.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-xdg-utils-1.2.1.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-cups-2.4.12.drv",["lib"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libnotify-0.8.6.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-firefox.desktop.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-jq-1.8.1.drv",["dev"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libXScrnSaver-1.2.4.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-lndir-1.0.5.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-sndio-1.10.0.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libpulseaudio-17.0.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-systemd-minimal-libs-257.6.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-bash-5.2p37.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-stdenv-linux.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-gtk+3-3.24.49.drv",["dev"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libjack2-1.9.22.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-adwaita-icon-theme-48.0.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libcanberra-0.30.drv",["out"])],["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-default-builder.sh","/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-source-stdenv.sh"],"x86_64-linux","/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-bash-5.2p37/bin/bash",["-e","/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-source-stdenv.sh","/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-default-builder.sh"],[("__json","{\"NIX_MAIN_PROGRAM\":\"firefox\",\"buildCommand\":\"if [ ! -x \\\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-firefox-unwrapped-140.0.4/bin/firefox\\\" ]\\nthen\\n echo \\\"cannot find executable file \\\\`/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-firefox-unwrapped-140.0.4/bin/firefox'\\\"\\n exit 1\\nfi\\n\\n#########################\\n# #\\n# EXTRA PREF CHANGES #\\n# #\\n#########################\\n# Link the runtime. The executable itself has to be copied,\\n# because it will resolve paths relative to its true location.\\n# Any symbolic links have to be replicated as well.\\ncd \\\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-firefox-unwrapped-140.0.4\\\"\\nfind . -type d -exec mkdir -p \\\"$out\\\"/{} \\\\;\\n\\nfind . 
-type f \\\\( -not -name \\\"firefox\\\" \\\\) -exec ln -sT \\\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-firefox-unwrapped-140.0.4\\\"/{} \\\"$out\\\"/{} \\\\;\\n\\nfind . -type f \\\\( -name \\\"firefox\\\" -o -name \\\"firefox-bin\\\" \\\\) -print0 | while read -d $'\\\\0' f; do\\n cp -P --no-preserve=mode,ownership --remove-destination \\\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-firefox-unwrapped-140.0.4/$f\\\" \\\"$out/$f\\\"\\n chmod a+rwx \\\"$out/$f\\\"\\ndone\\n\\n# fix links and absolute references\\n\\nfind . -type l -print0 | while read -d $'\\\\0' l; do\\n target=\\\"$(readlink \\\"$l\\\")\\\"\\n target=${target/#\\\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-firefox-unwrapped-140.0.4\\\"/\\\"$out\\\"}\\n ln -sfT \\\"$target\\\" \\\"$out/$l\\\"\\ndone\\n\\ncd \\\"$out\\\"\\n\\n\\n# create the wrapper\\n\\nexecutablePrefix=\\\"$out/bin\\\"\\nexecutablePath=\\\"$out/bin/firefox\\\"\\noldWrapperArgs=()\\n\\nif [[ -L $executablePath ]]; then\\n # Symbolic link: wrap the link's target.\\n oldExe=\\\"$(readlink -v --canonicalize-existing \\\"$executablePath\\\")\\\"\\n rm \\\"$executablePath\\\"\\nelif wrapperCmd=$(/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-extract-binary-wrapper-cmd \\\"$executablePath\\\"); [[ $wrapperCmd ]]; then\\n # If the executable is a binary wrapper, we need to update its target to\\n # point to $out, but we can't just edit the binary in-place because of length\\n # issues. So we extract the command used to create the wrapper and add the\\n # arguments to our wrapper.\\n parseMakeCWrapperCall() {\\n shift # makeCWrapper\\n oldExe=$1; shift\\n oldWrapperArgs=(\\\"$@\\\")\\n }\\n eval \\\"parseMakeCWrapperCall ${wrapperCmd//\\\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-firefox-unwrapped-140.0.4\\\"/\\\"$out\\\"}\\\"\\n rm \\\"$executablePath\\\"\\nelse\\n if read -rn2 shebang < \\\"$executablePath\\\" && [[ $shebang == '#!' 
]]; then\\n # Shell wrapper: patch in place to point to $out.\\n sed -i \\\"s@/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-firefox-unwrapped-140.0.4@$out@g\\\" \\\"$executablePath\\\"\\n fi\\n # Suffix the executable with -old, because -wrapped might already be used by the old wrapper.\\n oldExe=\\\"$executablePrefix/.firefox\\\"-old\\n mv \\\"$executablePath\\\" \\\"$oldExe\\\"\\nfi\\nappendToVar makeWrapperArgs --prefix XDG_DATA_DIRS : \\\"$GSETTINGS_SCHEMAS_PATH\\\"\\nconcatTo makeWrapperArgs oldWrapperArgs\\n\\nmakeWrapper \\\"$oldExe\\\" \\\"$out/bin/firefox\\\" \\\"${makeWrapperArgs[@]}\\\"\\n\\n#############################\\n# #\\n# END EXTRA PREF CHANGES #\\n# #\\n#############################\\nif [ -e \\\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-firefox-unwrapped-140.0.4/share/icons\\\" ]; then\\n mkdir -p \\\"$out/share\\\"\\n ln -s \\\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-firefox-unwrapped-140.0.4/share/icons\\\" \\\"$out/share/icons\\\"\\nelse\\n for res in 16 32 48 64 128; do\\n mkdir -p \\\"$out/share/icons/hicolor/${res}x${res}/apps\\\"\\n icon=$( find \\\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-firefox-unwrapped-140.0.4/lib/\\\" -name \\\"default${res}.png\\\" )\\n if [ -e \\\"$icon\\\" ]; then ln -s \\\"$icon\\\" \\\\\\n \\\"$out/share/icons/hicolor/${res}x${res}/apps/firefox.png\\\"\\n fi\\n done\\nfi\\n\\ninstall -m 644 -D -t $out/share/applications $desktopItem/share/applications/*\\n\\nmkdir -p $out/lib/mozilla/native-messaging-hosts\\nfor ext in ; do\\n ln -sLt $out/lib/mozilla/native-messaging-hosts $ext/lib/mozilla/native-messaging-hosts/*\\ndone\\n\\nmkdir -p $out/lib/mozilla/pkcs11-modules\\nfor ext in ; do\\n ln -sLt $out/lib/mozilla/pkcs11-modules $ext/lib/mozilla/pkcs11-modules/*\\ndone\\n\\n\\n#########################\\n# #\\n# EXTRA PREF CHANGES #\\n# #\\n#########################\\n# user customization\\nlibDir=\\\"$out/lib/firefox\\\"\\n\\n# creating policies.json\\nmkdir -p \\\"$libDir/distribution\\\"\\n\\nPOL_PATH=\\\"$libDir/distribution/policies.json\\\"\\nrm -f \\\"$POL_PATH\\\"\\ncat /nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-policies.json >> \\\"$POL_PATH\\\"\\n\\nextraPoliciesFiles=()\\nfor extraPoliciesFile in \\\"${extraPoliciesFiles[@]}\\\"; do\\n jq -s '.[0] * .[1]' $extraPoliciesFile \\\"$POL_PATH\\\" > .tmp.json\\n mv .tmp.json \\\"$POL_PATH\\\"\\ndone\\n\\n# preparing for autoconfig\\nprefsDir=\\\"$out/lib/firefox/defaults/pref\\\"\\nmkdir -p \\\"$prefsDir\\\"\\n\\necho 'pref(\\\"general.config.filename\\\", \\\"mozilla.cfg\\\");' > \\\"$prefsDir/autoconfig.js\\\"\\necho 'pref(\\\"general.config.obscure_value\\\", 0);' >> \\\"$prefsDir/autoconfig.js\\\"\\n\\ncat > \\\"$libDir/mozilla.cfg\\\" << EOF\\n// First line must be a comment\\n\\n// Disables addon signature checking\\n// to be able to install addons that do not have an extid\\n// Security is maintained because only user whitelisted addons\\n// with a checksum can be installed\\n\\n\\nEOF\\n\\nextraPrefsFiles=()\\nfor extraPrefsFile in \\\"${extraPrefsFiles[@]}\\\"; do\\n cat \\\"$extraPrefsFile\\\" >> \\\"$libDir/mozilla.cfg\\\"\\ndone\\n\\ncat >> \\\"$libDir/mozilla.cfg\\\" << EOF\\n\\nEOF\\n\\nmkdir -p \\\"$libDir/distribution/extensions\\\"\\n\\n#############################\\n# #\\n# END EXTRA PREF CHANGES #\\n# 
#\\n#############################\\n\",\"buildInputs\":[\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-gtk+3-3.24.49-dev\"],\"builder\":\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-bash-5.2p37/bin/bash\",\"cmakeFlags\":[],\"configureFlags\":[],\"depsBuildBuild\":[],\"depsBuildBuildPropagated\":[],\"depsBuildTarget\":[],\"depsBuildTargetPropagated\":[],\"depsHostHost\":[],\"depsHostHostPropagated\":[],\"depsTargetTarget\":[],\"depsTargetTargetPropagated\":[],\"desktopItem\":\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-firefox.desktop\",\"doCheck\":false,\"doInstallCheck\":false,\"env\":{\"NIX_MAIN_PROGRAM\":\"firefox\"},\"gtk_modules\":[\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libcanberra-0.30/lib/gtk-3.0/\"],\"libs\":\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-systemd-minimal-libs-257.6/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libva-2.22.0/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-mesa-libgbm-25.1.0/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libnotify-0.8.6/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libXScrnSaver-1.2.4/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-cups-2.4.12-lib/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pciutils-3.14.0/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-vulkan-loader-1.4.313.0/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-speech-dispatcher-0.12.1/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pipewire-1.4.6/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-ffmpeg-7.1.1-lib/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-krb5-1.21.3-lib/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libglvnd-1.7.0/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libpulseaudio-17.0/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-alsa-lib-1.2.14/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-sndio-1.10.0/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libjack2-1.9.22/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libcanberra-0.30/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-systemd-minimal-libs-257.6/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libva-2.22.0/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-mesa-libgbm-25.1.0/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libnotify-0.8.6/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libXScrnSaver-1.2.4/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-cups-2.4.12-lib/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pciutils-3.14.0/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-vulkan-loader-1.4.313.0/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-speech-dispatcher-0.12.1/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pipewire-1.4.6/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-ffmpeg-7.1.1-lib/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-krb5-1.21.3-lib/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libglvnd-1.7.0/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libpulseaudio-17.0/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-alsa-lib-1.2.14/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-sndio-1.10.0/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libjack2-1.9.22/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libcanberra-0.30/lib64\",\"makeWrapperArgs\":[\"--prefix\",\"LD_LIBRARY_PATH\",\":\",\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-systemd-minimal-libs-257.6/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libva-2.22.0/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-mesa-libgbm-25.1.0/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libnotify-0.8.6/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libXScrnSaver-1.2.4/lib:/nix/store/aaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaa-cups-2.4.12-lib/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pciutils-3.14.0/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-vulkan-loader-1.4.313.0/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-speech-dispatcher-0.12.1/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pipewire-1.4.6/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-ffmpeg-7.1.1-lib/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-krb5-1.21.3-lib/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libglvnd-1.7.0/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libpulseaudio-17.0/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-alsa-lib-1.2.14/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-sndio-1.10.0/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libjack2-1.9.22/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libcanberra-0.30/lib:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-systemd-minimal-libs-257.6/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libva-2.22.0/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-mesa-libgbm-25.1.0/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libnotify-0.8.6/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libXScrnSaver-1.2.4/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-cups-2.4.12-lib/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pciutils-3.14.0/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-vulkan-loader-1.4.313.0/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-speech-dispatcher-0.12.1/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-pipewire-1.4.6/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-ffmpeg-7.1.1-lib/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-krb5-1.21.3-lib/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libglvnd-1.7.0/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libpulseaudio-17.0/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-alsa-lib-1.2.14/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-sndio-1.10.0/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libjack2-1.9.22/lib64:/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libcanberra-0.30/lib64\",\"--suffix\",\"PATH\",\":\",\"/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9/bin\",\"--set\",\"MOZ_APP_LAUNCHER\",\"firefox\",\"--set\",\"MOZ_LEGACY_PROFILES\",\"1\",\"--set\",\"MOZ_ALLOW_DOWNGRADE\",\"1\",\"--suffix\",\"GTK_PATH\",\":\",\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-libcanberra-0.30/lib/gtk-3.0/\",\"--suffix\",\"XDG_DATA_DIRS\",\":\",\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-adwaita-icon-theme-48.0/share\",\"--set-default\",\"MOZ_ENABLE_WAYLAND\",\"1\",\"--suffix\",\"PATH\",\":\",\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-xdg-utils-1.2.1/bin\",\"--set\",\"MOZ_SYSTEM_DIR\",\"/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9/lib/mozilla\"],\"mesonFlags\":[],\"name\":\"firefox-140.0.4\",\"nativeBuildInputs\":[\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-make-shell-wrapper-hook\",\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-lndir-1.0.5\",\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-jq-1.8.1-dev\"],\"outputChecks\":{\"out\":{\"disallowedRequisites\":[\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-gcc-wrapper-14.3.0\"]}},\"outputs\":[\"out\"],\"patches\":[],\"pname\":\"firefox\",\"preferLocalBuild\":true,\"propagatedBuildInputs\":[],\"propagatedNativeBuildInputs\":[],\"stdenv\":\"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-stdenv-linux\",\"strictDeps\":false,\"system\":\"x86_64-linux\",\"version\":\"140.0.4\"}"),("out","/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-firefox-140.0.4")]) \ No newline at end of file diff --git 
a/src/libstore-tests/data/derivation/hello.drv b/src/libstore-tests/data/derivation/hello.drv index 741f7745c..f92a4b485 100644 --- a/src/libstore-tests/data/derivation/hello.drv +++ b/src/libstore-tests/data/derivation/hello.drv @@ -1 +1 @@ -Derive([("out","/nix/store/hhg83gh653wjw4ny49xn92f13v2j1za4-hello-2.12.2","","")],[("/nix/store/1xz4avqqrxqsxw7idz119vdzw837p1n1-version-check-hook.drv",["out"]),("/nix/store/bsv47sbqcar3205il55spxqacxp8j0fj-hello-2.12.2.tar.gz.drv",["out"]),("/nix/store/s4b8yadif84kiv8gyr9nxdi6zbg69b4g-bash-5.2p37.drv",["out"]),("/nix/store/sc2pgkzc1s6zp5dp8j7wsd4msilsnijn-stdenv-linux.drv",["out"])],["/nix/store/shkw4qm9qcw5sc5n1k5jznc83ny02r39-default-builder.sh","/nix/store/vj1c3wf9c11a0qs6p3ymfvrnsdgsdcbq-source-stdenv.sh"],"x86_64-linux","/nix/store/p79bgyzmmmddi554ckwzbqlavbkw07zh-bash-5.2p37/bin/bash",["-e","/nix/store/vj1c3wf9c11a0qs6p3ymfvrnsdgsdcbq-source-stdenv.sh","/nix/store/shkw4qm9qcw5sc5n1k5jznc83ny02r39-default-builder.sh"],[("NIX_MAIN_PROGRAM","hello"),("__structuredAttrs",""),("buildInputs",""),("builder","/nix/store/p79bgyzmmmddi554ckwzbqlavbkw07zh-bash-5.2p37/bin/bash"),("cmakeFlags",""),("configureFlags",""),("depsBuildBuild",""),("depsBuildBuildPropagated",""),("depsBuildTarget",""),("depsBuildTargetPropagated",""),("depsHostHost",""),("depsHostHostPropagated",""),("depsTargetTarget",""),("depsTargetTargetPropagated",""),("doCheck","1"),("doInstallCheck","1"),("mesonFlags",""),("name","hello-2.12.2"),("nativeBuildInputs","/nix/store/fxzn6kr5anxn5jgh511x56wrg8b3a99a-version-check-hook"),("out","/nix/store/hhg83gh653wjw4ny49xn92f13v2j1za4-hello-2.12.2"),("outputs","out"),("patches",""),("pname","hello"),("postInstallCheck","stat \"${!outputBin}/bin/hello\"\n"),("propagatedBuildInputs",""),("propagatedNativeBuildInputs",""),("src","/nix/store/dw402azxjrgrzrk6j0p66wkqrab5mwgw-hello-2.12.2.tar.gz"),("stdenv","/nix/store/a13rl87yjhzqrbkc4gb0mrwz2mfkivcf-stdenv-linux"),("strictDeps",""),("system","x86_64-linux"),("version","2.12.2")]) \ No newline at end of file +Derive([("out","/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-hello-2.12.2","","")],[("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-version-check-hook.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-hello-2.12.2.tar.gz.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-bash-5.2p37.drv",["out"]),("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-stdenv-linux.drv",["out"])],["/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-default-builder.sh","/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-source-stdenv.sh"],"x86_64-linux","/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-bash-5.2p37/bin/bash",["-e","/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-source-stdenv.sh","/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-default-builder.sh"],[("NIX_MAIN_PROGRAM","hello"),("__structuredAttrs",""),("buildInputs",""),("builder","/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-bash-5.2p37/bin/bash"),("cmakeFlags",""),("configureFlags",""),("depsBuildBuild",""),("depsBuildBuildPropagated",""),("depsBuildTarget",""),("depsBuildTargetPropagated",""),("depsHostHost",""),("depsHostHostPropagated",""),("depsTargetTarget",""),("depsTargetTargetPropagated",""),("doCheck","1"),("doInstallCheck","1"),("mesonFlags",""),("name","hello-2.12.2"),("nativeBuildInputs","/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-version-check-hook"),("out","/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-hello-2.12.2"),("outputs","out"),("patches",""),("pname","hello"),("postInstallCheck","stat 
\"${!outputBin}/bin/hello\"\n"),("propagatedBuildInputs",""),("propagatedNativeBuildInputs",""),("src","/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-hello-2.12.2.tar.gz"),("stdenv","/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-stdenv-linux"),("strictDeps",""),("system","x86_64-linux"),("version","2.12.2")]) \ No newline at end of file From 1d7ac6603c8b23575b4cad1e42bbcd31455c6c65 Mon Sep 17 00:00:00 2001 From: Volker Diels-Grabsch Date: Sun, 17 Aug 2025 17:28:46 +0200 Subject: [PATCH 206/382] Fix NIX_CONF_DIR description Fix description of `NIX_CONF_DIR`. It currently says that it defaults to `prefix/etc/nix`, which would mean `/nix/etc/nix` on default installations, and contradicts the description in `conf-file-prefix.md`. This fix makes the description of `NIX_CONF_DIR` consistent with `conf-file-prefix.md`, assuming that the latter is correct. --- doc/manual/source/command-ref/env-common.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/source/command-ref/env-common.md b/doc/manual/source/command-ref/env-common.md index ee3995111..e0fd2b00e 100644 --- a/doc/manual/source/command-ref/env-common.md +++ b/doc/manual/source/command-ref/env-common.md @@ -75,7 +75,7 @@ Most Nix commands interpret the following environment variables: - [`NIX_CONF_DIR`](#env-NIX_CONF_DIR) Overrides the location of the system Nix configuration directory - (default `prefix/etc/nix`). + (default `sysconfdir/nix`, i.e. `/etc/nix` on most systems). - [`NIX_CONFIG`](#env-NIX_CONFIG) From da8759bb419b453a88df9f16c3d9e08d22965e90 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 17 Aug 2025 20:31:34 +0300 Subject: [PATCH 207/382] libcmd: Fix rendering of absolute URLs in markdown lowdown >= 1.4.0 supports LOWDOWN_TERM_NORELLINK to render absolute URLs. This is useful, since we want to keep links to web resources and such intact. --- src/libcmd/markdown.cc | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/libcmd/markdown.cc b/src/libcmd/markdown.cc index 09cd9c1fb..c3341da73 100644 --- a/src/libcmd/markdown.cc +++ b/src/libcmd/markdown.cc @@ -37,9 +37,17 @@ static std::string doRenderMarkdownToTerminal(std::string_view markdown) .vmargin = 0, # endif .feat = LOWDOWN_COMMONMARK | LOWDOWN_FENCED | LOWDOWN_DEFLIST | LOWDOWN_TABLES, - .oflags = LOWDOWN_TERM_NOLINK, + .oflags = +# if HAVE_LOWDOWN_1_4 + LOWDOWN_TERM_NORELLINK // To render full links while skipping relative ones +# else + LOWDOWN_TERM_NOLINK +# endif }; + if (!isTTY()) + opts.oflags |= LOWDOWN_TERM_NOANSI; + auto doc = lowdown_doc_new(&opts); if (!doc) throw Error("cannot allocate Markdown document"); @@ -65,7 +73,7 @@ static std::string doRenderMarkdownToTerminal(std::string_view markdown) if (!rndr_res) throw Error("allocation error while rendering Markdown"); - return filterANSIEscapes(std::string(buf->data, buf->size), !isTTY()); + return std::string(buf->data, buf->size); } std::string renderMarkdownToTerminal(std::string_view markdown) From 3ffeed497a8401a18ea6f222afde5c7024749d15 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 17 Aug 2025 20:49:50 +0300 Subject: [PATCH 208/382] packaging: Update lowdown to 2.0.2 Nixpkgs has been updated to this version, and it seems the next stable NixOS release will ship with lowdown 2.0. 
--- packaging/dependencies.nix | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix index 17ba06b4d..5f1dc55e6 100644 --- a/packaging/dependencies.nix +++ b/packaging/dependencies.nix @@ -64,6 +64,18 @@ scope: { NIX_CFLAGS_COMPILE = "-DINITIAL_MARK_STACK_SIZE=1048576"; }); + lowdown = pkgs.lowdown.overrideAttrs (prevAttrs: rec { + version = "2.0.2"; + src = pkgs.fetchurl { + url = "https://kristaps.bsd.lv/lowdown/snapshots/lowdown-${version}.tar.gz"; + hash = "sha512-cfzhuF4EnGmLJf5EGSIbWqJItY3npbRSALm+GarZ7SMU7Hr1xw0gtBFMpOdi5PBar4TgtvbnG4oRPh+COINGlA=="; + }; + nativeBuildInputs = prevAttrs.nativeBuildInputs ++ [ pkgs.bmake ]; + postInstall = + lib.replaceStrings [ "lowdown.so.1" "lowdown.1.dylib" ] [ "lowdown.so.2" "lowdown.2.dylib" ] + prevAttrs.postInstall; + }); + # TODO Hack until https://github.com/NixOS/nixpkgs/issues/45462 is fixed. boost = (pkgs.boost.override { From b21304fe4c491b871682fc292d9c31410cbb418a Mon Sep 17 00:00:00 2001 From: Farid Zakaria Date: Thu, 14 Aug 2025 20:42:21 -0700 Subject: [PATCH 209/382] libfetchers/git: Add support for '.' in gitsubmodules Period '.' is a special branch name in the gitsubmodule file which represents the branch of the parent repository [1]. We add support for this by registering the ref of the InputAccessor to be that of the parent input if '.' is encountered. Fixes #13215 [1]: man gitmodules --- src/libfetchers-tests/git.cc | 199 ++++++++++++++++++++++++++++++ src/libfetchers-tests/meson.build | 1 + src/libfetchers-tests/package.nix | 18 ++- src/libfetchers/git.cc | 12 +- 4 files changed, 217 insertions(+), 13 deletions(-) create mode 100644 src/libfetchers-tests/git.cc diff --git a/src/libfetchers-tests/git.cc b/src/libfetchers-tests/git.cc new file mode 100644 index 000000000..83ee1a1a2 --- /dev/null +++ b/src/libfetchers-tests/git.cc @@ -0,0 +1,199 @@ +#include "nix/store/store-open.hh" +#include "nix/fetchers/fetch-settings.hh" +#include "nix/fetchers/fetchers.hh" +#include "nix/fetchers/git-utils.hh" + +#include +#include + +#include + +namespace { + +template +struct Deleter +{ + void operator()(T * p) const + { + if (p) + F(p); + } +}; + +template +using Handle = std::unique_ptr>; + +using RepoHandle = Handle; +using IndexHandle = Handle; +using TreeHandle = Handle; +using SigHandle = Handle; +using RefHandle = Handle; +using CommitHandle = Handle; +using SubmoduleHandle = Handle; + +#define CHECK_LIBGIT(expr) ASSERT_TRUE((expr) >= 0) << git_error_last() + +static void commitAll(git_repository * repo, const char * msg) +{ + IndexHandle idx; + { + git_index * raw = nullptr; + CHECK_LIBGIT(git_repository_index(&raw, repo)); + idx.reset(raw); + } + CHECK_LIBGIT(git_index_add_all(idx.get(), nullptr, 0, nullptr, nullptr)); + CHECK_LIBGIT(git_index_write(idx.get())); + + git_oid treeId{}; + CHECK_LIBGIT(git_index_write_tree(&treeId, idx.get())); + TreeHandle tree; + { + git_tree * raw = nullptr; + CHECK_LIBGIT(git_tree_lookup(&raw, repo, &treeId)); + tree.reset(raw); + } + + SigHandle sig; + { + git_signature * raw = nullptr; + CHECK_LIBGIT(git_signature_now(&raw, "you", "you@example.com")); + sig.reset(raw); + } + + git_oid commitId{}; + if (git_repository_is_empty(repo) == 1) { + CHECK_LIBGIT(git_commit_create_v(&commitId, repo, "HEAD", sig.get(), sig.get(), nullptr, msg, tree.get(), 0)); + CHECK_LIBGIT(git_reference_create(nullptr, repo, "refs/heads/main", &commitId, true, nullptr)); + CHECK_LIBGIT(git_repository_set_head(repo, "refs/heads/main")); + } else { + 
RefHandle head; + { + git_reference * raw = nullptr; + CHECK_LIBGIT(git_repository_head(&raw, repo)); + head.reset(raw); + } + CommitHandle parent; + { + git_commit * raw = nullptr; + CHECK_LIBGIT(git_commit_lookup(&raw, repo, git_reference_target(head.get()))); + parent.reset(raw); + } + const git_commit * parents[] = {parent.get()}; + CHECK_LIBGIT(git_commit_create( + &commitId, + repo, + "HEAD", + sig.get(), + sig.get(), + /*message_encoding=*/nullptr, + msg, + tree.get(), + /*parent_count=*/1, + &parents[0])); + } +} + +} // namespace + +using namespace nix; + +class GitTest : public ::testing::Test +{ + std::unique_ptr delTmpDir; + +protected: + std::filesystem::path tmpDir; + + void SetUp() override + { + tmpDir = createTempDir(); + delTmpDir = std::make_unique(tmpDir, /*recursive=*/true); + nix::initLibStore(/*loadConfig=*/false); + git_libgit2_init(); + } + + void TearDown() override + { + delTmpDir.reset(); + } +}; + +// Regression test for https://github.com/NixOS/nix/issues/13215 +TEST_F(GitTest, submodulePeriodSupport) +{ + auto storePath = tmpDir / "store"; + auto repoPath = tmpDir / "repo"; + auto submodulePath = tmpDir / "submodule"; + + // Set up our git directories: one top level and a submodule + // the submodule in the .gitmodules has the branch listed as '.' + + // 1) Create sub repo + { + git_repository * raw = nullptr; + CHECK_LIBGIT(git_repository_init(&raw, submodulePath.string().c_str(), /*is_bare=*/0)); + RepoHandle sub(raw); + writeFile(submodulePath / "lib.txt", "hello from submodule\n"); + commitAll(sub.get(), "init sub"); + } + + // 2) Create super repo + RepoHandle super; + { + git_repository * raw = nullptr; + CHECK_LIBGIT(git_repository_init(&raw, repoPath.string().c_str(), /*is_bare=*/0)); + super.reset(raw); + } + + writeFile(repoPath / "README.md", "# super\n"); + commitAll(super.get(), "init super"); + + // 3) Add submodule at deps/sub + { + git_repository * raw = nullptr; + git_clone_options cloneOpts = GIT_CLONE_OPTIONS_INIT; + // clone from local subPath into superPath/deps/sub + CHECK_LIBGIT( + git_clone(&raw, submodulePath.string().c_str(), (repoPath / "deps" / "sub").string().c_str(), &cloneOpts)); + RepoHandle sub(raw); + } + + // 4) Add submodule and set branch="." + SubmoduleHandle sm; + { + git_submodule * raw = nullptr; + CHECK_LIBGIT(git_submodule_add_setup( + &raw, + super.get(), + "../submodule", + "deps/sub", + /*use_gitlink=*/1)); + sm.reset(raw); + } + CHECK_LIBGIT(git_submodule_set_branch(super.get(), git_submodule_name(sm.get()), /*branch=*/".")); + CHECK_LIBGIT(git_submodule_sync(sm.get())); + + // 5) Finalize now that the worktree exists; libgit2 can read its HEAD OID + CHECK_LIBGIT(git_submodule_add_finalize(sm.get())); + // 6) Commit the addition in super + commitAll(super.get(), "Add submodule with branch='.'"); + + // TODO: Use dummy:// store with MemorySourceAccessor. 
+ Path storeTmpDir = createTempDir(); + auto storeTmpDirAutoDelete = AutoDelete(storeTmpDir, true); + ref store = openStore(storeTmpDir); + + auto settings = fetchers::Settings{}; + auto input = fetchers::Input::fromAttrs( + settings, + { + {"url", "file://" + repoPath.string()}, + {"submodules", Explicit{true}}, + {"type", "git"}, + {"ref", "main"}, + }); + + auto [accessor, i] = input.getAccessor(store); + + ASSERT_EQ(accessor->readFile(CanonPath("deps/sub/lib.txt")), "hello from submodule\n"); +} diff --git a/src/libfetchers-tests/meson.build b/src/libfetchers-tests/meson.build index 51373aefa..cd23c1f0c 100644 --- a/src/libfetchers-tests/meson.build +++ b/src/libfetchers-tests/meson.build @@ -41,6 +41,7 @@ subdir('nix-meson-build-support/common') sources = files( 'access-tokens.cc', 'git-utils.cc', + 'git.cc', 'nix_api_fetchers.cc', 'public-key.cc', ) diff --git a/src/libfetchers-tests/package.nix b/src/libfetchers-tests/package.nix index 48c1a07d8..780618725 100644 --- a/src/libfetchers-tests/package.nix +++ b/src/libfetchers-tests/package.nix @@ -3,6 +3,7 @@ buildPackages, stdenv, mkMesonExecutable, + writableTmpDirAsHomeHook, nix-fetchers, nix-fetchers-c, @@ -57,18 +58,13 @@ mkMesonExecutable (finalAttrs: { runCommand "${finalAttrs.pname}-run" { meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; + buildInputs = [ writableTmpDirAsHomeHook ]; } - ( - lib.optionalString stdenv.hostPlatform.isWindows '' - export HOME="$PWD/home-dir" - mkdir -p "$HOME" - '' - + '' - export _NIX_TEST_UNIT_DATA=${resolvePath ./data} - ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} - touch $out - '' - ); + '' + export _NIX_TEST_UNIT_DATA=${resolvePath ./data} + ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} + touch $out + ''; }; }; diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 43105c699..bd1e1fffe 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -739,8 +739,16 @@ struct GitInputScheme : InputScheme fetchers::Attrs attrs; attrs.insert_or_assign("type", "git"); attrs.insert_or_assign("url", resolved); - if (submodule.branch != "") - attrs.insert_or_assign("ref", submodule.branch); + if (submodule.branch != "") { + // A special value of . is used to indicate that the name of the branch in the submodule + // should be the same name as the current branch in the current repository. + // https://git-scm.com/docs/gitmodules + if (submodule.branch == ".") { + attrs.insert_or_assign("ref", ref); + } else { + attrs.insert_or_assign("ref", submodule.branch); + } + } attrs.insert_or_assign("rev", submoduleRev.gitRev()); attrs.insert_or_assign("exportIgnore", Explicit{exportIgnore}); attrs.insert_or_assign("submodules", Explicit{true}); From 0216775ab78030d507807febd7ff4b9da777fc51 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 18 Aug 2025 12:57:01 +0300 Subject: [PATCH 210/382] packaging: Fix lowdown cross Stupid mistake on my part. `binaryTarballCross.x86_64-linux.x86_64-unknown-freebsd` now builds as expected. 
--- packaging/dependencies.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix index 5f1dc55e6..3d7da9acb 100644 --- a/packaging/dependencies.nix +++ b/packaging/dependencies.nix @@ -70,7 +70,7 @@ scope: { url = "https://kristaps.bsd.lv/lowdown/snapshots/lowdown-${version}.tar.gz"; hash = "sha512-cfzhuF4EnGmLJf5EGSIbWqJItY3npbRSALm+GarZ7SMU7Hr1xw0gtBFMpOdi5PBar4TgtvbnG4oRPh+COINGlA=="; }; - nativeBuildInputs = prevAttrs.nativeBuildInputs ++ [ pkgs.bmake ]; + nativeBuildInputs = prevAttrs.nativeBuildInputs ++ [ pkgs.buildPackages.bmake ]; postInstall = lib.replaceStrings [ "lowdown.so.1" "lowdown.1.dylib" ] [ "lowdown.so.2" "lowdown.2.dylib" ] prevAttrs.postInstall; From c82b67fa05a161600b264347321dfda618c79efc Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Sun, 10 Aug 2025 17:48:19 +0200 Subject: [PATCH 211/382] BasicClientConnection::queryPathInfo(): Don't throw exception for invalid paths This caused RemoteStore::queryPathInfoUncached() to mark the connection as invalid (see RemoteStore::ConnectionHandle::~ConnectionHandle()), causing it to disconnect and reconnect after every lookup of an invalid path. This caused huge slowdowns in conjunction with 19f89eb6842747570f262c003d977f02cb155968 and lazy-trees. --- .../include/nix/store/worker-protocol-connection.hh | 3 ++- src/libstore/remote-store.cc | 13 +++++++------ src/libstore/worker-protocol-connection.cc | 6 +++--- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/src/libstore/include/nix/store/worker-protocol-connection.hh b/src/libstore/include/nix/store/worker-protocol-connection.hh index f7ddfea4f..73dd50719 100644 --- a/src/libstore/include/nix/store/worker-protocol-connection.hh +++ b/src/libstore/include/nix/store/worker-protocol-connection.hh @@ -109,7 +109,8 @@ struct WorkerProto::BasicClientConnection : WorkerProto::BasicConnection const StorePathSet & paths, SubstituteFlag maybeSubstitute); - UnkeyedValidPathInfo queryPathInfo(const StoreDirConfig & store, bool * daemonException, const StorePath & path); + std::optional + queryPathInfo(const StoreDirConfig & store, bool * daemonException, const StorePath & path); void putBuildDerivationRequest( const StoreDirConfig & store, diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index d3446093d..8c2f268c3 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -259,13 +259,14 @@ void RemoteStore::queryPathInfoUncached( const StorePath & path, Callback> callback) noexcept { try { - std::shared_ptr info; - { + auto info = ({ auto conn(getConnection()); - info = std::make_shared( - StorePath{path}, conn->queryPathInfo(*this, &conn.daemonException, path)); - } - callback(std::move(info)); + conn->queryPathInfo(*this, &conn.daemonException, path); + }); + if (!info) + callback(nullptr); + else + callback(std::make_shared(StorePath{path}, *info)); } catch (...) 
{ callback.rethrow(); } diff --git a/src/libstore/worker-protocol-connection.cc b/src/libstore/worker-protocol-connection.cc index 015a79ad6..987d0c8dd 100644 --- a/src/libstore/worker-protocol-connection.cc +++ b/src/libstore/worker-protocol-connection.cc @@ -244,7 +244,7 @@ void WorkerProto::BasicServerConnection::postHandshake(const StoreDirConfig & st WorkerProto::write(store, *this, info); } -UnkeyedValidPathInfo WorkerProto::BasicClientConnection::queryPathInfo( +std::optional WorkerProto::BasicClientConnection::queryPathInfo( const StoreDirConfig & store, bool * daemonException, const StorePath & path) { to << WorkerProto::Op::QueryPathInfo << store.printStorePath(path); @@ -253,14 +253,14 @@ UnkeyedValidPathInfo WorkerProto::BasicClientConnection::queryPathInfo( } catch (Error & e) { // Ugly backwards compatibility hack. if (e.msg().find("is not valid") != std::string::npos) - throw InvalidPath(std::move(e.info())); + return std::nullopt; throw; } if (GET_PROTOCOL_MINOR(protoVersion) >= 17) { bool valid; from >> valid; if (!valid) - throw InvalidPath("path '%s' is not valid", store.printStorePath(path)); + return std::nullopt; } return WorkerProto::Serialise::read(store, *this); } From f51779ee2518c961cf4c063e676ea328cb9ea6cf Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Sun, 10 Aug 2025 21:40:03 +0200 Subject: [PATCH 212/382] RemoteStore::addToStoreFromDump(): Invalidate cache entry for added path --- src/libstore/include/nix/store/store-api.hh | 2 ++ src/libstore/remote-store.cc | 4 +++- src/libstore/store-api.cc | 5 +++++ 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/src/libstore/include/nix/store/store-api.hh b/src/libstore/include/nix/store/store-api.hh index 528375851..6393ccbc7 100644 --- a/src/libstore/include/nix/store/store-api.hh +++ b/src/libstore/include/nix/store/store-api.hh @@ -311,6 +311,8 @@ protected: LRUCache pathInfoCache; }; + void invalidatePathInfoCacheFor(const StorePath & path); + SharedSync state; std::shared_ptr diskCache; diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 8c2f268c3..5694fa466 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -457,7 +457,9 @@ StorePath RemoteStore::addToStoreFromDump( } if (fsm != dumpMethod) unsupported("RemoteStore::addToStoreFromDump doesn't support this `dumpMethod` `hashMethod` combination"); - return addCAToStore(dump, name, hashMethod, hashAlgo, references, repair)->path; + auto storePath = addCAToStore(dump, name, hashMethod, hashAlgo, references, repair)->path; + invalidatePathInfoCacheFor(storePath); + return storePath; } void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index dd8c39557..bd5ae9284 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -319,6 +319,11 @@ bool Store::PathInfoCacheValue::isKnownNow() return std::chrono::steady_clock::now() < time_point + ttl; } +void Store::invalidatePathInfoCacheFor(const StorePath & path) +{ + state.lock()->pathInfoCache.erase(path.to_string()); +} + std::map> Store::queryStaticPartialDerivationOutputMap(const StorePath & path) { std::map> outputs; From fc336815833fd6accb1b3a83a006d9f014765aff Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 18 Aug 2025 19:58:45 +0300 Subject: [PATCH 213/382] flake: nixpkgs: nixos-unstable -> nixos-25.05-small MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit About time we upgraded our nixpkgs flake input. Ideally we'd have automation to do this. Flake lock file updates: • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/adaa24fbf46737f3f1b5497bf64bae750f82942e?narHash=sha256-qhFMmDkeJX9KJwr5H32f1r7Prs7XbQWtO0h3V0a0rFY%3D' (2025-05-13) → 'github:NixOS/nixpkgs/cd32a774ac52caaa03bcfc9e7591ac8c18617ced?narHash=sha256-VtMQg02B3kt1oejwwrGn50U9Xbjgzfbb5TV5Wtx8dKI%3D' (2025-08-17) --- flake.lock | 8 ++++---- flake.nix | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/flake.lock b/flake.lock index 3075eabc2..073e371f5 100644 --- a/flake.lock +++ b/flake.lock @@ -63,16 +63,16 @@ }, "nixpkgs": { "locked": { - "lastModified": 1747179050, - "narHash": "sha256-qhFMmDkeJX9KJwr5H32f1r7Prs7XbQWtO0h3V0a0rFY=", + "lastModified": 1755442223, + "narHash": "sha256-VtMQg02B3kt1oejwwrGn50U9Xbjgzfbb5TV5Wtx8dKI=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "adaa24fbf46737f3f1b5497bf64bae750f82942e", + "rev": "cd32a774ac52caaa03bcfc9e7591ac8c18617ced", "type": "github" }, "original": { "owner": "NixOS", - "ref": "nixos-unstable", + "ref": "nixos-25.05-small", "repo": "nixpkgs", "type": "github" } diff --git a/flake.nix b/flake.nix index 6a6f2cfd8..fd623c807 100644 --- a/flake.nix +++ b/flake.nix @@ -1,7 +1,7 @@ { description = "The purely functional package manager"; - inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; + inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05-small"; inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446"; From 1d943f581908f35075a84a3d89c2eba3ff35067f Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 18 Aug 2025 20:29:45 +0300 Subject: [PATCH 214/382] flake: Apply nixfmt 1.0.0 --- ci/gha/tests/default.nix | 12 +- doc/manual/package.nix | 35 +++--- docker.nix | 103 +++++++++--------- packaging/components.nix | 15 +-- packaging/dev-shell.nix | 46 ++++---- packaging/everything.nix | 63 ++++++----- src/libcmd/package.nix | 3 +- src/libexpr/package.nix | 6 +- src/libstore-tests/package.nix | 17 ++- src/libstore/package.nix | 34 +++--- src/libutil/package.nix | 3 +- src/perl/package.nix | 3 +- tests/functional/lang/eval-okay-attrnames.nix | 19 ++-- tests/functional/lang/eval-okay-attrs.nix | 21 ++-- tests/functional/lang/eval-okay-attrs2.nix | 21 ++-- tests/functional/lang/eval-okay-import.nix | 3 +- tests/functional/nested-sandboxing/runner.nix | 47 ++++---- tests/functional/package.nix | 37 +++---- tests/nixos/remote-builds.nix | 3 +- 19 files changed, 243 insertions(+), 248 deletions(-) diff --git a/ci/gha/tests/default.nix b/ci/gha/tests/default.nix index 5dbb3f407..74d0b8c7e 100644 --- a/ci/gha/tests/default.nix +++ b/ci/gha/tests/default.nix @@ -55,13 +55,11 @@ let }; # Done in a pre-configure hook, because $NIX_BUILD_TOP needs to be substituted. 
- preConfigure = - prevAttrs.preConfigure or "" - + '' - mappingFlag=" -fcoverage-prefix-map=$NIX_BUILD_TOP/${finalAttrs.src.name}=${finalAttrs.src}" - CFLAGS+="$mappingFlag" - CXXFLAGS+="$mappingFlag" - ''; + preConfigure = prevAttrs.preConfigure or "" + '' + mappingFlag=" -fcoverage-prefix-map=$NIX_BUILD_TOP/${finalAttrs.src.name}=${finalAttrs.src}" + CFLAGS+="$mappingFlag" + CXXFLAGS+="$mappingFlag" + ''; }; componentOverrides = diff --git a/doc/manual/package.nix b/doc/manual/package.nix index af6d46a2a..69b7c0e49 100644 --- a/doc/manual/package.nix +++ b/doc/manual/package.nix @@ -46,24 +46,23 @@ mkMesonDerivation (finalAttrs: { ]; # Hack for sake of the dev shell - passthru.externalNativeBuildInputs = - [ - meson - ninja - (lib.getBin lowdown-unsandboxed) - mdbook - mdbook-linkcheck - jq - python3 - rsync - changelog-d - ] - ++ lib.optionals (!officialRelease) [ - # When not an official release, we likely have changelog entries that have - # yet to be rendered. - # When released, these are rendered into a committed file to save a dependency. - changelog-d - ]; + passthru.externalNativeBuildInputs = [ + meson + ninja + (lib.getBin lowdown-unsandboxed) + mdbook + mdbook-linkcheck + jq + python3 + rsync + changelog-d + ] + ++ lib.optionals (!officialRelease) [ + # When not an official release, we likely have changelog entries that have + # yet to be rendered. + # When released, these are rendered into a committed file to save a dependency. + changelog-d + ]; nativeBuildInputs = finalAttrs.passthru.externalNativeBuildInputs ++ [ nix-cli diff --git a/docker.nix b/docker.nix index ed62c3a4e..619e75c54 100644 --- a/docker.nix +++ b/docker.nix @@ -65,62 +65,61 @@ let iana-etc gitMinimal openssh - ] ++ extraPkgs; + ] + ++ extraPkgs; - users = - { + users = { - root = { - uid = 0; - shell = lib.getExe bashInteractive; - home = "/root"; - gid = 0; - groups = [ "root" ]; - description = "System administrator"; - }; - - nobody = { - uid = 65534; - shell = lib.getExe' shadow "nologin"; - home = "/var/empty"; - gid = 65534; - groups = [ "nobody" ]; - description = "Unprivileged account (don't use!)"; - }; - - } - // lib.optionalAttrs (uid != 0) { - "${uname}" = { - uid = uid; - shell = lib.getExe bashInteractive; - home = "/home/${uname}"; - gid = gid; - groups = [ "${gname}" ]; - description = "Nix user"; - }; - } - // lib.listToAttrs ( - map (n: { - name = "nixbld${toString n}"; - value = { - uid = 30000 + n; - gid = 30000; - groups = [ "nixbld" ]; - description = "Nix build user ${toString n}"; - }; - }) (lib.lists.range 1 32) - ); - - groups = - { - root.gid = 0; - nixbld.gid = 30000; - nobody.gid = 65534; - } - // lib.optionalAttrs (gid != 0) { - "${gname}".gid = gid; + root = { + uid = 0; + shell = lib.getExe bashInteractive; + home = "/root"; + gid = 0; + groups = [ "root" ]; + description = "System administrator"; }; + nobody = { + uid = 65534; + shell = lib.getExe' shadow "nologin"; + home = "/var/empty"; + gid = 65534; + groups = [ "nobody" ]; + description = "Unprivileged account (don't use!)"; + }; + + } + // lib.optionalAttrs (uid != 0) { + "${uname}" = { + uid = uid; + shell = lib.getExe bashInteractive; + home = "/home/${uname}"; + gid = gid; + groups = [ "${gname}" ]; + description = "Nix user"; + }; + } + // lib.listToAttrs ( + map (n: { + name = "nixbld${toString n}"; + value = { + uid = 30000 + n; + gid = 30000; + groups = [ "nixbld" ]; + description = "Nix build user ${toString n}"; + }; + }) (lib.lists.range 1 32) + ); + + groups = { + root.gid = 0; + nixbld.gid = 30000; + 
nobody.gid = 65534; + } + // lib.optionalAttrs (gid != 0) { + "${gname}".gid = gid; + }; + userToPasswd = ( k: { diff --git a/packaging/components.nix b/packaging/components.nix index 661857833..b5fad4043 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -54,12 +54,12 @@ let preConfigure = prevAttrs.preConfigure or "" + - # Update the repo-global .version file. - # Symlink ./.version points there, but by default only workDir is writable. - '' - chmod u+w ./.version - echo ${finalAttrs.version} > ./.version - ''; + # Update the repo-global .version file. + # Symlink ./.version points there, but by default only workDir is writable. + '' + chmod u+w ./.version + echo ${finalAttrs.version} > ./.version + ''; }; localSourceLayer = @@ -148,7 +148,8 @@ let nativeBuildInputs = [ meson ninja - ] ++ prevAttrs.nativeBuildInputs or [ ]; + ] + ++ prevAttrs.nativeBuildInputs or [ ]; mesonCheckFlags = prevAttrs.mesonCheckFlags or [ ] ++ [ "--print-errorlogs" ]; diff --git a/packaging/dev-shell.nix b/packaging/dev-shell.nix index 27d819ec4..949f79752 100644 --- a/packaging/dev-shell.nix +++ b/packaging/dev-shell.nix @@ -71,17 +71,16 @@ pkgs.nixComponents2.nix-util.overrideAttrs ( # We use this shell with the local checkout, not unpackPhase. src = null; - env = - { - # For `make format`, to work without installing pre-commit - _NIX_PRE_COMMIT_HOOKS_CONFIG = "${(pkgs.formats.yaml { }).generate "pre-commit-config.yaml" - modular.pre-commit.settings.rawConfig - }"; - } - // lib.optionalAttrs stdenv.hostPlatform.isLinux { - CC_LD = "mold"; - CXX_LD = "mold"; - }; + env = { + # For `make format`, to work without installing pre-commit + _NIX_PRE_COMMIT_HOOKS_CONFIG = "${(pkgs.formats.yaml { }).generate "pre-commit-config.yaml" + modular.pre-commit.settings.rawConfig + }"; + } + // lib.optionalAttrs stdenv.hostPlatform.isLinux { + CC_LD = "mold"; + CXX_LD = "mold"; + }; mesonFlags = map (transformFlag "libutil") (ignoreCrossFile pkgs.nixComponents2.nix-util.mesonFlags) @@ -126,17 +125,18 @@ pkgs.nixComponents2.nix-util.overrideAttrs ( ) ++ lib.optional stdenv.hostPlatform.isLinux pkgs.buildPackages.mold-wrapped; - buildInputs = - [ pkgs.gbenchmark ] - ++ attrs.buildInputs or [ ] - ++ pkgs.nixComponents2.nix-util.buildInputs - ++ pkgs.nixComponents2.nix-store.buildInputs - ++ pkgs.nixComponents2.nix-store-tests.externalBuildInputs - ++ pkgs.nixComponents2.nix-fetchers.buildInputs - ++ pkgs.nixComponents2.nix-expr.buildInputs - ++ pkgs.nixComponents2.nix-expr.externalPropagatedBuildInputs - ++ pkgs.nixComponents2.nix-cmd.buildInputs - ++ lib.optionals havePerl pkgs.nixComponents2.nix-perl-bindings.externalBuildInputs - ++ lib.optional havePerl pkgs.perl; + buildInputs = [ + pkgs.gbenchmark + ] + ++ attrs.buildInputs or [ ] + ++ pkgs.nixComponents2.nix-util.buildInputs + ++ pkgs.nixComponents2.nix-store.buildInputs + ++ pkgs.nixComponents2.nix-store-tests.externalBuildInputs + ++ pkgs.nixComponents2.nix-fetchers.buildInputs + ++ pkgs.nixComponents2.nix-expr.buildInputs + ++ pkgs.nixComponents2.nix-expr.externalPropagatedBuildInputs + ++ pkgs.nixComponents2.nix-cmd.buildInputs + ++ lib.optionals havePerl pkgs.nixComponents2.nix-perl-bindings.externalBuildInputs + ++ lib.optional havePerl pkgs.perl; } ) diff --git a/packaging/everything.nix b/packaging/everything.nix index 5bf57f95a..f6bdad490 100644 --- a/packaging/everything.nix +++ b/packaging/everything.nix @@ -47,25 +47,25 @@ }: let - libs = - { - inherit - nix-util - nix-util-c - nix-store - nix-store-c - nix-fetchers - nix-fetchers-c - 
nix-expr - nix-expr-c - nix-flake - nix-flake-c - nix-main - nix-main-c - nix-cmd - ; - } - // lib.optionalAttrs + libs = { + inherit + nix-util + nix-util-c + nix-store + nix-store-c + nix-fetchers + nix-fetchers-c + nix-expr + nix-expr-c + nix-flake + nix-flake-c + nix-main + nix-main-c + nix-cmd + ; + } + // + lib.optionalAttrs (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform) { # Currently fails in static build @@ -127,20 +127,19 @@ stdenv.mkDerivation (finalAttrs: { */ dontFixup = true; - checkInputs = - [ - # Make sure the unit tests have passed - nix-util-tests.tests.run - nix-store-tests.tests.run - nix-expr-tests.tests.run - nix-fetchers-tests.tests.run - nix-flake-tests.tests.run + checkInputs = [ + # Make sure the unit tests have passed + nix-util-tests.tests.run + nix-store-tests.tests.run + nix-expr-tests.tests.run + nix-fetchers-tests.tests.run + nix-flake-tests.tests.run - # Make sure the functional tests have passed - nix-functional-tests - ] - ++ lib.optionals - (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform) + # Make sure the functional tests have passed + nix-functional-tests + ] + ++ + lib.optionals (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform) [ # Perl currently fails in static build # TODO: Split out tests into a separate derivation? diff --git a/src/libcmd/package.nix b/src/libcmd/package.nix index be5054f64..c382f0e57 100644 --- a/src/libcmd/package.nix +++ b/src/libcmd/package.nix @@ -53,7 +53,8 @@ mkMesonLibrary (finalAttrs: { buildInputs = [ ({ inherit editline readline; }.${readlineFlavor}) - ] ++ lib.optional enableMarkdown lowdown; + ] + ++ lib.optional enableMarkdown lowdown; propagatedBuildInputs = [ nix-util diff --git a/src/libexpr/package.nix b/src/libexpr/package.nix index 50161c58b..a67a8cc49 100644 --- a/src/libexpr/package.nix +++ b/src/libexpr/package.nix @@ -70,13 +70,15 @@ mkMesonLibrary (finalAttrs: { nix-util nix-store nix-fetchers - ] ++ finalAttrs.passthru.externalPropagatedBuildInputs; + ] + ++ finalAttrs.passthru.externalPropagatedBuildInputs; # Hack for sake of the dev shell passthru.externalPropagatedBuildInputs = [ boost nlohmann_json - ] ++ lib.optional enableGC boehmgc; + ] + ++ lib.optional enableGC boehmgc; mesonFlags = [ (lib.mesonEnable "gc" enableGC) diff --git a/src/libstore-tests/package.nix b/src/libstore-tests/package.nix index 00d40365e..90e6af519 100644 --- a/src/libstore-tests/package.nix +++ b/src/libstore-tests/package.nix @@ -43,15 +43,14 @@ mkMesonExecutable (finalAttrs: { ]; # Hack for sake of the dev shell - passthru.externalBuildInputs = - [ - sqlite - rapidcheck - gtest - ] - ++ lib.optionals withBenchmarks [ - gbenchmark - ]; + passthru.externalBuildInputs = [ + sqlite + rapidcheck + gtest + ] + ++ lib.optionals withBenchmarks [ + gbenchmark + ]; buildInputs = finalAttrs.passthru.externalBuildInputs ++ [ nix-store diff --git a/src/libstore/package.nix b/src/libstore/package.nix index 775776139..47805547b 100644 --- a/src/libstore/package.nix +++ b/src/libstore/package.nix @@ -58,30 +58,28 @@ mkMesonLibrary (finalAttrs: { nativeBuildInputs = lib.optional embeddedSandboxShell unixtools.hexdump; - buildInputs = - [ - boost - curl - sqlite - ] - ++ lib.optional stdenv.hostPlatform.isLinux libseccomp - # There have been issues building these dependencies - ++ lib.optional stdenv.hostPlatform.isDarwin darwin.apple_sdk.libs.sandbox - ++ lib.optional withAWS aws-sdk-cpp; + buildInputs = [ + boost + curl + 
sqlite + ] + ++ lib.optional stdenv.hostPlatform.isLinux libseccomp + # There have been issues building these dependencies + ++ lib.optional stdenv.hostPlatform.isDarwin darwin.apple_sdk.libs.sandbox + ++ lib.optional withAWS aws-sdk-cpp; propagatedBuildInputs = [ nix-util nlohmann_json ]; - mesonFlags = - [ - (lib.mesonEnable "seccomp-sandboxing" stdenv.hostPlatform.isLinux) - (lib.mesonBool "embedded-sandbox-shell" embeddedSandboxShell) - ] - ++ lib.optionals stdenv.hostPlatform.isLinux [ - (lib.mesonOption "sandbox-shell" "${busybox-sandbox-shell}/bin/busybox") - ]; + mesonFlags = [ + (lib.mesonEnable "seccomp-sandboxing" stdenv.hostPlatform.isLinux) + (lib.mesonBool "embedded-sandbox-shell" embeddedSandboxShell) + ] + ++ lib.optionals stdenv.hostPlatform.isLinux [ + (lib.mesonOption "sandbox-shell" "${busybox-sandbox-shell}/bin/busybox") + ]; meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; diff --git a/src/libutil/package.nix b/src/libutil/package.nix index 46f56e07e..3deb7ba3a 100644 --- a/src/libutil/package.nix +++ b/src/libutil/package.nix @@ -52,7 +52,8 @@ mkMesonLibrary (finalAttrs: { libblake3 libsodium openssl - ] ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid; + ] + ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid; propagatedBuildInputs = [ boost diff --git a/src/perl/package.nix b/src/perl/package.nix index 5841570cd..10d84de77 100644 --- a/src/perl/package.nix +++ b/src/perl/package.nix @@ -45,7 +45,8 @@ perl.pkgs.toPerlModule ( buildInputs = [ nix-store - ] ++ finalAttrs.passthru.externalBuildInputs; + ] + ++ finalAttrs.passthru.externalBuildInputs; # Hack for sake of the dev shell passthru.externalBuildInputs = [ diff --git a/tests/functional/lang/eval-okay-attrnames.nix b/tests/functional/lang/eval-okay-attrnames.nix index 085e78084..7cdb41538 100644 --- a/tests/functional/lang/eval-okay-attrnames.nix +++ b/tests/functional/lang/eval-okay-attrnames.nix @@ -2,16 +2,15 @@ with import ./lib.nix; let - attrs = - { - y = "y"; - x = "x"; - foo = "foo"; - } - // rec { - x = "newx"; - bar = x; - }; + attrs = { + y = "y"; + x = "x"; + foo = "foo"; + } + // rec { + x = "newx"; + bar = x; + }; names = builtins.attrNames attrs; diff --git a/tests/functional/lang/eval-okay-attrs.nix b/tests/functional/lang/eval-okay-attrs.nix index 787b9a933..0350e6e72 100644 --- a/tests/functional/lang/eval-okay-attrs.nix +++ b/tests/functional/lang/eval-okay-attrs.nix @@ -1,15 +1,14 @@ let { - as = - { - x = 123; - y = 456; - } - // { - z = 789; - } - // { - z = 987; - }; + as = { + x = 123; + y = 456; + } + // { + z = 789; + } + // { + z = 987; + }; body = if as ? 
a then diff --git a/tests/functional/lang/eval-okay-attrs2.nix b/tests/functional/lang/eval-okay-attrs2.nix index 0896f9cf1..234ed1be7 100644 --- a/tests/functional/lang/eval-okay-attrs2.nix +++ b/tests/functional/lang/eval-okay-attrs2.nix @@ -1,15 +1,14 @@ let { - as = - { - x = 123; - y = 456; - } - // { - z = 789; - } - // { - z = 987; - }; + as = { + x = 123; + y = 456; + } + // { + z = 789; + } + // { + z = 987; + }; A = "a"; Z = "z"; diff --git a/tests/functional/lang/eval-okay-import.nix b/tests/functional/lang/eval-okay-import.nix index 484dccac0..9558b7ffc 100644 --- a/tests/functional/lang/eval-okay-import.nix +++ b/tests/functional/lang/eval-okay-import.nix @@ -6,7 +6,8 @@ let scopedImport = attrs: fn: scopedImport (overrides // attrs) fn; builtins = builtins // overrides; - } // import ./lib.nix; + } + // import ./lib.nix; in scopedImport overrides ./imported.nix diff --git a/tests/functional/nested-sandboxing/runner.nix b/tests/functional/nested-sandboxing/runner.nix index d0d441a82..cc193844d 100644 --- a/tests/functional/nested-sandboxing/runner.nix +++ b/tests/functional/nested-sandboxing/runner.nix @@ -6,32 +6,31 @@ mkDerivation { name = "nested-sandboxing"; busybox = builtins.getEnv "busybox"; EXTRA_SANDBOX = builtins.getEnv "EXTRA_SANDBOX"; - buildCommand = - '' - set -x - set -eu -o pipefail - '' - + ( - if altitude == 0 then - '' - echo Deep enough! > $out - '' - else - '' - cp -r ${../common} ./common - cp ${../common.sh} ./common.sh - cp ${../config.nix} ./config.nix - cp -r ${./.} ./nested-sandboxing + buildCommand = '' + set -x + set -eu -o pipefail + '' + + ( + if altitude == 0 then + '' + echo Deep enough! > $out + '' + else + '' + cp -r ${../common} ./common + cp ${../common.sh} ./common.sh + cp ${../config.nix} ./config.nix + cp -r ${./.} ./nested-sandboxing - export PATH=${builtins.getEnv "NIX_BIN_DIR"}:$PATH + export PATH=${builtins.getEnv "NIX_BIN_DIR"}:$PATH - export _NIX_TEST_SOURCE_DIR=$PWD - export _NIX_TEST_BUILD_DIR=$PWD + export _NIX_TEST_SOURCE_DIR=$PWD + export _NIX_TEST_BUILD_DIR=$PWD - source common.sh - source ./nested-sandboxing/command.sh + source common.sh + source ./nested-sandboxing/command.sh - runNixBuild ${storeFun} ${toString altitude} >> $out - '' - ); + runNixBuild ${storeFun} ${toString altitude} >> $out + '' + ); } diff --git a/tests/functional/package.nix b/tests/functional/package.nix index 716e21fe4..1f1d10ea8 100644 --- a/tests/functional/package.nix +++ b/tests/functional/package.nix @@ -47,26 +47,25 @@ mkMesonDerivation ( ]; # Hack for sake of the dev shell - passthru.externalNativeBuildInputs = - [ - meson - ninja - pkg-config + passthru.externalNativeBuildInputs = [ + meson + ninja + pkg-config - jq - git - mercurial - unixtools.script - ] - ++ lib.optionals stdenv.hostPlatform.isLinux [ - # For various sandboxing tests that needs a statically-linked shell, - # etc. - busybox-sandbox-shell - # For Overlay FS tests need `mount`, `umount`, and `unshare`. - # For `script` command (ensuring a TTY) - # TODO use `unixtools` to be precise over which executables instead? - util-linux - ]; + jq + git + mercurial + unixtools.script + ] + ++ lib.optionals stdenv.hostPlatform.isLinux [ + # For various sandboxing tests that needs a statically-linked shell, + # etc. + busybox-sandbox-shell + # For Overlay FS tests need `mount`, `umount`, and `unshare`. + # For `script` command (ensuring a TTY) + # TODO use `unixtools` to be precise over which executables instead? 
+ util-linux + ]; nativeBuildInputs = finalAttrs.passthru.externalNativeBuildInputs ++ [ nix-cli diff --git a/tests/nixos/remote-builds.nix b/tests/nixos/remote-builds.nix index 3bfb651bd..ba5fdc2af 100644 --- a/tests/nixos/remote-builds.nix +++ b/tests/nixos/remote-builds.nix @@ -20,7 +20,8 @@ let nix.settings.sandbox = true; services.openssh.ports = [ 22 - ] ++ lib.optional supportsCustomPort 2222; + ] + ++ lib.optional supportsCustomPort 2222; # Regression test for use of PID namespaces when /proc has # filesystems mounted on top of it From 73f6729147f9a5162f802e99d86f6dcc428a0268 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 18 Aug 2025 20:30:54 +0300 Subject: [PATCH 215/382] git-blame-ignore-revs: Add nixfmt 1.0.0 reformat --- .git-blame-ignore-revs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 0c0d7fde8..2bea19741 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -2,3 +2,5 @@ e4f62e46088919428a68bd8014201dc8e379fed7 # !autorebase ./maintainers/format.sh --until-stable # meson re-formatting 385e2c3542c707d95e3784f7f6d623f67e77ab61 # !autorebase ./maintainers/format.sh --until-stable +# nixfmt 1.0.0 +1d943f581908f35075a84a3d89c2eba3ff35067f # !autorebase ./maintainers/format.sh --until-stable From 051290b155c154ff937d804878026334e74b8685 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 18 Aug 2025 21:42:55 +0300 Subject: [PATCH 216/382] hydra: Fix otherNixes.nix_2_3 25.05 has it marked as insecure, but we don't care about it for testing purposes. --- tests/nixos/default.nix | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/tests/nixos/default.nix b/tests/nixos/default.nix index e200ae7d9..2031e02a4 100644 --- a/tests/nixos/default.nix +++ b/tests/nixos/default.nix @@ -79,7 +79,15 @@ let { lib, pkgs, ... }: { imports = [ checkOverrideNixVersion ]; - nix.package = lib.mkForce pkgs.nixVersions.nix_2_3; + nix.package = lib.mkForce ( + pkgs.nixVersions.nix_2_3.overrideAttrs (o: { + meta = o.meta // { + # This version shouldn't be used by end-users, but we run tests against + # it to ensure we don't break protocol compatibility. + knownVulnerabilities = [ ]; + }; + }) + ); }; otherNixes.nix_2_13.setNixPackage = @@ -90,6 +98,8 @@ let nixpkgs-23-11.legacyPackages.${pkgs.stdenv.hostPlatform.system}.nixVersions.nix_2_13.overrideAttrs (o: { meta = o.meta // { + # This version shouldn't be used by end-users, but we run tests against + # it to ensure we don't break protocol compatibility. knownVulnerabilities = [ ]; }; }) From 62018b3a7fdb39a8b8bd01489f01dd32216f55f8 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 18 Aug 2025 23:24:07 +0300 Subject: [PATCH 217/382] libstore: Fix formatting Wasn't caught by CI because the PR wasn't fresh enough and didn't have formatting checks enabled. 
--- src/libstore/local-store.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 7f50b43f2..81768e4eb 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -232,7 +232,8 @@ LocalStore::LocalStore(ref config) globalLock = openLockFile(globalLockPath.c_str(), true); } catch (SysError & e) { if (e.errNo == EACCES || e.errNo == EPERM) { - e.addTrace({}, + e.addTrace( + {}, "This command may have been run as non-root in a single-user Nix installation,\n" "or the Nix daemon may have crashed."); } From 03101cc8398aac222ac2c637ac04de2471f57b9d Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 17 Aug 2025 22:41:57 +0300 Subject: [PATCH 218/382] treewide: Bump C++ standard version to C++23 Compilers in nixpkgs have caught up and major distros should also have recent enough compilers. It would be nice to have newer features like more full featured ranges and deducing this. --- doc/manual/source/installation/prerequisites-source.md | 2 +- nix-meson-build-support/export/meson.build | 4 ++-- src/libcmd/meson.build | 2 +- src/libexpr-c/meson.build | 2 +- src/libexpr-test-support/meson.build | 2 +- src/libexpr-test-support/tests/value/context.cc | 1 + src/libexpr-tests/derived-path.cc | 1 + src/libexpr-tests/meson.build | 2 +- src/libexpr/meson.build | 2 +- src/libfetchers-c/meson.build | 2 +- src/libfetchers-tests/meson.build | 2 +- src/libfetchers/meson.build | 2 +- src/libflake-c/meson.build | 2 +- src/libflake-tests/meson.build | 2 +- src/libflake/meson.build | 2 +- src/libmain-c/meson.build | 2 +- src/libmain/meson.build | 2 +- src/libstore-c/meson.build | 2 +- src/libstore-test-support/derived-path.cc | 1 + .../include/nix/store/tests/outputs-spec.hh | 1 + src/libstore-test-support/meson.build | 2 +- src/libstore-test-support/path.cc | 3 ++- src/libstore-tests/meson.build | 2 +- src/libstore/meson.build | 2 +- src/libutil-c/meson.build | 2 +- src/libutil-test-support/hash.cc | 1 + src/libutil-test-support/meson.build | 2 +- src/libutil-tests/meson.build | 2 +- src/libutil/meson.build | 2 +- src/nix/meson.build | 2 +- tests/functional/meson.build | 2 +- 31 files changed, 33 insertions(+), 27 deletions(-) diff --git a/doc/manual/source/installation/prerequisites-source.md b/doc/manual/source/installation/prerequisites-source.md index c346a0a4b..057fd4443 100644 --- a/doc/manual/source/installation/prerequisites-source.md +++ b/doc/manual/source/installation/prerequisites-source.md @@ -10,7 +10,7 @@ - Bash Shell. The `./configure` script relies on bashisms, so Bash is required. - - A version of GCC or Clang that supports C++20. + - A version of GCC or Clang that supports C++23. - `pkg-config` to locate dependencies. 
If your distribution does not provide it, you can get it from diff --git a/nix-meson-build-support/export/meson.build b/nix-meson-build-support/export/meson.build index 26f778324..62a27bd48 100644 --- a/nix-meson-build-support/export/meson.build +++ b/nix-meson-build-support/export/meson.build @@ -14,7 +14,7 @@ extra_pkg_config_variables = get_variable('extra_pkg_config_variables', {}) extra_cflags = [] if not meson.project_name().endswith('-c') - extra_cflags += [ '-std=c++2a' ] + extra_cflags += [ '-std=c++23' ] endif import('pkgconfig').generate( @@ -34,7 +34,7 @@ meson.override_dependency( declare_dependency( include_directories : include_dirs, link_with : this_library, - compile_args : [ '-std=c++2a' ], + compile_args : [ '-std=c++23' ], dependencies : deps_public_subproject + deps_public, variables : extra_pkg_config_variables, ), diff --git a/src/libcmd/meson.build b/src/libcmd/meson.build index 2f8079496..24e075246 100644 --- a/src/libcmd/meson.build +++ b/src/libcmd/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libexpr-c/meson.build b/src/libexpr-c/meson.build index 606b93e70..7c014d61d 100644 --- a/src/libexpr-c/meson.build +++ b/src/libexpr-c/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libexpr-test-support/meson.build b/src/libexpr-test-support/meson.build index 89dc1d20a..d762eb85e 100644 --- a/src/libexpr-test-support/meson.build +++ b/src/libexpr-test-support/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libexpr-test-support/tests/value/context.cc b/src/libexpr-test-support/tests/value/context.cc index b24d83a5a..d6036601a 100644 --- a/src/libexpr-test-support/tests/value/context.cc +++ b/src/libexpr-test-support/tests/value/context.cc @@ -1,3 +1,4 @@ +#include // Needed by rapidcheck on Darwin #include #include "nix/store/tests/path.hh" diff --git a/src/libexpr-tests/derived-path.cc b/src/libexpr-tests/derived-path.cc index a4bd29c1c..e9f9fcd07 100644 --- a/src/libexpr-tests/derived-path.cc +++ b/src/libexpr-tests/derived-path.cc @@ -1,5 +1,6 @@ #include #include +#include // Needed by rapidcheck on Darwin #include #include "nix/store/tests/derived-path.hh" diff --git a/src/libexpr-tests/meson.build b/src/libexpr-tests/meson.build index cc203d60e..a876e9705 100644 --- a/src/libexpr-tests/meson.build +++ b/src/libexpr-tests/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index e1a12106d..15bca88f0 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # 
Please print logs for tests that fail diff --git a/src/libfetchers-c/meson.build b/src/libfetchers-c/meson.build index 74ec9c9c2..8542744b4 100644 --- a/src/libfetchers-c/meson.build +++ b/src/libfetchers-c/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libfetchers-tests/meson.build b/src/libfetchers-tests/meson.build index cd23c1f0c..a18f64d79 100644 --- a/src/libfetchers-tests/meson.build +++ b/src/libfetchers-tests/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libfetchers/meson.build b/src/libfetchers/meson.build index 3da58cb70..922a2c491 100644 --- a/src/libfetchers/meson.build +++ b/src/libfetchers/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libflake-c/meson.build b/src/libflake-c/meson.build index 8b844371d..933e06d90 100644 --- a/src/libflake-c/meson.build +++ b/src/libflake-c/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libflake-tests/meson.build b/src/libflake-tests/meson.build index 08c48f137..59094abe8 100644 --- a/src/libflake-tests/meson.build +++ b/src/libflake-tests/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libflake/meson.build b/src/libflake/meson.build index faa12e7a9..191d8f068 100644 --- a/src/libflake/meson.build +++ b/src/libflake/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libmain-c/meson.build b/src/libmain-c/meson.build index 2c08cac41..9e26ad8ad 100644 --- a/src/libmain-c/meson.build +++ b/src/libmain-c/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libmain/meson.build b/src/libmain/meson.build index 252b28169..4a90d2d83 100644 --- a/src/libmain/meson.build +++ b/src/libmain/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libstore-c/meson.build b/src/libstore-c/meson.build index 1d01aa3aa..f8eaef803 100644 --- a/src/libstore-c/meson.build +++ b/src/libstore-c/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : 
files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libstore-test-support/derived-path.cc b/src/libstore-test-support/derived-path.cc index 225b86c79..cb1d23ac5 100644 --- a/src/libstore-test-support/derived-path.cc +++ b/src/libstore-test-support/derived-path.cc @@ -1,5 +1,6 @@ #include +#include // Needed by rapidcheck on Darwin #include #include "nix/store/tests/derived-path.hh" diff --git a/src/libstore-test-support/include/nix/store/tests/outputs-spec.hh b/src/libstore-test-support/include/nix/store/tests/outputs-spec.hh index 865a97352..5bbcc7340 100644 --- a/src/libstore-test-support/include/nix/store/tests/outputs-spec.hh +++ b/src/libstore-test-support/include/nix/store/tests/outputs-spec.hh @@ -1,6 +1,7 @@ #pragma once ///@file +#include // Needed by rapidcheck on Darwin #include #include "nix/store/outputs-spec.hh" diff --git a/src/libstore-test-support/meson.build b/src/libstore-test-support/meson.build index 26da5d0f2..b2977941f 100644 --- a/src/libstore-test-support/meson.build +++ b/src/libstore-test-support/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libstore-test-support/path.cc b/src/libstore-test-support/path.cc index 5d5902cc9..145920310 100644 --- a/src/libstore-test-support/path.cc +++ b/src/libstore-test-support/path.cc @@ -1,6 +1,7 @@ -#include +#include // Needed by rapidcheck on Darwin #include +#include #include #include "nix/store/path-regex.hh" diff --git a/src/libstore-tests/meson.build b/src/libstore-tests/meson.build index 87f6a234a..d325e57a6 100644 --- a/src/libstore-tests/meson.build +++ b/src/libstore-tests/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libstore/meson.build b/src/libstore/meson.build index ad76582d8..4a5f4b2cf 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libutil-c/meson.build b/src/libutil-c/meson.build index 3688ddeb6..8131c517c 100644 --- a/src/libutil-c/meson.build +++ b/src/libutil-c/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libutil-test-support/hash.cc b/src/libutil-test-support/hash.cc index ffff27926..853da8e90 100644 --- a/src/libutil-test-support/hash.cc +++ b/src/libutil-test-support/hash.cc @@ -1,5 +1,6 @@ #include +#include // Needed by rapidcheck on Darwin #include #include "nix/util/hash.hh" diff --git a/src/libutil-test-support/meson.build b/src/libutil-test-support/meson.build index 9bee4a1a6..910f1d881 100644 --- a/src/libutil-test-support/meson.build +++ b/src/libutil-test-support/meson.build @@ -3,7 +3,7 @@ project( 'cpp', 
version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libutil-tests/meson.build b/src/libutil-tests/meson.build index 6f04b46fd..0e2a2e468 100644 --- a/src/libutil-tests/meson.build +++ b/src/libutil-tests/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/libutil/meson.build b/src/libutil/meson.build index ffd1ebd49..c294f895a 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/src/nix/meson.build b/src/nix/meson.build index e17b39f98..8accd4755 100644 --- a/src/nix/meson.build +++ b/src/nix/meson.build @@ -3,7 +3,7 @@ project( 'cpp', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail diff --git a/tests/functional/meson.build b/tests/functional/meson.build index e501aa102..54e13b26d 100644 --- a/tests/functional/meson.build +++ b/tests/functional/meson.build @@ -2,7 +2,7 @@ project( 'nix-functional-tests', version : files('.version'), default_options : [ - 'cpp_std=c++2a', + 'cpp_std=c++23', # TODO(Qyriad): increase the warning level 'warning_level=1', 'errorlogs=true', # Please print logs for tests that fail From 69fcc2cfc13df425e3bcd0f73385c89f1634e022 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 19 Aug 2025 23:21:36 +0300 Subject: [PATCH 219/382] libstore: Introduce ParsedS3URL type This systematizes the way our s3:// URLs are parsed in filetransfer.cc. Yoinked out and refactored out of [1]. 
[1]: https://github.com/NixOS/nix/pull/13752 Co-authored-by: Bernardo Meurer Costa --- src/libstore-tests/meson.build | 1 + src/libstore-tests/s3.cc | 96 ++++++++++++++++++++++++++++ src/libstore/filetransfer.cc | 28 ++------ src/libstore/include/nix/store/s3.hh | 33 ++++++++++ src/libstore/meson.build | 1 + src/libstore/s3.cc | 71 ++++++++++++++++++++ src/libutil/include/nix/util/url.hh | 6 +- src/libutil/url.cc | 4 +- 8 files changed, 212 insertions(+), 28 deletions(-) create mode 100644 src/libstore-tests/s3.cc create mode 100644 src/libstore/s3.cc diff --git a/src/libstore-tests/meson.build b/src/libstore-tests/meson.build index 87f6a234a..9b98a2812 100644 --- a/src/libstore-tests/meson.build +++ b/src/libstore-tests/meson.build @@ -74,6 +74,7 @@ sources = files( 'path.cc', 'references.cc', 's3-binary-cache-store.cc', + 's3.cc', 'serve-protocol.cc', 'ssh-store.cc', 'store-reference.cc', diff --git a/src/libstore-tests/s3.cc b/src/libstore-tests/s3.cc new file mode 100644 index 000000000..b66005cb9 --- /dev/null +++ b/src/libstore-tests/s3.cc @@ -0,0 +1,96 @@ +#include "nix/store/s3.hh" +#include "nix/util/tests/gmock-matchers.hh" + +#if NIX_WITH_S3_SUPPORT + +# include +# include + +namespace nix { + +struct ParsedS3URLTestCase +{ + std::string url; + ParsedS3URL expected; + std::string description; +}; + +class ParsedS3URLTest : public ::testing::WithParamInterface, public ::testing::Test +{}; + +TEST_P(ParsedS3URLTest, parseS3URLSuccessfully) +{ + const auto & testCase = GetParam(); + auto parsed = ParsedS3URL::parse(testCase.url); + ASSERT_EQ(parsed, testCase.expected); +} + +INSTANTIATE_TEST_SUITE_P( + QueryParams, + ParsedS3URLTest, + ::testing::Values( + ParsedS3URLTestCase{ + "s3://my-bucket/my-key.txt", + { + .bucket = "my-bucket", + .key = "my-key.txt", + }, + "basic_s3_bucket"}, + ParsedS3URLTestCase{ + "s3://prod-cache/nix/store/abc123.nar.xz?region=eu-west-1", + { + .bucket = "prod-cache", + .key = "nix/store/abc123.nar.xz", + .region = "eu-west-1", + }, + "with_region"}, + ParsedS3URLTestCase{ + "s3://bucket/key?region=us-west-2&profile=prod&endpoint=custom.s3.com&scheme=https®ion=us-east-1", + { + .bucket = "bucket", + .key = "key", + .profile = "prod", + .region = "us-west-2", //< using the first parameter (decodeQuery ignores dupicates) + .scheme = "https", + .endpoint = ParsedURL::Authority{.host = "custom.s3.com"}, + }, + "complex"}, + ParsedS3URLTestCase{ + "s3://cache/file.txt?profile=production®ion=ap-southeast-2", + { + .bucket = "cache", + .key = "file.txt", + .profile = "production", + .region = "ap-southeast-2", + }, + "with_profile_and_region"}, + ParsedS3URLTestCase{ + "s3://bucket/key?endpoint=https://minio.local&scheme=http", + { + .bucket = "bucket", + .key = "key", + /* TODO: Figure out what AWS SDK is doing when both endpointOverride and scheme are set. 
*/ + .scheme = "http", + .endpoint = + ParsedURL{ + .scheme = "https", + .authority = ParsedURL::Authority{.host = "minio.local"}, + }, + }, + "with_absolute_endpoint_uri"}), + [](const ::testing::TestParamInfo & info) { return info.param.description; }); + +TEST(InvalidParsedS3URLTest, parseS3URLErrors) +{ + auto invalidBucketMatcher = ::testing::ThrowsMessage( + testing::HasSubstrIgnoreANSIMatcher("error: URI has a missing or invalid bucket name")); + + /* Empty bucket (authority) */ + ASSERT_THAT([]() { ParsedS3URL::parse("s3:///key"); }, invalidBucketMatcher); + /* Invalid bucket name */ + ASSERT_THAT([]() { ParsedS3URL::parse("s3://127.0.0.1"); }, invalidBucketMatcher); +} + +} // namespace nix + +#endif diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index c29da12e8..974797e12 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -798,22 +798,6 @@ struct curlFileTransfer : public FileTransfer #endif } -#if NIX_WITH_S3_SUPPORT - std::tuple parseS3Uri(std::string uri) - { - auto [path, params] = splitUriAndParams(uri); - - auto slash = path.find('/', 5); // 5 is the length of "s3://" prefix - if (slash == std::string::npos) - throw nix::Error("bad S3 URI '%s'", path); - - std::string bucketName(path, 5, slash - 5); - std::string key(path, slash + 1); - - return {bucketName, key, params}; - } -#endif - void enqueueFileTransfer(const FileTransferRequest & request, Callback callback) override { /* Ugly hack to support s3:// URIs. */ @@ -821,17 +805,17 @@ struct curlFileTransfer : public FileTransfer // FIXME: do this on a worker thread try { #if NIX_WITH_S3_SUPPORT - auto [bucketName, key, params] = parseS3Uri(request.uri); + auto parsed = ParsedS3URL::parse(request.uri); - std::string profile = getOr(params, "profile", ""); - std::string region = getOr(params, "region", Aws::Region::US_EAST_1); - std::string scheme = getOr(params, "scheme", ""); - std::string endpoint = getOr(params, "endpoint", ""); + std::string profile = parsed.profile.value_or(""); + std::string region = parsed.region.value_or(Aws::Region::US_EAST_1); + std::string scheme = parsed.scheme.value_or(""); + std::string endpoint = parsed.getEncodedEndpoint().value_or(""); S3Helper s3Helper(profile, region, scheme, endpoint); // FIXME: implement ETag - auto s3Res = s3Helper.getObject(bucketName, key); + auto s3Res = s3Helper.getObject(parsed.bucket, parsed.key); FileTransferResult res; if (!s3Res.data) throw FileTransferError(NotFound, {}, "S3 object '%s' does not exist", request.uri); diff --git a/src/libstore/include/nix/store/s3.hh b/src/libstore/include/nix/store/s3.hh index 57e03a065..517825952 100644 --- a/src/libstore/include/nix/store/s3.hh +++ b/src/libstore/include/nix/store/s3.hh @@ -4,9 +4,12 @@ #if NIX_WITH_S3_SUPPORT # include "nix/util/ref.hh" +# include "nix/util/url.hh" +# include "nix/util/util.hh" # include # include +# include namespace Aws { namespace Client { @@ -45,6 +48,36 @@ struct S3Helper FileTransferResult getObject(const std::string & bucketName, const std::string & key); }; +/** + * Parsed S3 URL. + */ +struct ParsedS3URL +{ + std::string bucket; + std::string key; + std::optional profile; + std::optional region; + std::optional scheme; + /** + * The endpoint can be either missing, be an absolute URI (with a scheme like `http:`) + * or an authority (so an IP address or a registered name). 
+ */ + std::variant endpoint; + + std::optional getEncodedEndpoint() const + { + return std::visit( + overloaded{ + [](std::monostate) -> std::optional { return std::nullopt; }, + [](const auto & authorityOrUrl) -> std::optional { return authorityOrUrl.to_string(); }, + }, + endpoint); + } + + static ParsedS3URL parse(std::string_view uri); + auto operator<=>(const ParsedS3URL & other) const = default; +}; + } // namespace nix #endif diff --git a/src/libstore/meson.build b/src/libstore/meson.build index ad76582d8..e98ba7545 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -321,6 +321,7 @@ sources = files( 'remote-store.cc', 'restricted-store.cc', 's3-binary-cache-store.cc', + 's3.cc', 'serve-protocol-connection.cc', 'serve-protocol.cc', 'sqlite.cc', diff --git a/src/libstore/s3.cc b/src/libstore/s3.cc new file mode 100644 index 000000000..9ed4e7fd9 --- /dev/null +++ b/src/libstore/s3.cc @@ -0,0 +1,71 @@ +#include "nix/store/s3.hh" +#include "nix/util/split.hh" +#include "nix/util/url.hh" + +namespace nix { + +using namespace std::string_view_literals; + +#if NIX_WITH_S3_SUPPORT + +ParsedS3URL ParsedS3URL::parse(std::string_view uri) +try { + auto parsed = parseURL(uri); + + if (parsed.scheme != "s3"sv) + throw BadURL("URI scheme '%s' is not 's3'", parsed.scheme); + + /* Yeah, S3 URLs in Nix have the bucket name as authority. Luckily registered name type + authority has the same restrictions (mostly) as S3 bucket names. + TODO: Validate against: + https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html#general-purpose-bucket-names + */ + if (!parsed.authority || parsed.authority->host.empty() + || parsed.authority->hostType != ParsedURL::Authority::HostType::Name) + throw BadURL("URI has a missing or invalid bucket name"); + + std::string_view key = parsed.path; + /* Make the key a relative path. */ + splitPrefix(key, "/"); + + /* TODO: Validate the key against: + * https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html#object-key-guidelines + */ + + auto getOptionalParam = [&](std::string_view key) -> std::optional { + const auto & query = parsed.query; + auto it = query.find(key); + if (it == query.end()) + return std::nullopt; + return it->second; + }; + + auto endpoint = getOptionalParam("endpoint"); + + return ParsedS3URL{ + .bucket = std::move(parsed.authority->host), + .key = std::string{key}, + .profile = getOptionalParam("profile"), + .region = getOptionalParam("region"), + .scheme = getOptionalParam("scheme"), + .endpoint = [&]() -> decltype(ParsedS3URL::endpoint) { + if (!endpoint) + return std::monostate(); + + /* Try to parse the endpoint as a full-fledged URL with a scheme. 
*/ + try { + return parseURL(*endpoint); + } catch (BadURL &) { + } + + return ParsedURL::Authority::parse(*endpoint); + }(), + }; +} catch (BadURL & e) { + e.addTrace({}, "while parsing S3 URI: '%s'", uri); + throw; +} + +#endif + +} // namespace nix diff --git a/src/libutil/include/nix/util/url.hh b/src/libutil/include/nix/util/url.hh index 0aa1eac9f..cd20a08c6 100644 --- a/src/libutil/include/nix/util/url.hh +++ b/src/libutil/include/nix/util/url.hh @@ -30,7 +30,7 @@ struct ParsedURL }; static Authority parse(std::string_view encodedAuthority); - bool operator==(const Authority & other) const = default; + auto operator<=>(const Authority & other) const = default; std::string to_string() const; friend std::ostream & operator<<(std::ostream & os, const Authority & self); @@ -81,7 +81,7 @@ struct ParsedURL std::string to_string() const; - bool operator==(const ParsedURL & other) const noexcept = default; + auto operator<=>(const ParsedURL & other) const noexcept = default; /** * Remove `.` and `..` path elements. @@ -111,7 +111,7 @@ std::string encodeQuery(const StringMap & query); * * @throws BadURL */ -ParsedURL parseURL(const std::string & url); +ParsedURL parseURL(std::string_view url); /** * Although that’s not really standardized anywhere, an number of tools diff --git a/src/libutil/url.cc b/src/libutil/url.cc index 75f62d445..cdfba8a83 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -108,10 +108,8 @@ static std::string percentEncodeCharSet(std::string_view s, auto charSet) return res; } -ParsedURL parseURL(const std::string & url) +ParsedURL parseURL(std::string_view url) try { - auto unparsedView = url; - /* Account for several non-standard properties of nix urls (for back-compat): * - Allow unescaped spaces ' ' and '"' characters in queries. * - Allow '"', ' ' and '^' characters in the fragment component. From 4134258c03f524b2ac4cd92e4e045f3e6a409bf6 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 19 Aug 2025 23:00:32 +0200 Subject: [PATCH 220/382] onboarding.md: List the private rooms We forgot one in the latest onboarding --- maintainers/onboarding.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/maintainers/onboarding.md b/maintainers/onboarding.md index d7491db16..311cc64e6 100644 --- a/maintainers/onboarding.md +++ b/maintainers/onboarding.md @@ -3,5 +3,9 @@ - https://github.com/NixOS/nixos-homepage/ - https://github.com/orgs/NixOS/teams/nix-team -- Matrix room +- Matrix rooms + - [private] Nix maintainer team + - Nix ∪ Lix devs (also private) + - any open security issues if present and needed + - Team member should subscribe to notifications for the [Nix development category on Discourse](https://discourse.nixos.org/c/dev/nix/50) From 349d2c58e579ddd2e423a8a111d4e9cb102e7a1b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 15 Jul 2025 18:21:29 +0200 Subject: [PATCH 221/382] Use WAL mode for SQLite cache databases With "truncate" mode, if we try to write to the database while another process has an active write transaction, we'll block until the other transaction finishes. This is a problem for the evaluation cache in particular, since it uses long-running transactions. WAL mode does not have this issue: it just returns "busy" right away, so Nix will print error (ignored): SQLite database '/home/eelco/.cache/nix/eval-cache-v5/...' is busy and stop trying to write to the evaluation cache. (This was the intended/original behaviour, see AttrDb::doSQLite().) 
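
As a rough stand-alone illustration of the behaviour described above (this is only a sketch against the plain SQLite C API, not Nix's actual wrapper in src/libstore/sqlite.cc): once the cache database is in WAL mode and no busy timeout is installed, a conflicting write surfaces as SQLITE_BUSY immediately, so the caller can log a warning and skip the write instead of waiting for the other transaction.

    #include <sqlite3.h>
    #include <cstdio>

    int main()
    {
        sqlite3 * db = nullptr;
        if (sqlite3_open("cache.sqlite", &db) != SQLITE_OK)
            return 1;

        // Put the cache database in WAL mode, as this patch does for Nix's caches.
        sqlite3_exec(db, "pragma journal_mode = wal", nullptr, nullptr, nullptr);
        sqlite3_exec(db, "create table if not exists cache (x integer)", nullptr, nullptr, nullptr);

        // With no busy handler installed, a write that conflicts with another
        // process's transaction returns SQLITE_BUSY right away rather than blocking.
        int rc = sqlite3_exec(db, "insert into cache (x) values (1)", nullptr, nullptr, nullptr);
        if (rc == SQLITE_BUSY)
            std::fprintf(stderr, "warning (ignored): cache is busy, skipping write\n");

        sqlite3_close(db);
        return 0;
    }
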
--- src/libstore/sqlite.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc index dd9f10422..41c9f9e7d 100644 --- a/src/libstore/sqlite.cc +++ b/src/libstore/sqlite.cc @@ -99,7 +99,7 @@ SQLite::~SQLite() void SQLite::isCache() { exec("pragma synchronous = off"); - exec("pragma main.journal_mode = truncate"); + exec("pragma main.journal_mode = wal"); } void SQLite::exec(const std::string & stmt) From 4ab8ff5b4ce538bde5c51d2225c3747316fcf304 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 22 Jul 2025 11:47:52 +0200 Subject: [PATCH 222/382] SQLite: Use std::filesystem::path --- src/libexpr/eval-cache.cc | 4 ++-- src/libstore/include/nix/store/sqlite.hh | 3 ++- src/libstore/local-store.cc | 3 +-- src/libstore/sqlite.cc | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index dfb1b1a7e..4115196c5 100644 --- a/src/libexpr/eval-cache.cc +++ b/src/libexpr/eval-cache.cc @@ -69,10 +69,10 @@ struct AttrDb { auto state(_state->lock()); - Path cacheDir = getCacheDir() + "/eval-cache-v5"; + auto cacheDir = std::filesystem::path(getCacheDir()) / "eval-cache-v5"; createDirs(cacheDir); - Path dbPath = cacheDir + "/" + fingerprint.to_string(HashFormat::Base16, false) + ".sqlite"; + auto dbPath = cacheDir / (fingerprint.to_string(HashFormat::Base16, false) + ".sqlite"); state->db = SQLite(dbPath); state->db.isCache(); diff --git a/src/libstore/include/nix/store/sqlite.hh b/src/libstore/include/nix/store/sqlite.hh index e6d8a818a..3495c0bd1 100644 --- a/src/libstore/include/nix/store/sqlite.hh +++ b/src/libstore/include/nix/store/sqlite.hh @@ -1,6 +1,7 @@ #pragma once ///@file +#include #include #include @@ -41,7 +42,7 @@ struct SQLite SQLite() {} - SQLite(const Path & path, SQLiteOpenMode mode = SQLiteOpenMode::Normal); + SQLite(const std::filesystem::path & path, SQLiteOpenMode mode = SQLiteOpenMode::Normal); SQLite(const SQLite & from) = delete; SQLite & operator=(const SQLite & from) = delete; diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 81768e4eb..d8e103093 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -483,12 +483,11 @@ void LocalStore::openDB(State & state, bool create) throw SysError("Nix database directory '%1%' is not writable", dbDir); /* Open the Nix database. */ - std::string dbPath = dbDir + "/db.sqlite"; auto & db(state.db); auto openMode = config->readOnly ? SQLiteOpenMode::Immutable : create ? SQLiteOpenMode::Normal : SQLiteOpenMode::NoCreate; - state.db = SQLite(dbPath, openMode); + state.db = SQLite(std::filesystem::path(dbDir) / "db.sqlite", openMode); #ifdef __CYGWIN__ /* The cygwin version of sqlite3 has a patch which calls diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc index 41c9f9e7d..56a69470a 100644 --- a/src/libstore/sqlite.cc +++ b/src/libstore/sqlite.cc @@ -58,7 +58,7 @@ static void traceSQL(void * x, const char * sql) notice("SQL<[%1%]>", sql); }; -SQLite::SQLite(const Path & path, SQLiteOpenMode mode) +SQLite::SQLite(const std::filesystem::path & path, SQLiteOpenMode mode) { // useSQLiteWAL also indicates what virtual file system we need. Using // `unix-dotfile` is needed on NFS file systems and on Windows' Subsystem @@ -68,7 +68,7 @@ SQLite::SQLite(const Path & path, SQLiteOpenMode mode) int flags = immutable ? 
SQLITE_OPEN_READONLY : SQLITE_OPEN_READWRITE; if (mode == SQLiteOpenMode::Normal) flags |= SQLITE_OPEN_CREATE; - auto uri = "file:" + percentEncode(path) + "?immutable=" + (immutable ? "1" : "0"); + auto uri = "file:" + percentEncode(path.string()) + "?immutable=" + (immutable ? "1" : "0"); int ret = sqlite3_open_v2(uri.c_str(), &db, SQLITE_OPEN_URI | flags, vfs); if (ret != SQLITE_OK) { const char * err = sqlite3_errstr(ret); From 0df147b145f787377c4a883a9dbb1ad28c7405e3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 1 Aug 2025 16:40:37 +0200 Subject: [PATCH 223/382] Bump the version of the SQLite caches This avoids problems with older versions of Nix that don't put the caches in WAL mode. That's generally not a problem, until you do something like nix build --print-out-paths ... | cachix which deadlocks because cachix tries to switch the caches to truncate mode, which requires exclusive access. But the first process cannot make progress because the cachix process isn't reading from the pipe. --- src/libexpr/eval-cache.cc | 2 +- src/libfetchers/cache.cc | 2 +- src/libstore/nar-info-disk-cache.cc | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index 4115196c5..3ec344c41 100644 --- a/src/libexpr/eval-cache.cc +++ b/src/libexpr/eval-cache.cc @@ -69,7 +69,7 @@ struct AttrDb { auto state(_state->lock()); - auto cacheDir = std::filesystem::path(getCacheDir()) / "eval-cache-v5"; + auto cacheDir = std::filesystem::path(getCacheDir()) / "eval-cache-v6"; createDirs(cacheDir); auto dbPath = cacheDir / (fingerprint.to_string(HashFormat::Base16, false) + ".sqlite"); diff --git a/src/libfetchers/cache.cc b/src/libfetchers/cache.cc index 85fd94590..85a33e472 100644 --- a/src/libfetchers/cache.cc +++ b/src/libfetchers/cache.cc @@ -37,7 +37,7 @@ struct CacheImpl : Cache { auto state(_state.lock()); - auto dbPath = getCacheDir() + "/fetcher-cache-v3.sqlite"; + auto dbPath = getCacheDir() + "/fetcher-cache-v4.sqlite"; createDirs(dirOf(dbPath)); state->db = SQLite(dbPath); diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index 0350c874a..69d8d2e14 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -86,7 +86,7 @@ public: Sync _state; - NarInfoDiskCacheImpl(Path dbPath = getCacheDir() + "/binary-cache-v6.sqlite") + NarInfoDiskCacheImpl(Path dbPath = getCacheDir() + "/binary-cache-v7.sqlite") { auto state(_state.lock()); From 4ab579b4692fd812c3f39f810a9da8fbb474c406 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 19 Aug 2025 17:42:05 -0400 Subject: [PATCH 224/382] Remove constructor from `ChrootPath` I rather use designated initializers. 
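For reference, a small self-contained sketch of the before/after style (here `Path` is assumed to be a plain string alias, and the store path literal is made up):

    #include <map>
    #include <string>

    using Path = std::string;

    struct ChrootPath
    {
        Path source;
        bool optional = false;
    };

    int main()
    {
        std::map<Path, ChrootPath> pathsInChroot;

        // Before: a constructor call, where a bare `true` does not say
        // which field it sets.
        //   pathsInChroot.insert_or_assign("/bin/sh", ChrootPath("/nix/store/example-sh", true));

        // After: designated initializers name every field at the call site.
        pathsInChroot.insert_or_assign("/bin/sh", ChrootPath{.source = "/nix/store/example-sh", .optional = true});
    }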
--- .../unix/build/chroot-derivation-builder.cc | 2 +- .../unix/build/darwin-derivation-builder.cc | 2 +- src/libstore/unix/build/derivation-builder.cc | 20 +++++++------------ 3 files changed, 9 insertions(+), 15 deletions(-) diff --git a/src/libstore/unix/build/chroot-derivation-builder.cc b/src/libstore/unix/build/chroot-derivation-builder.cc index 669e3ffb7..887bb47f0 100644 --- a/src/libstore/unix/build/chroot-derivation-builder.cc +++ b/src/libstore/unix/build/chroot-derivation-builder.cc @@ -135,7 +135,7 @@ struct ChrootDerivationBuilder : virtual DerivationBuilderImpl for (auto & i : inputPaths) { auto p = store.printStorePath(i); - pathsInChroot.insert_or_assign(p, store.toRealPath(p)); + pathsInChroot.insert_or_assign(p, ChrootPath{.source = store.toRealPath(p)}); } /* If we're repairing, checking or rebuilding part of a diff --git a/src/libstore/unix/build/darwin-derivation-builder.cc b/src/libstore/unix/build/darwin-derivation-builder.cc index 5889ecf8f..21b3c6cb9 100644 --- a/src/libstore/unix/build/darwin-derivation-builder.cc +++ b/src/libstore/unix/build/darwin-derivation-builder.cc @@ -69,7 +69,7 @@ struct DarwinDerivationBuilder : DerivationBuilderImpl /* Add all our input paths to the chroot */ for (auto & i : inputPaths) { auto p = store.printStorePath(i); - pathsInChroot.insert_or_assign(p, p); + pathsInChroot.insert_or_assign(p, ChrootPath{.source = p}); } /* Violations will go to the syslog if you set this. Unfortunately the destination does not appear to be diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 3ea208924..7c326371f 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -112,13 +112,7 @@ protected: struct ChrootPath { Path source; - bool optional; - - ChrootPath(Path source = "", bool optional = false) - : source(source) - , optional(optional) - { - } + bool optional = false; }; typedef std::map PathsInChroot; // maps target path to source path @@ -886,14 +880,14 @@ DerivationBuilderImpl::PathsInChroot DerivationBuilderImpl::getPathsInSandbox() } size_t p = i.find('='); if (p == std::string::npos) - pathsInChroot[i] = {i, optional}; + pathsInChroot[i] = {.source = i, .optional = optional}; else - pathsInChroot[i.substr(0, p)] = {i.substr(p + 1), optional}; + pathsInChroot[i.substr(0, p)] = {.source = i.substr(p + 1), .optional = optional}; } if (hasPrefix(store.storeDir, tmpDirInSandbox())) { throw Error("`sandbox-build-dir` must not contain the storeDir"); } - pathsInChroot[tmpDirInSandbox()] = tmpDir; + pathsInChroot[tmpDirInSandbox()] = {.source = tmpDir}; /* Add the closure of store paths to the chroot. 
*/ StorePathSet closure; @@ -908,7 +902,7 @@ DerivationBuilderImpl::PathsInChroot DerivationBuilderImpl::getPathsInSandbox() } for (auto & i : closure) { auto p = store.printStorePath(i); - pathsInChroot.insert_or_assign(p, p); + pathsInChroot.insert_or_assign(p, ChrootPath{.source = p}); } PathSet allowedPaths = settings.allowedImpureHostPrefixes; @@ -964,9 +958,9 @@ DerivationBuilderImpl::PathsInChroot DerivationBuilderImpl::getPathsInSandbox() } else { auto p = line.find('='); if (p == std::string::npos) - pathsInChroot[line] = line; + pathsInChroot[line] = {.source = line}; else - pathsInChroot[line.substr(0, p)] = line.substr(p + 1); + pathsInChroot[line.substr(0, p)] = {.source = line.substr(p + 1)}; } } } From d53c7b816bc8d4b3a47a7dc661895fa794b8553f Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 19 Aug 2025 17:45:18 -0400 Subject: [PATCH 225/382] Push `#include` down to `.cc` file That is where it should be. --- src/libstore/unix/build/derivation-builder.cc | 1 + src/libstore/unix/include/nix/store/build/derivation-builder.hh | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 7c326371f..ce335ab19 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -15,6 +15,7 @@ #include "nix/store/posix-fs-canonicalise.hh" #include "nix/util/posix-source-accessor.hh" #include "nix/store/restricted-store.hh" +#include "nix/store/user-lock.hh" #include diff --git a/src/libstore/unix/include/nix/store/build/derivation-builder.hh b/src/libstore/unix/include/nix/store/build/derivation-builder.hh index 8a36a6a8f..a7ebb6c84 100644 --- a/src/libstore/unix/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/unix/include/nix/store/build/derivation-builder.hh @@ -8,7 +8,6 @@ #include "nix/store/parsed-derivations.hh" #include "nix/util/processes.hh" #include "nix/store/restricted-store.hh" -#include "nix/store/user-lock.hh" namespace nix { From 8463fef161ff06a19a5b593fab7f73d86c45a200 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 19 Aug 2025 17:46:26 -0400 Subject: [PATCH 226/382] Expose `ChrootPath`, `PathsInChroot` Will want these for settings in a moment. --- src/libstore/unix/build/derivation-builder.cc | 13 +------------ .../include/nix/store/build/derivation-builder.hh | 11 +++++++++++ 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index ce335ab19..932696029 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -107,17 +107,6 @@ protected: */ const DerivationType derivationType; - /** - * Stuff we need to pass to initChild(). 
- */ - struct ChrootPath - { - Path source; - bool optional = false; - }; - - typedef std::map PathsInChroot; // maps target path to source path - typedef StringMap Environment; Environment env; @@ -865,7 +854,7 @@ void DerivationBuilderImpl::startBuilder() processSandboxSetupMessages(); } -DerivationBuilderImpl::PathsInChroot DerivationBuilderImpl::getPathsInSandbox() +PathsInChroot DerivationBuilderImpl::getPathsInSandbox() { PathsInChroot pathsInChroot; diff --git a/src/libstore/unix/include/nix/store/build/derivation-builder.hh b/src/libstore/unix/include/nix/store/build/derivation-builder.hh index a7ebb6c84..1d9b44163 100644 --- a/src/libstore/unix/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/unix/include/nix/store/build/derivation-builder.hh @@ -11,6 +11,17 @@ namespace nix { +/** + * Stuff we need to pass to initChild(). + */ +struct ChrootPath +{ + Path source; + bool optional = false; +}; + +typedef std::map PathsInChroot; // maps target path to source path + /** * Parameters by (mostly) `const` reference for `DerivationBuilder`. */ From bce29ab2cf744cce2f0a33d21ac6102f32c1dd0a Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 19 Aug 2025 17:48:29 -0400 Subject: [PATCH 227/382] Move header outside Unix-only portion We'll neeed some definitions elsewhere --- .../{unix => }/include/nix/store/build/derivation-builder.hh | 2 ++ src/libstore/include/nix/store/meson.build | 1 + src/libstore/unix/include/nix/store/meson.build | 1 - 3 files changed, 3 insertions(+), 1 deletion(-) rename src/libstore/{unix => }/include/nix/store/build/derivation-builder.hh (98%) diff --git a/src/libstore/unix/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh similarity index 98% rename from src/libstore/unix/include/nix/store/build/derivation-builder.hh rename to src/libstore/include/nix/store/build/derivation-builder.hh index 1d9b44163..462352c76 100644 --- a/src/libstore/unix/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -188,7 +188,9 @@ struct DerivationBuilder : RestrictionContext virtual void killSandbox(bool getStats) = 0; }; +#ifndef _WIN32 // TODO enable `DerivationBuilder` on Windows std::unique_ptr makeDerivationBuilder( LocalStore & store, std::unique_ptr miscMethods, DerivationBuilderParams params); +#endif } // namespace nix diff --git a/src/libstore/include/nix/store/meson.build b/src/libstore/include/nix/store/meson.build index e41a7da4d..cba5d9ca5 100644 --- a/src/libstore/include/nix/store/meson.build +++ b/src/libstore/include/nix/store/meson.build @@ -12,6 +12,7 @@ config_pub_h = configure_file( headers = [ config_pub_h ] + files( 'binary-cache-store.hh', 'build-result.hh', + 'build/derivation-builder.hh', 'build/derivation-building-goal.hh', 'build/derivation-building-misc.hh', 'build/derivation-goal.hh', diff --git a/src/libstore/unix/include/nix/store/meson.build b/src/libstore/unix/include/nix/store/meson.build index 7cf973223..bdc4b2f20 100644 --- a/src/libstore/unix/include/nix/store/meson.build +++ b/src/libstore/unix/include/nix/store/meson.build @@ -2,7 +2,6 @@ include_dirs += include_directories('../..') headers += files( 'build/child.hh', - 'build/derivation-builder.hh', 'build/hook-instance.hh', 'user-lock.hh', ) From 52212635db147cb2f09d8e39e873abe00c525371 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 19 Aug 2025 18:25:26 -0400 Subject: [PATCH 228/382] No more `globals.hh` in headers This is needed to rearrange 
include order, but I also think it is a good thing anyways, as we seek to reduce the use of global settings variables over time. --- src/libcmd/common-eval-args.cc | 1 + src/libexpr/eval-cache.cc | 1 + src/libexpr/eval.cc | 1 + src/libexpr/primops.cc | 1 + src/libexpr/primops/context.cc | 1 + src/libexpr/primops/fetchClosure.cc | 1 + src/libfetchers-tests/git.cc | 1 + src/libfetchers/cache.cc | 1 + .../include/nix/store/tests/libstore.hh | 1 + .../build/derivation-building-goal.cc | 1 + src/libstore/build/derivation-goal.cc | 2 +- .../build/drv-output-substitution-goal.cc | 1 + src/libstore/build/goal.cc | 1 + src/libstore/build/substitution-goal.cc | 2 ++ src/libstore/build/worker.cc | 1 + src/libstore/builtins/fetchurl.cc | 1 + src/libstore/daemon.cc | 1 + src/libstore/derivation-options.cc | 1 + src/libstore/include/nix/store/globals.hh | 1 + .../include/nix/store/local-fs-store.hh | 20 +++++++++++++++++-- src/libstore/include/nix/store/local-store.hh | 12 ++++++++++- .../include/nix/store/restricted-store.hh | 7 +++++-- src/libstore/include/nix/store/store-api.hh | 13 ++++++++++-- .../include/nix/store/store-dir-config.hh | 1 - src/libstore/include/nix/store/store-open.hh | 9 ++++++--- src/libstore/legacy-ssh-store.cc | 1 + src/libstore/local-fs-store.cc | 10 ++++++++++ src/libstore/local-store.cc | 5 +++++ src/libstore/profiles.cc | 1 + src/libstore/restricted-store.cc | 1 + src/libstore/store-api.cc | 5 +++++ src/libstore/store-registration.cc | 6 ++++++ src/libstore/uds-remote-store.cc | 1 + src/libstore/unix/build/derivation-builder.cc | 1 + src/nix/build-remote/build-remote.cc | 1 + src/nix/bundle.cc | 1 + src/nix/config-check.cc | 1 + src/nix/develop.cc | 1 + src/nix/env.cc | 1 + src/nix/flake.cc | 5 ++++- src/nix/formatter.cc | 2 ++ src/nix/log.cc | 1 + src/nix/nix-store/nix-store.cc | 2 ++ src/nix/prefetch.cc | 1 + src/nix/run.cc | 2 ++ src/nix/upgrade-nix.cc | 1 + 46 files changed, 120 insertions(+), 13 deletions(-) diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc index 2e6ca4344..f7e086c16 100644 --- a/src/libcmd/common-eval-args.cc +++ b/src/libcmd/common-eval-args.cc @@ -15,6 +15,7 @@ #include "nix/fetchers/fetch-to-store.hh" #include "nix/cmd/compatibility-settings.hh" #include "nix/expr/eval-settings.hh" +#include "nix/store/globals.hh" namespace nix { diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index dfb1b1a7e..8faa73028 100644 --- a/src/libexpr/eval-cache.cc +++ b/src/libexpr/eval-cache.cc @@ -4,6 +4,7 @@ #include "nix/expr/eval.hh" #include "nix/expr/eval-inline.hh" #include "nix/store/store-api.hh" +#include "nix/store/globals.hh" // Need specialization involving `SymbolStr` just in this one module. 
#include "nix/util/strings-inline.hh" diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 293b05953..f0b199946 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -6,6 +6,7 @@ #include "nix/util/exit.hh" #include "nix/util/types.hh" #include "nix/util/util.hh" +#include "nix/util/environment-variables.hh" #include "nix/store/store-api.hh" #include "nix/store/derivations.hh" #include "nix/store/downstream-placeholder.hh" diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 6af179e4e..ca84f3038 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -5,6 +5,7 @@ #include "nix/expr/eval-settings.hh" #include "nix/expr/gc-small-vector.hh" #include "nix/expr/json-to-value.hh" +#include "nix/store/globals.hh" #include "nix/store/names.hh" #include "nix/store/path-references.hh" #include "nix/store/store-api.hh" diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc index 11b59efcd..f037fdb80 100644 --- a/src/libexpr/primops/context.cc +++ b/src/libexpr/primops/context.cc @@ -2,6 +2,7 @@ #include "nix/expr/eval-inline.hh" #include "nix/store/derivations.hh" #include "nix/store/store-api.hh" +#include "nix/store/globals.hh" namespace nix { diff --git a/src/libexpr/primops/fetchClosure.cc b/src/libexpr/primops/fetchClosure.cc index d3b38e5a3..469459818 100644 --- a/src/libexpr/primops/fetchClosure.cc +++ b/src/libexpr/primops/fetchClosure.cc @@ -3,6 +3,7 @@ #include "nix/store/realisation.hh" #include "nix/store/make-content-addressed.hh" #include "nix/util/url.hh" +#include "nix/util/environment-variables.hh" namespace nix { diff --git a/src/libfetchers-tests/git.cc b/src/libfetchers-tests/git.cc index 83ee1a1a2..af987e260 100644 --- a/src/libfetchers-tests/git.cc +++ b/src/libfetchers-tests/git.cc @@ -1,4 +1,5 @@ #include "nix/store/store-open.hh" +#include "nix/store/globals.hh" #include "nix/fetchers/fetch-settings.hh" #include "nix/fetchers/fetchers.hh" #include "nix/fetchers/git-utils.hh" diff --git a/src/libfetchers/cache.cc b/src/libfetchers/cache.cc index 85fd94590..ed4776704 100644 --- a/src/libfetchers/cache.cc +++ b/src/libfetchers/cache.cc @@ -4,6 +4,7 @@ #include "nix/store/sqlite.hh" #include "nix/util/sync.hh" #include "nix/store/store-api.hh" +#include "nix/store/globals.hh" #include diff --git a/src/libstore-test-support/include/nix/store/tests/libstore.hh b/src/libstore-test-support/include/nix/store/tests/libstore.hh index 822ec3aa8..28b29fa31 100644 --- a/src/libstore-test-support/include/nix/store/tests/libstore.hh +++ b/src/libstore-test-support/include/nix/store/tests/libstore.hh @@ -6,6 +6,7 @@ #include "nix/store/store-api.hh" #include "nix/store/store-open.hh" +#include "nix/store/globals.hh" namespace nix { diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 32abde041..e0a8717a0 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -12,6 +12,7 @@ #include "nix/store/common-protocol.hh" #include "nix/store/common-protocol-impl.hh" #include "nix/store/local-store.hh" // TODO remove, along with remaining downcasts +#include "nix/store/globals.hh" #include #include diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index e8523569d..883121d94 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -11,7 +11,7 @@ #include "nix/util/compression.hh" #include "nix/store/common-protocol.hh" #include 
"nix/store/common-protocol-impl.hh" // Don't remove is actually needed -#include "nix/store/local-store.hh" // TODO remove, along with remaining downcasts +#include "nix/store/globals.hh" #include #include diff --git a/src/libstore/build/drv-output-substitution-goal.cc b/src/libstore/build/drv-output-substitution-goal.cc index 222cd8618..b6ace4784 100644 --- a/src/libstore/build/drv-output-substitution-goal.cc +++ b/src/libstore/build/drv-output-substitution-goal.cc @@ -4,6 +4,7 @@ #include "nix/store/build/substitution-goal.hh" #include "nix/util/callback.hh" #include "nix/store/store-open.hh" +#include "nix/store/globals.hh" namespace nix { diff --git a/src/libstore/build/goal.cc b/src/libstore/build/goal.cc index 2e9ca5bf7..6266329e7 100644 --- a/src/libstore/build/goal.cc +++ b/src/libstore/build/goal.cc @@ -1,5 +1,6 @@ #include "nix/store/build/goal.hh" #include "nix/store/build/worker.hh" +#include "nix/store/globals.hh" namespace nix { diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index 3c0e96152..ab95ea4a2 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -4,6 +4,8 @@ #include "nix/store/nar-info.hh" #include "nix/util/finally.hh" #include "nix/util/signals.hh" +#include "nix/store/globals.hh" + #include namespace nix { diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index 9cb36fa16..3e6e0bef0 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -10,6 +10,7 @@ # include "nix/store/build/hook-instance.hh" #endif #include "nix/util/signals.hh" +#include "nix/store/globals.hh" namespace nix { diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index 55add7876..519ad9428 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -1,6 +1,7 @@ #include "nix/store/builtins.hh" #include "nix/store/filetransfer.hh" #include "nix/store/store-api.hh" +#include "nix/store/globals.hh" #include "nix/util/archive.hh" #include "nix/util/compression.hh" diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 6211850cb..4f28a1e0d 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -16,6 +16,7 @@ #include "nix/util/args.hh" #include "nix/util/git.hh" #include "nix/util/logging.hh" +#include "nix/store/globals.hh" #ifndef _WIN32 // TODO need graceful async exit support on Windows? 
# include "nix/util/monitor-fd.hh" diff --git a/src/libstore/derivation-options.cc b/src/libstore/derivation-options.cc index 6656a4798..b41b97f4c 100644 --- a/src/libstore/derivation-options.cc +++ b/src/libstore/derivation-options.cc @@ -5,6 +5,7 @@ #include "nix/store/store-api.hh" #include "nix/util/types.hh" #include "nix/util/util.hh" +#include "nix/store/globals.hh" #include #include diff --git a/src/libstore/include/nix/store/globals.hh b/src/libstore/include/nix/store/globals.hh index e97210892..2ac4678e7 100644 --- a/src/libstore/include/nix/store/globals.hh +++ b/src/libstore/include/nix/store/globals.hh @@ -11,6 +11,7 @@ #include "nix/util/environment-variables.hh" #include "nix/util/experimental-features.hh" #include "nix/util/users.hh" +#include "nix/store/build/derivation-builder.hh" #include "nix/store/config.hh" diff --git a/src/libstore/include/nix/store/local-fs-store.hh b/src/libstore/include/nix/store/local-fs-store.hh index cae50e762..84777f3d7 100644 --- a/src/libstore/include/nix/store/local-fs-store.hh +++ b/src/libstore/include/nix/store/local-fs-store.hh @@ -22,15 +22,31 @@ struct LocalFSStoreConfig : virtual StoreConfig OptionalPathSetting rootDir{this, std::nullopt, "root", "Directory prefixed to all other paths."}; +private: + + /** + * An indirection so that we don't need to refer to global settings + * in headers. + */ + static Path getDefaultStateDir(); + + /** + * An indirection so that we don't need to refer to global settings + * in headers. + */ + static Path getDefaultLogDir(); + +public: + PathSetting stateDir{ this, - rootDir.get() ? *rootDir.get() + "/nix/var/nix" : settings.nixStateDir, + rootDir.get() ? *rootDir.get() + "/nix/var/nix" : getDefaultStateDir(), "state", "Directory where Nix stores state."}; PathSetting logDir{ this, - rootDir.get() ? *rootDir.get() + "/nix/var/log/nix" : settings.nixLogDir, + rootDir.get() ? *rootDir.get() + "/nix/var/log/nix" : getDefaultLogDir(), "log", "directory where Nix stores log files."}; diff --git a/src/libstore/include/nix/store/local-store.hh b/src/libstore/include/nix/store/local-store.hh index 3d7e8301a..f7dfcb5ad 100644 --- a/src/libstore/include/nix/store/local-store.hh +++ b/src/libstore/include/nix/store/local-store.hh @@ -74,9 +74,19 @@ struct LocalStoreConfig : std::enable_shared_from_this, LocalStoreConfig(std::string_view scheme, std::string_view authority, const Params & params); +private: + + /** + * An indirection so that we don't need to refer to global settings + * in headers. + */ + bool getDefaultRequireSigs(); + +public: + Setting requireSigs{ this, - settings.requireSigs, + getDefaultRequireSigs(), "require-sigs", "Whether store paths copied into this store should have a trusted signature."}; diff --git a/src/libstore/include/nix/store/restricted-store.hh b/src/libstore/include/nix/store/restricted-store.hh index b5680da4d..8bbb2ff54 100644 --- a/src/libstore/include/nix/store/restricted-store.hh +++ b/src/libstore/include/nix/store/restricted-store.hh @@ -1,10 +1,13 @@ #pragma once ///@file -#include "nix/store/local-store.hh" +#include "nix/store/store-api.hh" namespace nix { +class LocalStore; +struct LocalStoreConfig; + /** * A restricted store has a pointer to one of these, which manages the * restrictions that are in place. @@ -55,6 +58,6 @@ struct RestrictionContext /** * Create a shared pointer to a restricted store. 
*/ -ref makeRestrictedStore(ref config, ref next, RestrictionContext & context); +ref makeRestrictedStore(ref config, ref next, RestrictionContext & context); } // namespace nix diff --git a/src/libstore/include/nix/store/store-api.hh b/src/libstore/include/nix/store/store-api.hh index 6393ccbc7..987ed4d48 100644 --- a/src/libstore/include/nix/store/store-api.hh +++ b/src/libstore/include/nix/store/store-api.hh @@ -8,7 +8,6 @@ #include "nix/util/serialise.hh" #include "nix/util/lru-cache.hh" #include "nix/util/sync.hh" -#include "nix/store/globals.hh" #include "nix/util/configuration.hh" #include "nix/store/path-info.hh" #include "nix/util/repair-flag.hh" @@ -89,9 +88,19 @@ struct StoreConfigBase : Config { using Config::Config; +private: + + /** + * An indirection so that we don't need to refer to global settings + * in headers. + */ + static Path getDefaultNixStoreDir(); + +public: + const PathSetting storeDir_{ this, - settings.nixStore, + getDefaultNixStoreDir(), "store", R"( Logical location of the Nix store, usually diff --git a/src/libstore/include/nix/store/store-dir-config.hh b/src/libstore/include/nix/store/store-dir-config.hh index 2dfd601f1..07cda5c12 100644 --- a/src/libstore/include/nix/store/store-dir-config.hh +++ b/src/libstore/include/nix/store/store-dir-config.hh @@ -3,7 +3,6 @@ #include "nix/store/path.hh" #include "nix/util/hash.hh" #include "nix/store/content-address.hh" -#include "nix/store/globals.hh" #include "nix/util/configuration.hh" #include diff --git a/src/libstore/include/nix/store/store-open.hh b/src/libstore/include/nix/store/store-open.hh index 0e8724990..ef7d81675 100644 --- a/src/libstore/include/nix/store/store-open.hh +++ b/src/libstore/include/nix/store/store-open.hh @@ -30,9 +30,12 @@ ref openStore(StoreReference && storeURI); * Opens the store at `uri`, where `uri` is in the format expected by * `StoreReference::parse` */ -ref openStore( - const std::string & uri = settings.storeUri.get(), - const StoreReference::Params & extraParams = StoreReference::Params()); +ref openStore(const std::string & uri, const StoreReference::Params & extraParams = StoreReference::Params()); + +/** + * Short-hand which opens the default store, according to global settings + */ +ref openStore(); /** * @return the default substituter stores, defined by the diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 0435cfa62..0e9ee35bf 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -13,6 +13,7 @@ #include "nix/store/derivations.hh" #include "nix/util/callback.hh" #include "nix/store/store-registration.hh" +#include "nix/store/globals.hh" namespace nix { diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc index fd1fe4459..e0f07b91b 100644 --- a/src/libstore/local-fs-store.cc +++ b/src/libstore/local-fs-store.cc @@ -8,6 +8,16 @@ namespace nix { +Path LocalFSStoreConfig::getDefaultStateDir() +{ + return settings.nixStateDir; +} + +Path LocalFSStoreConfig::getDefaultLogDir() +{ + return settings.nixLogDir; +} + LocalFSStoreConfig::LocalFSStoreConfig(PathView rootDir, const Params & params) : StoreConfig(params) // Default `?root` from `rootDir` if non set diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 81768e4eb..55862477c 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -86,6 +86,11 @@ ref LocalStore::Config::openStore() const return make_ref(ref{shared_from_this()}); } +bool LocalStoreConfig::getDefaultRequireSigs() +{ + 
return settings.requireSigs; +} + struct LocalStore::State::Stmts { /* Some precompiled SQLite statements. */ diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc index 2b679e2a3..3f6fcb6ff 100644 --- a/src/libstore/profiles.cc +++ b/src/libstore/profiles.cc @@ -1,5 +1,6 @@ #include "nix/store/profiles.hh" #include "nix/util/signals.hh" +#include "nix/store/globals.hh" #include "nix/store/store-api.hh" #include "nix/store/local-fs-store.hh" #include "nix/util/users.hh" diff --git a/src/libstore/restricted-store.cc b/src/libstore/restricted-store.cc index 1fb139dff..e0f43ab6c 100644 --- a/src/libstore/restricted-store.cc +++ b/src/libstore/restricted-store.cc @@ -2,6 +2,7 @@ #include "nix/store/build-result.hh" #include "nix/util/callback.hh" #include "nix/store/realisation.hh" +#include "nix/store/local-store.hh" namespace nix { diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index bd5ae9284..fad79a83e 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -27,6 +27,11 @@ using json = nlohmann::json; namespace nix { +Path StoreConfigBase::getDefaultNixStoreDir() +{ + return settings.nixStore; +} + StoreConfig::StoreConfig(const Params & params) : StoreConfigBase(params) , StoreDirConfig{storeDir_} diff --git a/src/libstore/store-registration.cc b/src/libstore/store-registration.cc index fd8d67437..cfaf86b1e 100644 --- a/src/libstore/store-registration.cc +++ b/src/libstore/store-registration.cc @@ -2,9 +2,15 @@ #include "nix/store/store-open.hh" #include "nix/store/local-store.hh" #include "nix/store/uds-remote-store.hh" +#include "nix/store/globals.hh" namespace nix { +ref openStore() +{ + return openStore(settings.storeUri.get()); +} + ref openStore(const std::string & uri, const Store::Config::Params & extraParams) { return openStore(StoreReference::parse(uri, extraParams)); diff --git a/src/libstore/uds-remote-store.cc b/src/libstore/uds-remote-store.cc index 1d3ecb415..4871b4913 100644 --- a/src/libstore/uds-remote-store.cc +++ b/src/libstore/uds-remote-store.cc @@ -2,6 +2,7 @@ #include "nix/util/unix-domain-socket.hh" #include "nix/store/worker-protocol.hh" #include "nix/store/store-registration.hh" +#include "nix/store/globals.hh" #include #include diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 932696029..cef2340dd 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -16,6 +16,7 @@ #include "nix/util/posix-source-accessor.hh" #include "nix/store/restricted-store.hh" #include "nix/store/user-lock.hh" +#include "nix/store/globals.hh" #include diff --git a/src/nix/build-remote/build-remote.cc b/src/nix/build-remote/build-remote.cc index 786085106..11df8cc5e 100644 --- a/src/nix/build-remote/build-remote.cc +++ b/src/nix/build-remote/build-remote.cc @@ -22,6 +22,7 @@ #include "nix/store/local-store.hh" #include "nix/cmd/legacy.hh" #include "nix/util/experimental-features.hh" +#include "nix/store/globals.hh" using namespace nix; using std::cin; diff --git a/src/nix/bundle.cc b/src/nix/bundle.cc index ed70ba47e..29960c281 100644 --- a/src/nix/bundle.cc +++ b/src/nix/bundle.cc @@ -5,6 +5,7 @@ #include "nix/store/store-api.hh" #include "nix/store/local-fs-store.hh" #include "nix/expr/eval-inline.hh" +#include "nix/store/globals.hh" namespace nix::fs { using namespace std::filesystem; diff --git a/src/nix/config-check.cc b/src/nix/config-check.cc index dc6453e27..c04943eab 100644 --- a/src/nix/config-check.cc +++ 
b/src/nix/config-check.cc @@ -9,6 +9,7 @@ #include "nix/store/local-fs-store.hh" #include "nix/store/worker-protocol.hh" #include "nix/util/executable-path.hh" +#include "nix/store/globals.hh" namespace nix::fs { using namespace std::filesystem; diff --git a/src/nix/develop.cc b/src/nix/develop.cc index d3381a988..f59dc5bee 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -5,6 +5,7 @@ #include "nix/main/common-args.hh" #include "nix/main/shared.hh" #include "nix/store/store-api.hh" +#include "nix/store/globals.hh" #include "nix/store/outputs-spec.hh" #include "nix/store/derivations.hh" diff --git a/src/nix/env.cc b/src/nix/env.cc index 277bd0fdd..d91ee72d7 100644 --- a/src/nix/env.cc +++ b/src/nix/env.cc @@ -6,6 +6,7 @@ #include "run.hh" #include "nix/util/strings.hh" #include "nix/util/executable-path.hh" +#include "nix/util/environment-variables.hh" using namespace nix; diff --git a/src/nix/flake.cc b/src/nix/flake.cc index a7b72c7e1..c04eab291 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -1,4 +1,3 @@ -#include "flake-command.hh" #include "nix/main/common-args.hh" #include "nix/main/shared.hh" #include "nix/expr/eval.hh" @@ -17,6 +16,7 @@ #include "nix/util/users.hh" #include "nix/fetchers/fetch-to-store.hh" #include "nix/store/local-fs-store.hh" +#include "nix/store/globals.hh" #include #include @@ -24,6 +24,9 @@ #include "nix/util/strings-inline.hh" +// FIXME is this supposed to be private or not? +#include "flake-command.hh" + namespace nix::fs { using namespace std::filesystem; } diff --git a/src/nix/formatter.cc b/src/nix/formatter.cc index 212bb8d70..f5eb966d6 100644 --- a/src/nix/formatter.cc +++ b/src/nix/formatter.cc @@ -5,6 +5,8 @@ #include "nix/store/local-fs-store.hh" #include "nix/cmd/installable-derived-path.hh" #include "nix/util/environment-variables.hh" +#include "nix/store/globals.hh" + #include "run.hh" using namespace nix; diff --git a/src/nix/log.cc b/src/nix/log.cc index cabe611fa..150b4b371 100644 --- a/src/nix/log.cc +++ b/src/nix/log.cc @@ -1,6 +1,7 @@ #include "nix/cmd/command.hh" #include "nix/main/common-args.hh" #include "nix/main/shared.hh" +#include "nix/store/globals.hh" #include "nix/store/store-open.hh" #include "nix/store/log-store.hh" diff --git a/src/nix/nix-store/nix-store.cc b/src/nix/nix-store/nix-store.cc index 93fe4df45..4191ea0d6 100644 --- a/src/nix/nix-store/nix-store.cc +++ b/src/nix/nix-store/nix-store.cc @@ -12,7 +12,9 @@ #include "graphml.hh" #include "nix/cmd/legacy.hh" #include "nix/util/posix-source-accessor.hh" +#include "nix/store/globals.hh" #include "nix/store/path-with-outputs.hh" + #include "man-pages.hh" #ifndef _WIN32 // TODO implement on Windows or provide allowed-to-noop interface diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index b651a4c97..b23b11d02 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -12,6 +12,7 @@ #include "nix/util/posix-source-accessor.hh" #include "nix/cmd/misc-store-flags.hh" #include "nix/util/terminal.hh" +#include "nix/util/environment-variables.hh" #include "man-pages.hh" diff --git a/src/nix/run.cc b/src/nix/run.cc index c3d416a6e..368a5ed57 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -11,6 +11,8 @@ #include "nix/util/source-accessor.hh" #include "nix/expr/eval.hh" #include "nix/util/util.hh" +#include "nix/store/globals.hh" + #include #ifdef __linux__ diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc index 3037d1986..f6668f6dc 100644 --- a/src/nix/upgrade-nix.cc +++ b/src/nix/upgrade-nix.cc @@ -8,6 +8,7 @@ #include 
"nix/expr/attr-path.hh" #include "nix/store/names.hh" #include "nix/util/executable-path.hh" +#include "nix/store/globals.hh" #include "self-exe.hh" using namespace nix; From a712445a7a48a00148d0f2b41917c03d7259dabc Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 20 Aug 2025 12:21:42 -0400 Subject: [PATCH 229/382] Make `Settings::sandboxPaths` well-typed Parsing logic is moved from `DerivationBuilder`, where is doesn't belong, to `Settings` itself, where it does. --- src/libstore/globals.cc | 59 ++++++++++++++++++- src/libstore/include/nix/store/globals.hh | 16 ++++- src/libstore/unix/build/derivation-builder.cc | 18 +----- 3 files changed, 73 insertions(+), 20 deletions(-) diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 966d37090..612e79ab0 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -86,13 +86,22 @@ Settings::Settings() } #if (defined(__linux__) || defined(__FreeBSD__)) && defined(SANDBOX_SHELL) - sandboxPaths = tokenizeString("/bin/sh=" SANDBOX_SHELL); + sandboxPaths = {{"/bin/sh", {.source = SANDBOX_SHELL}}}; #endif /* chroot-like behavior from Apple's sandbox */ #ifdef __APPLE__ - sandboxPaths = tokenizeString( - "/System/Library/Frameworks /System/Library/PrivateFrameworks /bin/sh /bin/bash /private/tmp /private/var/tmp /usr/lib"); + for (PathView p : { + "/System/Library/Frameworks", + "/System/Library/PrivateFrameworks", + "/bin/sh", + "/bin/bash", + "/private/tmp", + "/private/var/tmp", + "/usr/lib", + }) { + sandboxPaths.get().insert_or_assign(std::string{p}, ChrootPath{.source = std::string{p}}); + } allowedImpureHostPrefixes = tokenizeString("/System/Library /usr/lib /dev /bin/sh"); #endif } @@ -317,6 +326,42 @@ void BaseSetting::convertToArg(Args & args, const std::string & cat }); } +NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE(ChrootPath, source, optional) + +template<> +PathsInChroot BaseSetting::parse(const std::string & str) const +{ + PathsInChroot pathsInChroot; + for (auto i : tokenizeString(str)) { + if (i.empty()) + continue; + bool optional = false; + if (i[i.size() - 1] == '?') { + optional = true; + i.pop_back(); + } + size_t p = i.find('='); + if (p == std::string::npos) + pathsInChroot[i] = {.source = i, .optional = optional}; + else + pathsInChroot[i.substr(0, p)] = {.source = i.substr(p + 1), .optional = optional}; + } + return pathsInChroot; +} + +template<> +std::string BaseSetting::to_string() const +{ + std::vector accum; + for (auto & [name, cp] : value) { + std::string s = name == cp.source ? 
name : name + "=" + cp.source; + if (cp.optional) + s += "?"; + accum.push_back(std::move(s)); + } + return concatStringsSep(" ", accum); +} + unsigned int MaxBuildJobsSetting::parse(const std::string & str) const { if (str == "auto") @@ -329,6 +374,14 @@ unsigned int MaxBuildJobsSetting::parse(const std::string & str) const } } +template<> +void BaseSetting::appendOrSet(PathsInChroot newValue, bool append) +{ + if (!append) + value.clear(); + value.insert(std::make_move_iterator(newValue.begin()), std::make_move_iterator(newValue.end())); +} + static void preloadNSS() { /* builtin:fetchurl can trigger a DNS lookup, which with glibc can trigger a dynamic library load of diff --git a/src/libstore/include/nix/store/globals.hh b/src/libstore/include/nix/store/globals.hh index 2ac4678e7..2cd92467c 100644 --- a/src/libstore/include/nix/store/globals.hh +++ b/src/libstore/include/nix/store/globals.hh @@ -24,6 +24,20 @@ SandboxMode BaseSetting::parse(const std::string & str) const; template<> std::string BaseSetting::to_string() const; +template<> +PathsInChroot BaseSetting::parse(const std::string & str) const; +template<> +std::string BaseSetting::to_string() const; + +template<> +struct BaseSetting::trait +{ + static constexpr bool appendable = true; +}; + +template<> +void BaseSetting::appendOrSet(PathsInChroot newValue, bool append); + struct MaxBuildJobsSetting : public BaseSetting { MaxBuildJobsSetting( @@ -698,7 +712,7 @@ public: )", {"build-use-chroot", "build-use-sandbox"}}; - Setting sandboxPaths{ + Setting sandboxPaths{ this, {}, "sandbox-paths", diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index cef2340dd..f6546ec62 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -857,24 +857,10 @@ void DerivationBuilderImpl::startBuilder() PathsInChroot DerivationBuilderImpl::getPathsInSandbox() { - PathsInChroot pathsInChroot; - /* Allow a user-configurable set of directories from the host file system. */ - for (auto i : settings.sandboxPaths.get()) { - if (i.empty()) - continue; - bool optional = false; - if (i[i.size() - 1] == '?') { - optional = true; - i.pop_back(); - } - size_t p = i.find('='); - if (p == std::string::npos) - pathsInChroot[i] = {.source = i, .optional = optional}; - else - pathsInChroot[i.substr(0, p)] = {.source = i.substr(p + 1), .optional = optional}; - } + PathsInChroot pathsInChroot = settings.sandboxPaths.get(); + if (hasPrefix(store.storeDir, tmpDirInSandbox())) { throw Error("`sandbox-build-dir` must not contain the storeDir"); } From 2767ae35d9824e861df3211f4153bff7d3690023 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 15 Aug 2025 16:50:46 -0400 Subject: [PATCH 230/382] Deduplicate "export reference graph" logic a bit The first part on `drvOptions.exportReferencesGraph` is the same in both cases. It is just how the information is finally rendered that is different. 
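A toy model of that shared first part, i.e. the validation and truncation that `getParsedExportReferencesGraph` now does in one place (the store directory, file names and paths below are made up, and the real code uses `StorePath` and the store's own parsing rather than raw strings):

    #include <iostream>
    #include <map>
    #include <set>
    #include <stdexcept>
    #include <string>
    #include <vector>

    // Each entry maps an output file name to raw strings that must be
    // store paths, possibly with a trailing sub-path that gets dropped.
    std::set<std::string> toStorePaths(const std::vector<std::string> & raw)
    {
        const std::string storeDir = "/nix/store/";
        std::set<std::string> res;
        for (auto & s : raw) {
            if (s.compare(0, storeDir.size(), storeDir) != 0)
                throw std::runtime_error("'" + s + "' is not a store path");
            res.insert(s.substr(0, s.find('/', storeDir.size())));
        }
        return res;
    }

    int main()
    {
        std::map<std::string, std::vector<std::string>> exportReferencesGraph{
            {"closure-info", {"/nix/store/aaaa-foo", "/nix/store/bbbb-bar/share/doc"}},
        };
        // The two callers only differ in how they render these sets: a
        // registration file for the classic builder, JSON for structured attrs.
        for (auto & [fileName, raw] : exportReferencesGraph)
            for (auto & p : toStorePaths(raw))
                std::cout << fileName << ": " << p << "\n";
    }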
--- src/libstore/derivation-options.cc | 18 ++++++++++++++++++ .../include/nix/store/derivation-options.hh | 18 ++++++++++++++++++ src/libstore/parsed-derivations.cc | 5 +---- src/libstore/unix/build/derivation-builder.cc | 10 ++-------- 4 files changed, 39 insertions(+), 12 deletions(-) diff --git a/src/libstore/derivation-options.cc b/src/libstore/derivation-options.cc index b41b97f4c..1acb9dc03 100644 --- a/src/libstore/derivation-options.cc +++ b/src/libstore/derivation-options.cc @@ -256,6 +256,24 @@ DerivationOptions::fromStructuredAttrs(const StringMap & env, const StructuredAt }; } +std::map +DerivationOptions::getParsedExportReferencesGraph(const StoreDirConfig & store) const +{ + std::map res; + + for (auto & [fileName, ss] : exportReferencesGraph) { + StorePathSet storePaths; + for (auto & storePathS : ss) { + if (!store.isInStore(storePathS)) + throw BuildError("'exportReferencesGraph' contains a non-store path '%1%'", storePathS); + storePaths.insert(store.toStorePath(storePathS).first); + } + res.insert_or_assign(fileName, storePaths); + } + + return res; +} + StringSet DerivationOptions::getRequiredSystemFeatures(const BasicDerivation & drv) const { // FIXME: cache this? diff --git a/src/libstore/include/nix/store/derivation-options.hh b/src/libstore/include/nix/store/derivation-options.hh index 98517e904..88694f730 100644 --- a/src/libstore/include/nix/store/derivation-options.hh +++ b/src/libstore/include/nix/store/derivation-options.hh @@ -8,10 +8,12 @@ #include "nix/util/types.hh" #include "nix/util/json-impls.hh" +#include "nix/store/path.hh" namespace nix { class Store; +struct StoreDirConfig; struct BasicDerivation; struct StructuredAttrs; @@ -116,6 +118,22 @@ struct DerivationOptions */ std::map exportReferencesGraph; + /** + * Once a derivations is resolved, the strings in in + * `exportReferencesGraph` should all be store paths (with possible + * suffix paths, but those are discarded). + * + * @return The parsed path set for for each key in the map. + * + * @todo Ideally, `exportReferencesGraph` would just store + * `StorePath`s for this, but we can't just do that, because for CA + * derivations they is actually in general `DerivedPath`s (via + * placeholder strings) until the derivation is resolved and exact + * inputs store paths are known. We can use better types for that + * too, but that is a longer project. + */ + std::map getParsedExportReferencesGraph(const StoreDirConfig & store) const; + /** * env: __sandboxProfile * diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc index 1006bbc0a..9e8d44d6e 100644 --- a/src/libstore/parsed-derivations.cc +++ b/src/libstore/parsed-derivations.cc @@ -113,10 +113,7 @@ nlohmann::json StructuredAttrs::prepareStructuredAttrs( json["outputs"] = std::move(outputsJson); /* Handle exportReferencesGraph. */ - for (auto & [key, inputPaths] : drvOptions.exportReferencesGraph) { - StorePathSet storePaths; - for (auto & p : inputPaths) - storePaths.insert(store.toStorePath(p).first); + for (auto & [key, storePaths] : drvOptions.getParsedExportReferencesGraph(store)) { json[key] = pathInfoToJSON(store, store.exportReferences(storePaths, storePaths)); } diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index f6546ec62..76468cca0 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -785,17 +785,11 @@ void DerivationBuilderImpl::startBuilder() /* Handle exportReferencesGraph(), if set. 
*/ if (!drv.structuredAttrs) { - for (auto & [fileName, ss] : drvOptions.exportReferencesGraph) { - StorePathSet storePathSet; - for (auto & storePathS : ss) { - if (!store.isInStore(storePathS)) - throw BuildError("'exportReferencesGraph' contains a non-store path '%1%'", storePathS); - storePathSet.insert(store.toStorePath(storePathS).first); - } + for (auto & [fileName, storePaths] : drvOptions.getParsedExportReferencesGraph(store)) { /* Write closure info to . */ writeFile( tmpDir + "/" + fileName, - store.makeValidityRegistration(store.exportReferences(storePathSet, inputPaths), false, false)); + store.makeValidityRegistration(store.exportReferences(storePaths, inputPaths), false, false)); } } From 92b10cf3f5061553b4f0400a64c661d57c6ef674 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 18 Aug 2025 15:26:54 -0400 Subject: [PATCH 231/382] `DerivationBuilderImpl::writeStructuredAttrs` remove a rewrite As much as I prefer rewriting the parsed rather than unparsed JSON for elegance, this gets in the way of the separation of concerns that I am trying to do. As a practical matter, any rewriting that this did will also be done by the second round of rewriting that remains below, so removing this code should have no effect. --- src/libstore/unix/build/derivation-builder.cc | 8 -------- 1 file changed, 8 deletions(-) diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 76468cca0..0b6dbe670 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -1106,14 +1106,6 @@ void DerivationBuilderImpl::writeStructuredAttrs() { if (drv.structuredAttrs) { auto json = drv.structuredAttrs->prepareStructuredAttrs(store, drvOptions, inputPaths, drv.outputs); - nlohmann::json rewritten; - for (auto & [i, v] : json["outputs"].get()) { /* The placeholder must have a rewrite, so we use it to cover both the - cases where we know or don't know the output path ahead of time. */ - rewritten[i] = rewriteStrings((std::string) v, inputRewrites); - } - - json["outputs"] = rewritten; auto jsonSh = StructuredAttrs::writeShell(json); From e3c74f5a134061f44b91fccaf6ec87786c790bae Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 18 Aug 2025 16:37:06 -0400 Subject: [PATCH 232/382] Desugar structured attrs, "export reference graph" outside `DerivationBuilder` I think this is a better separation of concerns. `DerivationBuilder` doesn't need to know the final, query-heavy details about how these things are constructed. It just operates on the level of "simple, stupid" files and environment variables. 
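To make that concrete, here is a toy model of the interface the builder is left with (field and variable names mirror the diff below, but the types are simplified and the sample values are made up):

    #include <iostream>
    #include <map>
    #include <optional>
    #include <string>

    struct EnvEntry
    {
        std::optional<std::string> nameOfPassAsFile;
        std::string value;
    };

    int main()
    {
        // Desugared by the goal: structured attrs, passAsFile and
        // exportReferencesGraph all end up as plain entries here.
        std::map<std::string, EnvEntry> finalEnv{
            {"NIX_ATTRS_JSON_FILE", EnvEntry{.nameOfPassAsFile = ".attrs.json", .value = "{ ... }"}},
            {"name", EnvEntry{.value = "hello"}},
        };
        std::map<std::string, std::string> extraFiles{{"closure-info", "..."}};

        // All the builder has to do: write files and set variables.
        for (auto & [name, entry] : finalEnv) {
            if (entry.nameOfPassAsFile)
                std::cout << name << " -> file " << *entry.nameOfPassAsFile << "\n";
            else
                std::cout << name << " = " << entry.value << "\n";
        }
        for (auto & [fileName, contents] : extraFiles)
            std::cout << "extra file: " << fileName << "\n";
    }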
--- .../build/derivation-building-goal.cc | 38 +++++++++++++ .../nix/store/build/derivation-builder.hh | 35 +++++++++++- src/libstore/unix/build/derivation-builder.cc | 54 ++++++++----------- 3 files changed, 93 insertions(+), 34 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index e0a8717a0..dae13c728 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -677,6 +677,42 @@ Goal::Co DerivationBuildingGoal::tryToBuild() auto * localStoreP = dynamic_cast(&worker.store); assert(localStoreP); + decltype(DerivationBuilderParams::extraEnv) extraEnv; + decltype(DerivationBuilderParams::extraFiles) extraFiles; + + try { + if (drv->structuredAttrs) { + auto json = drv->structuredAttrs->prepareStructuredAttrs( + worker.store, *drvOptions, inputPaths, drv->outputs); + + extraEnv.insert_or_assign( + "NIX_ATTRS_SH_FILE", + DerivationBuilderParams::EnvEntry{ + .nameOfPassAsFile = ".attrs.sh", + .value = StructuredAttrs::writeShell(json), + }); + extraEnv.insert_or_assign( + "NIX_ATTRS_JSON_FILE", + DerivationBuilderParams::EnvEntry{ + .nameOfPassAsFile = ".attrs.json", + .value = json.dump(), + }); + } else { + /* Handle exportReferencesGraph(), if set. */ + for (auto & [fileName, storePaths] : drvOptions->getParsedExportReferencesGraph(worker.store)) { + /* Write closure info to . */ + extraFiles.insert_or_assign( + fileName, + worker.store.makeValidityRegistration( + worker.store.exportReferences(storePaths, inputPaths), false, false)); + } + } + } catch (BuildError & e) { + outputLocks.unlock(); + worker.permanentFailure = true; + co_return done(BuildResult::InputRejected, {}, std::move(e)); + } + /* If we have to wait and retry (see below), then `builder` will already be created, so we don't need to create it again. */ builder = makeDerivationBuilder( @@ -690,6 +726,8 @@ Goal::Co DerivationBuildingGoal::tryToBuild() *drvOptions, inputPaths, initialOutputs, + std::move(extraEnv), + std::move(extraFiles), }); } diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index 462352c76..da8f74a09 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -59,6 +59,35 @@ struct DerivationBuilderParams const BuildMode & buildMode; + struct EnvEntry + { + /** + * Actually, this should be passed as a file, but with a custom + * name (rather than hash-derived name for usual "pass as file"). + */ + std::optional nameOfPassAsFile; + + /** + * String value of env var, or contents of the file + */ + std::string value; + }; + + /** + * Extra environment variables to additionally set, possibly + * indirectly via a file. + * + * This is used by the caller to desugar the "structured attrs" + * mechanism, so `DerivationBuilder` doesn't need to know about it. + */ + std::map> extraEnv; + + /** + * Inserted in the temp dir, but no file names placed in env, unlike + * `EnvEntry::nameOfPassAsFile` above. 
+ */ + StringMap extraFiles; + DerivationBuilderParams( const StorePath & drvPath, const BuildMode & buildMode, @@ -66,7 +95,9 @@ struct DerivationBuilderParams const Derivation & drv, const DerivationOptions & drvOptions, const StorePathSet & inputPaths, - std::map & initialOutputs) + std::map & initialOutputs, + std::map> extraEnv, + StringMap extraFiles) : drvPath{drvPath} , buildResult{buildResult} , drv{drv} @@ -74,6 +105,8 @@ struct DerivationBuilderParams , inputPaths{inputPaths} , initialOutputs{initialOutputs} , buildMode{buildMode} + , extraEnv{std::move(extraEnv)} + , extraFiles{std::move(extraFiles)} { } diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 0b6dbe670..2c735a7c6 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -273,11 +273,6 @@ protected: private: - /** - * Write a JSON file containing the derivation attributes. - */ - void writeStructuredAttrs(); - /** * Start an in-process nix daemon thread for recursive-nix. */ @@ -781,18 +776,6 @@ void DerivationBuilderImpl::startBuilder() /* Construct the environment passed to the builder. */ initEnv(); - writeStructuredAttrs(); - - /* Handle exportReferencesGraph(), if set. */ - if (!drv.structuredAttrs) { - for (auto & [fileName, storePaths] : drvOptions.getParsedExportReferencesGraph(store)) { - /* Write closure info to . */ - writeFile( - tmpDir + "/" + fileName, - store.makeValidityRegistration(store.exportReferences(storePaths, inputPaths), false, false)); - } - } - prepareSandbox(); if (needsHashRewrite() && pathExists(homeDir)) @@ -1033,6 +1016,11 @@ void DerivationBuilderImpl::initEnv() /* The maximum number of cores to utilize for parallel building. */ env["NIX_BUILD_CORES"] = fmt("%d", settings.buildCores ? settings.buildCores : settings.getDefaultCores()); + auto writeEnv = [&](const std::string & envVarName, const std::string & fileName, const std::string & value) { + writeBuilderFile(fileName, rewriteStrings(value, inputRewrites)); + env[envVarName] = tmpDirInSandbox() + "/" + fileName; + }; + /* In non-structured mode, set all bindings either directory in the environment or via a file, as specified by `DerivationOptions::passAsFile`. */ @@ -1043,12 +1031,26 @@ void DerivationBuilderImpl::initEnv() } else { auto hash = hashString(HashAlgorithm::SHA256, i.first); std::string fn = ".attr-" + hash.to_string(HashFormat::Nix32, false); - writeBuilderFile(fn, rewriteStrings(i.second, inputRewrites)); - env[i.first + "Path"] = tmpDirInSandbox() + "/" + fn; + writeEnv(i.first + "Path", fn, i.second); } } } + /* Do this with or without structured attrs --- actually, this is + used to desugar structured attrs. */ + for (const auto & [name, info] : extraEnv) { + if (info.nameOfPassAsFile) { + writeEnv(name, *info.nameOfPassAsFile, info.value); + } else { + env[name] = info.value; + } + } + + /* Add extra files, analogous to `extraEnv` */ + for (const auto & [fileName, value] : extraFiles) { + writeBuilderFile(fileName, value); + } + /* For convenience, set an environment pointing to the top build directory. 
*/ env["NIX_BUILD_TOP"] = tmpDirInSandbox(); @@ -1102,20 +1104,6 @@ void DerivationBuilderImpl::initEnv() env["TERM"] = "xterm-256color"; } -void DerivationBuilderImpl::writeStructuredAttrs() -{ - if (drv.structuredAttrs) { - auto json = drv.structuredAttrs->prepareStructuredAttrs(store, drvOptions, inputPaths, drv.outputs); - - auto jsonSh = StructuredAttrs::writeShell(json); - - writeBuilderFile(".attrs.sh", rewriteStrings(jsonSh, inputRewrites)); - env["NIX_ATTRS_SH_FILE"] = tmpDirInSandbox() + "/.attrs.sh"; - writeBuilderFile(".attrs.json", rewriteStrings(json.dump(), inputRewrites)); - env["NIX_ATTRS_JSON_FILE"] = tmpDirInSandbox() + "/.attrs.json"; - } -} - void DerivationBuilderImpl::startDaemon() { experimentalFeatureSettings.require(Xp::RecursiveNix); From 1d3ddb21faf95ee72af3126bc475f706839a8669 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 18 Aug 2025 18:08:35 -0400 Subject: [PATCH 233/382] Further consolidate environment variable processing outside `DerivationBuilder` Now, `DerivationBuilder` only concerns itself with `finalEnv` and `extraFiles`, in straightforward unconditional code. All the fancy desugaring logic is consolidated in `DerivationBuildingGoal`. We should better share the pulled-out logic with `nix-shell`/`nix develop`, which would fill in some missing features, arguably fixing bugs. --- .../build/derivation-building-goal.cc | 30 ++++++++++++++--- .../nix/store/build/derivation-builder.hh | 8 ++--- src/libstore/unix/build/derivation-builder.cc | 33 +++++-------------- 3 files changed, 38 insertions(+), 33 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index dae13c728..965ffa525 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -677,7 +677,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() auto * localStoreP = dynamic_cast(&worker.store); assert(localStoreP); - decltype(DerivationBuilderParams::extraEnv) extraEnv; + decltype(DerivationBuilderParams::finalEnv) finalEnv; decltype(DerivationBuilderParams::extraFiles) extraFiles; try { @@ -685,19 +685,41 @@ Goal::Co DerivationBuildingGoal::tryToBuild() auto json = drv->structuredAttrs->prepareStructuredAttrs( worker.store, *drvOptions, inputPaths, drv->outputs); - extraEnv.insert_or_assign( + finalEnv.insert_or_assign( "NIX_ATTRS_SH_FILE", DerivationBuilderParams::EnvEntry{ .nameOfPassAsFile = ".attrs.sh", .value = StructuredAttrs::writeShell(json), }); - extraEnv.insert_or_assign( + finalEnv.insert_or_assign( "NIX_ATTRS_JSON_FILE", DerivationBuilderParams::EnvEntry{ .nameOfPassAsFile = ".attrs.json", .value = json.dump(), }); } else { + /* In non-structured mode, set all bindings either directory in the + environment or via a file, as specified by + `DerivationOptions::passAsFile`. */ + for (auto & [envName, envValue] : drv->env) { + if (drvOptions->passAsFile.find(envName) == drvOptions->passAsFile.end()) { + finalEnv.insert_or_assign( + envName, + DerivationBuilderParams::EnvEntry{ + .nameOfPassAsFile = std::nullopt, + .value = envValue, + }); + } else { + auto hash = hashString(HashAlgorithm::SHA256, envName); + finalEnv.insert_or_assign( + envName + "Path", + DerivationBuilderParams::EnvEntry{ + .nameOfPassAsFile = ".attr-" + hash.to_string(HashFormat::Nix32, false), + .value = envValue, + }); + } + } + /* Handle exportReferencesGraph(), if set. 
*/ for (auto & [fileName, storePaths] : drvOptions->getParsedExportReferencesGraph(worker.store)) { /* Write closure info to . */ @@ -726,7 +748,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() *drvOptions, inputPaths, initialOutputs, - std::move(extraEnv), + std::move(finalEnv), std::move(extraFiles), }); } diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index da8f74a09..301283cdc 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -74,13 +74,13 @@ struct DerivationBuilderParams }; /** - * Extra environment variables to additionally set, possibly + * The final environment variables to additionally set, possibly * indirectly via a file. * * This is used by the caller to desugar the "structured attrs" * mechanism, so `DerivationBuilder` doesn't need to know about it. */ - std::map> extraEnv; + std::map> finalEnv; /** * Inserted in the temp dir, but no file names placed in env, unlike @@ -96,7 +96,7 @@ struct DerivationBuilderParams const DerivationOptions & drvOptions, const StorePathSet & inputPaths, std::map & initialOutputs, - std::map> extraEnv, + std::map> finalEnv, StringMap extraFiles) : drvPath{drvPath} , buildResult{buildResult} @@ -105,7 +105,7 @@ struct DerivationBuilderParams , inputPaths{inputPaths} , initialOutputs{initialOutputs} , buildMode{buildMode} - , extraEnv{std::move(extraEnv)} + , finalEnv{std::move(finalEnv)} , extraFiles{std::move(extraFiles)} { } diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 2c735a7c6..62af9cd85 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -1016,37 +1016,20 @@ void DerivationBuilderImpl::initEnv() /* The maximum number of cores to utilize for parallel building. */ env["NIX_BUILD_CORES"] = fmt("%d", settings.buildCores ? settings.buildCores : settings.getDefaultCores()); - auto writeEnv = [&](const std::string & envVarName, const std::string & fileName, const std::string & value) { - writeBuilderFile(fileName, rewriteStrings(value, inputRewrites)); - env[envVarName] = tmpDirInSandbox() + "/" + fileName; - }; - - /* In non-structured mode, set all bindings either directory in the - environment or via a file, as specified by - `DerivationOptions::passAsFile`. */ - if (!drv.structuredAttrs) { - for (auto & i : drv.env) { - if (drvOptions.passAsFile.find(i.first) == drvOptions.passAsFile.end()) { - env[i.first] = i.second; - } else { - auto hash = hashString(HashAlgorithm::SHA256, i.first); - std::string fn = ".attr-" + hash.to_string(HashFormat::Nix32, false); - writeEnv(i.first + "Path", fn, i.second); - } - } - } - - /* Do this with or without structured attrs --- actually, this is - used to desugar structured attrs. */ - for (const auto & [name, info] : extraEnv) { + /* Write the final environment. Note that this is intentionally + *not* `drv.env`, because we've desugared things like like + "passAFile", "expandReferencesGraph", structured attrs, etc. 
*/ + for (const auto & [name, info] : finalEnv) { if (info.nameOfPassAsFile) { - writeEnv(name, *info.nameOfPassAsFile, info.value); + auto & fileName = *info.nameOfPassAsFile; + writeBuilderFile(fileName, rewriteStrings(info.value, inputRewrites)); + env[name] = tmpDirInSandbox() + "/" + fileName; } else { env[name] = info.value; } } - /* Add extra files, analogous to `extraEnv` */ + /* Add extra files, similar to `finalEnv` */ for (const auto & [fileName, value] : extraFiles) { writeBuilderFile(fileName, value); } From 4c76db8e7c98e7a9afd21b0ed4123d8ee28509cf Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 20 Aug 2025 17:52:07 -0400 Subject: [PATCH 234/382] Make sure `settings.sandboxedPaths` is closed outside `DerivationBuilder` This is a nicer separation of concerns --- `DerivationBuilder` just mounts the extra paths you tell it too, and the outside world is responsible for making sure those extra paths make sense. Since the closure only depends on global settings, and not per-derivation information, we also have the option of moving this up further and caching it across all local builds. (I only just realized this after having done this refactor. I am not doing that change at this time, however.) --- src/libstore/build/derivation-building-goal.cc | 18 ++++++++++++++++++ .../nix/store/build/derivation-builder.hh | 8 ++++++++ src/libstore/unix/build/derivation-builder.cc | 18 +----------------- 3 files changed, 27 insertions(+), 17 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 965ffa525..a82f7f928 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -677,9 +677,26 @@ Goal::Co DerivationBuildingGoal::tryToBuild() auto * localStoreP = dynamic_cast(&worker.store); assert(localStoreP); + decltype(DerivationBuilderParams::defaultPathsInChroot) defaultPathsInChroot = settings.sandboxPaths.get(); decltype(DerivationBuilderParams::finalEnv) finalEnv; decltype(DerivationBuilderParams::extraFiles) extraFiles; + /* Add the closure of store paths to the chroot. */ + StorePathSet closure; + for (auto & i : defaultPathsInChroot) + try { + if (worker.store.isInStore(i.second.source)) + worker.store.computeFSClosure(worker.store.toStorePath(i.second.source).first, closure); + } catch (InvalidPath & e) { + } catch (Error & e) { + e.addTrace({}, "while processing sandbox path '%s'", i.second.source); + throw; + } + for (auto & i : closure) { + auto p = worker.store.printStorePath(i); + defaultPathsInChroot.insert_or_assign(p, ChrootPath{.source = p}); + } + try { if (drv->structuredAttrs) { auto json = drv->structuredAttrs->prepareStructuredAttrs( @@ -748,6 +765,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() *drvOptions, inputPaths, initialOutputs, + std::move(defaultPathsInChroot), std::move(finalEnv), std::move(extraFiles), }); diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index 301283cdc..144ca27b1 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -59,6 +59,12 @@ struct DerivationBuilderParams const BuildMode & buildMode; + /** + * Extra paths we want to be in the chroot, regardless of the + * derivation we are building. 
+ */ + PathsInChroot defaultPathsInChroot; + struct EnvEntry { /** @@ -96,6 +102,7 @@ struct DerivationBuilderParams const DerivationOptions & drvOptions, const StorePathSet & inputPaths, std::map & initialOutputs, + PathsInChroot defaultPathsInChroot, std::map> finalEnv, StringMap extraFiles) : drvPath{drvPath} @@ -105,6 +112,7 @@ struct DerivationBuilderParams , inputPaths{inputPaths} , initialOutputs{initialOutputs} , buildMode{buildMode} + , defaultPathsInChroot{std::move(defaultPathsInChroot)} , finalEnv{std::move(finalEnv)} , extraFiles{std::move(extraFiles)} { diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 62af9cd85..15c99e3c0 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -836,29 +836,13 @@ PathsInChroot DerivationBuilderImpl::getPathsInSandbox() { /* Allow a user-configurable set of directories from the host file system. */ - PathsInChroot pathsInChroot = settings.sandboxPaths.get(); + PathsInChroot pathsInChroot = defaultPathsInChroot; if (hasPrefix(store.storeDir, tmpDirInSandbox())) { throw Error("`sandbox-build-dir` must not contain the storeDir"); } pathsInChroot[tmpDirInSandbox()] = {.source = tmpDir}; - /* Add the closure of store paths to the chroot. */ - StorePathSet closure; - for (auto & i : pathsInChroot) - try { - if (store.isInStore(i.second.source)) - store.computeFSClosure(store.toStorePath(i.second.source).first, closure); - } catch (InvalidPath & e) { - } catch (Error & e) { - e.addTrace({}, "while processing sandbox path '%s'", i.second.source); - throw; - } - for (auto & i : closure) { - auto p = store.printStorePath(i); - pathsInChroot.insert_or_assign(p, ChrootPath{.source = p}); - } - PathSet allowedPaths = settings.allowedImpureHostPrefixes; /* This works like the above, except on a per-derivation level */ From 4fe700b7cb73ed96c5ccecdfea5dc7dcf64a984e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 21 Aug 2025 13:17:06 +0200 Subject: [PATCH 235/382] Fix meson-format --- src/nix/meson.build | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix/meson.build b/src/nix/meson.build index 1e1fa8638..e989e8016 100644 --- a/src/nix/meson.build +++ b/src/nix/meson.build @@ -77,8 +77,8 @@ nix_sources = [ config_priv_h ] + files( 'edit.cc', 'env.cc', 'eval.cc', - 'flake.cc', 'flake-prefetch-inputs.cc', + 'flake.cc', 'formatter.cc', 'hash.cc', 'log.cc', From 6c391b7446a499a77001a3512597666e08951aa8 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 21 Aug 2025 12:00:58 +0200 Subject: [PATCH 236/382] release notes: 2.31.0 --- doc/manual/rl-next/build-cores-auto-detect.md | 6 -- doc/manual/rl-next/git-lfs-ssh.md | 11 --- doc/manual/rl-next/port-in-store-uris.md | 13 ---- .../rl-next/rfc4007-zone-id-in-uri-rfc6874.md | 6 -- doc/manual/source/SUMMARY.md.in | 1 + doc/manual/source/release-notes/rl-2.31.md | 69 +++++++++++++++++++ 6 files changed, 70 insertions(+), 36 deletions(-) delete mode 100644 doc/manual/rl-next/build-cores-auto-detect.md delete mode 100644 doc/manual/rl-next/git-lfs-ssh.md delete mode 100644 doc/manual/rl-next/port-in-store-uris.md delete mode 100644 doc/manual/rl-next/rfc4007-zone-id-in-uri-rfc6874.md create mode 100644 doc/manual/source/release-notes/rl-2.31.md diff --git a/doc/manual/rl-next/build-cores-auto-detect.md b/doc/manual/rl-next/build-cores-auto-detect.md deleted file mode 100644 index 67ab6995b..000000000 --- a/doc/manual/rl-next/build-cores-auto-detect.md +++ /dev/null @@ 
-1,6 +0,0 @@ ---- -synopsis: "`build-cores = 0` now auto-detects CPU cores" -prs: [13402] ---- - -When `build-cores` is set to `0`, nix now automatically detects the number of available CPU cores and passes this value via `NIX_BUILD_CORES`, instead of passing `0` directly. This matches the behavior when `build-cores` is unset. This prevents the builder from having to detect the number of cores. diff --git a/doc/manual/rl-next/git-lfs-ssh.md b/doc/manual/rl-next/git-lfs-ssh.md deleted file mode 100644 index c49addf13..000000000 --- a/doc/manual/rl-next/git-lfs-ssh.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -synopsis: "Fix Git LFS SSH issues" -prs: [13743] -issues: [13337] ---- - -Fixed some outstanding issues with Git LFS and SSH. - -* Added support for `NIX_SSHOPTS`. -* Properly use the parsed port from URL. -* Better use of the response of `git-lfs-authenticate` to determine API endpoint when the API is not exposed on port 443. diff --git a/doc/manual/rl-next/port-in-store-uris.md b/doc/manual/rl-next/port-in-store-uris.md deleted file mode 100644 index 8291c0fd1..000000000 --- a/doc/manual/rl-next/port-in-store-uris.md +++ /dev/null @@ -1,13 +0,0 @@ ---- -synopsis: "Add support for user@address:port syntax in store URIs" -prs: [3425] -issues: [7044] ---- - -It's now possible to specify the port used for the SSH stores directly in the store URL in accordance with [RFC3986](https://datatracker.ietf.org/doc/html/rfc3986). Previously the only way to specify custom ports was via `ssh_config` or `NIX_SSHOPTS` environment variable, because Nix incorrectly passed the port number together with the host name to the SSH executable. This has now been fixed. - -This change affects [store references](@docroot@/store/types/index.md#store-url-format) passed via the `--store` and similar flags in CLI as well as in the configuration for [remote builders](@docroot@/command-ref/conf-file.md#conf-builders). For example, the following store URIs now work: - -- `ssh://127.0.0.1:2222` -- `ssh://[b573:6a48:e224:840b:6007:6275:f8f7:ebf3]:22` -- `ssh-ng://[b573:6a48:e224:840b:6007:6275:f8f7:ebf3]:22` diff --git a/doc/manual/rl-next/rfc4007-zone-id-in-uri-rfc6874.md b/doc/manual/rl-next/rfc4007-zone-id-in-uri-rfc6874.md deleted file mode 100644 index d5bc4736f..000000000 --- a/doc/manual/rl-next/rfc4007-zone-id-in-uri-rfc6874.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -synopsis: "Represent IPv6 RFC4007 ZoneId literals in conformance with RFC6874" -prs: [13445] ---- - -Prior versions of Nix since [#4646](https://github.com/NixOS/nix/pull/4646) accepted [IPv6 scoped addresses](https://datatracker.ietf.org/doc/html/rfc4007) in URIs like [store references](@docroot@/store/types/index.md#store-url-format) in the textual representation with a literal percent character: `[fe80::1%18]`. This was ambiguous, because the the percent literal `%` is reserved by [RFC3986](https://datatracker.ietf.org/doc/html/rfc3986), since it's used to indicate percent encoding. Nix now requires that the percent `%` symbol is percent-encoded as `%25`. This implements [RFC6874](https://datatracker.ietf.org/doc/html/rfc6874), which defines the representation of zone identifiers in URIs. The example from above now has to be specified as `[fe80::1%2518]`. 
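The deleted note above about ports in store URIs is folded into `rl-2.31.md` below. As a rough sketch (not part of the patch) of what the newly accepted forms look like to the URL layer, assuming the `parseURL`/`ParsedURL` API that appears later in this series (`scheme` and `authority->host` are taken from those diffs; the scaffolding and printed output are assumptions):

```cpp
// Sketch only, not part of the patch: parse one of the newly supported
// store URL forms and look at the pieces parseURL() splits out.
#include <cassert>
#include <iostream>
#include "nix/util/url.hh"

int main()
{
    using namespace nix;

    // user@address:port form from the release note above (port chosen arbitrarily).
    auto url = parseURL("ssh://user@127.0.0.1:2222");

    assert(url.scheme == "ssh");
    // authority->host is used the same way elsewhere in this series;
    // exactly how the user and port parts are stored is an assumption here.
    std::cout << url.authority->host << "\n"; // expected: 127.0.0.1
}
```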
diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index cc4748f56..8fed98c2c 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -138,6 +138,7 @@ - [Contributing](development/contributing.md) - [Releases](release-notes/index.md) {{#include ./SUMMARY-rl-next.md}} + - [Release 2.31 (2025-08-21)](release-notes/rl-2.31.md) - [Release 2.30 (2025-07-07)](release-notes/rl-2.30.md) - [Release 2.29 (2025-05-14)](release-notes/rl-2.29.md) - [Release 2.28 (2025-04-02)](release-notes/rl-2.28.md) diff --git a/doc/manual/source/release-notes/rl-2.31.md b/doc/manual/source/release-notes/rl-2.31.md new file mode 100644 index 000000000..44f76052e --- /dev/null +++ b/doc/manual/source/release-notes/rl-2.31.md @@ -0,0 +1,69 @@ +# Release 2.31.0 (2025-08-21) + +- `build-cores = 0` now auto-detects CPU cores [#13402](https://github.com/NixOS/nix/pull/13402) + + When `build-cores` is set to `0`, nix now automatically detects the number of available CPU cores and passes this value via `NIX_BUILD_CORES`, instead of passing `0` directly. This matches the behavior when `build-cores` is unset. This prevents the builder from having to detect the number of cores. + +- Fix Git LFS SSH issues [#13337](https://github.com/NixOS/nix/issues/13337) [#13743](https://github.com/NixOS/nix/pull/13743) + + Fixed some outstanding issues with Git LFS and SSH. + + * Added support for `NIX_SSHOPTS`. + * Properly use the parsed port from URL. + * Better use of the response of `git-lfs-authenticate` to determine API endpoint when the API is not exposed on port 443. + +- Add support for user@address:port syntax in store URIs [#7044](https://github.com/NixOS/nix/issues/7044) [#3425](https://github.com/NixOS/nix/pull/3425) + + It's now possible to specify the port used for the SSH stores directly in the store URL in accordance with [RFC3986](https://datatracker.ietf.org/doc/html/rfc3986). Previously the only way to specify custom ports was via `ssh_config` or `NIX_SSHOPTS` environment variable, because Nix incorrectly passed the port number together with the host name to the SSH executable. This has now been fixed. + + This change affects [store references](@docroot@/store/types/index.md#store-url-format) passed via the `--store` and similar flags in CLI as well as in the configuration for [remote builders](@docroot@/command-ref/conf-file.md#conf-builders). For example, the following store URIs now work: + + - `ssh://127.0.0.1:2222` + - `ssh://[b573:6a48:e224:840b:6007:6275:f8f7:ebf3]:22` + - `ssh-ng://[b573:6a48:e224:840b:6007:6275:f8f7:ebf3]:22` + +- Represent IPv6 RFC4007 ZoneId literals in conformance with RFC6874 [#13445](https://github.com/NixOS/nix/pull/13445) + + Prior versions of Nix since [#4646](https://github.com/NixOS/nix/pull/4646) accepted [IPv6 scoped addresses](https://datatracker.ietf.org/doc/html/rfc4007) in URIs like [store references](@docroot@/store/types/index.md#store-url-format) in the textual representation with a literal percent character: `[fe80::1%18]`. This was ambiguous, because the the percent literal `%` is reserved by [RFC3986](https://datatracker.ietf.org/doc/html/rfc3986), since it's used to indicate percent encoding. Nix now requires that the percent `%` symbol is percent-encoded as `%25`. This implements [RFC6874](https://datatracker.ietf.org/doc/html/rfc6874), which defines the representation of zone identifiers in URIs. The example from above now has to be specified as `[fe80::1%2518]`. 
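To make the encoding rule in the zone ID bullet above concrete, here is a small sketch (an aside, not part of the release notes file itself) using the `percentDecode` helper declared in `nix/util/url.hh`; only the decoding direction is shown, since a plain `percentEncode` would also escape the colons:

```cpp
// Sketch of the RFC 6874 rule described above: the zone-ID separator '%'
// must itself be written as "%25" inside a URL.
#include <cassert>
#include <string>
#include "nix/util/url.hh"

int main()
{
    using namespace nix;

    // URI form that Nix now requires:
    std::string inUri = "fe80::1%2518";

    // Ordinary percent-decoding recovers the RFC 4007 scoped-address literal.
    assert(percentDecode(inUri) == "fe80::1%18");
}
```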
+ + +## Contributors + + +This release was made possible by the following 34 contributors: + +- John Soo [**(@jsoo1)**](https://github.com/jsoo1) +- Alan Urmancheev [**(@alurm)**](https://github.com/alurm) +- Manse [**(@PedroManse)**](https://github.com/PedroManse) +- Pol Dellaiera [**(@drupol)**](https://github.com/drupol) +- DavHau [**(@DavHau)**](https://github.com/DavHau) +- Leandro Emmanuel Reina Kiperman [**(@kip93)**](https://github.com/kip93) +- h0nIg [**(@h0nIg)**](https://github.com/h0nIg) +- Philip Taron [**(@philiptaron)**](https://github.com/philiptaron) +- Eelco Dolstra [**(@edolstra)**](https://github.com/edolstra) +- Connor Baker [**(@ConnorBaker)**](https://github.com/ConnorBaker) +- kenji [**(@a-kenji)**](https://github.com/a-kenji) +- Oleksandr Knyshuk [**(@k1gen)**](https://github.com/k1gen) +- Maciej Krüger [**(@mkg20001)**](https://github.com/mkg20001) +- Justin Bailey [**(@jgbailey-well)**](https://github.com/jgbailey-well) +- Emily [**(@emilazy)**](https://github.com/emilazy) +- Volker Diels-Grabsch [**(@vog)**](https://github.com/vog) +- gustavderdrache [**(@gustavderdrache)**](https://github.com/gustavderdrache) +- Elliot Cameron [**(@de11n)**](https://github.com/de11n) +- Alexander V. Nikolaev [**(@avnik)**](https://github.com/avnik) +- tomberek [**(@tomberek)**](https://github.com/tomberek) +- Matthew Kenigsberg [**(@mkenigs)**](https://github.com/mkenigs) +- Sergei Zimmerman [**(@xokdvium)**](https://github.com/xokdvium) +- Cosima Neidahl [**(@OPNA2608)**](https://github.com/OPNA2608) +- John Ericson [**(@Ericson2314)**](https://github.com/Ericson2314) +- m4dc4p [**(@m4dc4p)**](https://github.com/m4dc4p) +- Graham Christensen [**(@grahamc)**](https://github.com/grahamc) +- Jason Yundt [**(@Jayman2000)**](https://github.com/Jayman2000) +- Jens Petersen [**(@juhp)**](https://github.com/juhp) +- the-sun-will-rise-tomorrow [**(@the-sun-will-rise-tomorrow)**](https://github.com/the-sun-will-rise-tomorrow) +- Farid Zakaria [**(@fzakaria)**](https://github.com/fzakaria) +- AGawas [**(@aln730)**](https://github.com/aln730) +- Robert Hensing [**(@roberth)**](https://github.com/roberth) +- Dmitry Bogatov [**(@KAction)**](https://github.com/KAction) +- Jörg Thalheim [**(@Mic92)**](https://github.com/Mic92) +- Philipp Otterbein From 0eaed891f4b5250fc907d68940df6952a2033664 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 21 Aug 2025 12:01:27 +0200 Subject: [PATCH 237/382] Update release credits --- .../data/release-credits-email-to-handle.json | 20 ++++++++++++++++++- .../data/release-credits-handle-to-name.json | 17 +++++++++++++++- 2 files changed, 35 insertions(+), 2 deletions(-) diff --git a/maintainers/data/release-credits-email-to-handle.json b/maintainers/data/release-credits-email-to-handle.json index 48e8685e6..ea37afb90 100644 --- a/maintainers/data/release-credits-email-to-handle.json +++ b/maintainers/data/release-credits-email-to-handle.json @@ -185,5 +185,23 @@ "gwenn.lebihan7@gmail.com": "gwennlbh", "hey@ewen.works": "gwennlbh", "matt@sturgeon.me.uk": "MattSturgeon", - "pbsds@hotmail.com": "pbsds" + "pbsds@hotmail.com": "pbsds", + "sergei@zimmerman.foo": "xokdvium", + "v@njh.eu": "vog", + "pedro.manse@dmk3.com.br": "PedroManse", + "arnavgawas707@gmail.com": "aln730", + "mkg20001@gmail.com": "mkg20001", + "avn@avnik.info": "avnik", + "olk@disr.it": "k1gen", + "108410815+alurm@users.noreply.github.com": "alurm", + "kaction.cc@gmail.com": "KAction", + "juhpetersen@gmail.com": "juhp", + "opna2608@protonmail.com": "OPNA2608", + "jgbailey@gmail.com": 
"m4dc4p", + "justin.bailey@well.co": "jgbailey-well", + "130508846+de11n@users.noreply.github.com": "de11n", + "ConnorBaker01@Gmail.com": "ConnorBaker", + "jsoo1@asu.edu": "jsoo1", + "hsngrmpf+github@gmail.com": "DavHau", + "matthew@floxdev.com": "mkenigs" } \ No newline at end of file diff --git a/maintainers/data/release-credits-handle-to-name.json b/maintainers/data/release-credits-handle-to-name.json index a6352c44b..e2510548d 100644 --- a/maintainers/data/release-credits-handle-to-name.json +++ b/maintainers/data/release-credits-handle-to-name.json @@ -162,5 +162,20 @@ "pbsds": "Peder Bergebakken Sundt", "egorkonovalov": "Egor Konovalov", "jayeshv": "jayeshv", - "vcunat": "Vladim\u00edr \u010cun\u00e1t" + "vcunat": "Vladim\u00edr \u010cun\u00e1t", + "mkenigs": "Matthew Kenigsberg", + "alurm": "Alan Urmancheev", + "jgbailey-well": "Justin Bailey", + "k1gen": "Oleksandr Knyshuk", + "juhp": "Jens Petersen", + "de11n": "Elliot Cameron", + "jsoo1": "John Soo", + "m4dc4p": null, + "PedroManse": "Manse", + "OPNA2608": "Cosima Neidahl", + "mkg20001": "Maciej Kr\u00fcger", + "avnik": "Alexander V. Nikolaev", + "DavHau": null, + "aln730": "AGawas", + "vog": "Volker Diels-Grabsch" } \ No newline at end of file From 2e7bb61a8390f7cb6c01fc09f6bed1ccdffa23ef Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 21 Aug 2025 12:01:58 +0200 Subject: [PATCH 238/382] Document that you need to set GITHUB_TOKEN --- maintainers/release-process.md | 1 + 1 file changed, 1 insertion(+) diff --git a/maintainers/release-process.md b/maintainers/release-process.md index 37b38fb9f..fa47bbb1d 100644 --- a/maintainers/release-process.md +++ b/maintainers/release-process.md @@ -29,6 +29,7 @@ release: ```console $ export VERSION=X.YY $ git checkout -b release-notes + $ export GITHUB_TOKEN=... $ ./maintainers/release-notes ``` From 4dcfb36c1ed71e4ad68a07c478886aace6bf0ed8 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 21 Aug 2025 12:04:42 +0200 Subject: [PATCH 239/382] Cleanup --- doc/manual/source/release-notes/rl-2.31.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/manual/source/release-notes/rl-2.31.md b/doc/manual/source/release-notes/rl-2.31.md index 44f76052e..38911a986 100644 --- a/doc/manual/source/release-notes/rl-2.31.md +++ b/doc/manual/source/release-notes/rl-2.31.md @@ -2,7 +2,7 @@ - `build-cores = 0` now auto-detects CPU cores [#13402](https://github.com/NixOS/nix/pull/13402) - When `build-cores` is set to `0`, nix now automatically detects the number of available CPU cores and passes this value via `NIX_BUILD_CORES`, instead of passing `0` directly. This matches the behavior when `build-cores` is unset. This prevents the builder from having to detect the number of cores. + When `build-cores` is set to `0`, Nix now automatically detects the number of available CPU cores and passes this value via `NIX_BUILD_CORES`, instead of passing `0` directly. This matches the behavior when `build-cores` is unset. This prevents the builder from having to detect the number of cores. - Fix Git LFS SSH issues [#13337](https://github.com/NixOS/nix/issues/13337) [#13743](https://github.com/NixOS/nix/pull/13743) @@ -12,9 +12,9 @@ * Properly use the parsed port from URL. * Better use of the response of `git-lfs-authenticate` to determine API endpoint when the API is not exposed on port 443. 
-- Add support for user@address:port syntax in store URIs [#7044](https://github.com/NixOS/nix/issues/7044) [#3425](https://github.com/NixOS/nix/pull/3425) +- Add support for `user@address:port` syntax in store URIs [#7044](https://github.com/NixOS/nix/issues/7044) [#3425](https://github.com/NixOS/nix/pull/3425) - It's now possible to specify the port used for the SSH stores directly in the store URL in accordance with [RFC3986](https://datatracker.ietf.org/doc/html/rfc3986). Previously the only way to specify custom ports was via `ssh_config` or `NIX_SSHOPTS` environment variable, because Nix incorrectly passed the port number together with the host name to the SSH executable. This has now been fixed. + It's now possible to specify the port used for SSH stores directly in the store URL in accordance with [RFC3986](https://datatracker.ietf.org/doc/html/rfc3986). Previously the only way to specify custom ports was via `ssh_config` or the `NIX_SSHOPTS` environment variable, because Nix incorrectly passed the port number together with the host name to the SSH executable. This change affects [store references](@docroot@/store/types/index.md#store-url-format) passed via the `--store` and similar flags in CLI as well as in the configuration for [remote builders](@docroot@/command-ref/conf-file.md#conf-builders). For example, the following store URIs now work: From ae0948349804f276195b654248a898d10b2a2d63 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 21 Aug 2025 12:44:05 +0200 Subject: [PATCH 240/382] Add more release notes --- doc/manual/source/release-notes/rl-2.31.md | 25 +++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/doc/manual/source/release-notes/rl-2.31.md b/doc/manual/source/release-notes/rl-2.31.md index 38911a986..dc8963920 100644 --- a/doc/manual/source/release-notes/rl-2.31.md +++ b/doc/manual/source/release-notes/rl-2.31.md @@ -26,10 +26,33 @@ Prior versions of Nix since [#4646](https://github.com/NixOS/nix/pull/4646) accepted [IPv6 scoped addresses](https://datatracker.ietf.org/doc/html/rfc4007) in URIs like [store references](@docroot@/store/types/index.md#store-url-format) in the textual representation with a literal percent character: `[fe80::1%18]`. This was ambiguous, because the the percent literal `%` is reserved by [RFC3986](https://datatracker.ietf.org/doc/html/rfc3986), since it's used to indicate percent encoding. Nix now requires that the percent `%` symbol is percent-encoded as `%25`. This implements [RFC6874](https://datatracker.ietf.org/doc/html/rfc6874), which defines the representation of zone identifiers in URIs. The example from above now has to be specified as `[fe80::1%2518]`. +- Use WAL mode for SQLite cache databases [#13800](https://github.com/NixOS/nix/pull/13800) + + Previously, Nix used SQLite's "truncate" mode for caches. However, this could cause a Nix process to block if another process was updating the cache. This was a problem for the flake evaluation cache in particular, since it uses long-running transactions. Thus, concurrent Nix commands operating on the same flake could be blocked for an unbounded amount of time. WAL mode avoids this problem. + + This change required updating the versions of the SQLite caches. For instance, `eval-cache-v5.sqlite` is now `eval-cache-v6.sqlite`. + +- Enable parallel marking in bdwgc [#13708](https://github.com/NixOS/nix/pull/13708) + + Previously marking was done by only one thread, which takes a long time if the heap gets big. 
Enabling parallel marking speeds up evaluation a lot, for example (on a Ryzen 9 5900X 12-Core): + + * `nix search nixpkgs` from 24.3s to 18.9s. + * Evaluating the `NixOS/nix/2.21.2` flake regression test from 86.1s to 71.2s. + +- New command `nix flake prefetch-inputs` [#13565](https://github.com/NixOS/nix/pull/13565) + + This command fetches all inputs of a flake in parallel. This can be a lot faster than the serialized on-demand fetching during regular flake evaluation. The downside is that it may fetch inputs that aren't normally used. + +- Add `warn-short-path-literals` setting [#13489](https://github.com/NixOS/nix/pull/13489) + + This setting, when enabled, causes Nix to emit warnings when encountering relative path literals that don't start with `.` or `/`, for instance suggesting that `foo/bar` should be rewritten to `./foo/bar`. + +- When updating a lock, respect the input's lock file [#13437](https://github.com/NixOS/nix/pull/13437) + + For example, if a flake has a lock for `a` and `a/b`, and we change the flakeref for `a`, previously Nix would fetch the latest version of `b` rather than using the lock for `b` from `a`. ## Contributors - This release was made possible by the following 34 contributors: - John Soo [**(@jsoo1)**](https://github.com/jsoo1) From a1b3934a78a57b0fbd99fc951c53f8c875abbb3c Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 21 Aug 2025 13:41:18 +0200 Subject: [PATCH 241/382] maintainers: Add script for release notes todo list --- maintainers/release-notes-todo | 58 ++++++++++++++++++++++++++++++++++ maintainers/release-process.md | 6 ++++ 2 files changed, 64 insertions(+) create mode 100755 maintainers/release-notes-todo diff --git a/maintainers/release-notes-todo b/maintainers/release-notes-todo new file mode 100755 index 000000000..7cadc2a79 --- /dev/null +++ b/maintainers/release-notes-todo @@ -0,0 +1,58 @@ +#!/usr/bin/env bash + +set -euo pipefail +# debug: +# set -x + +START_REF="${1}" +END_REF="${2:-upstream/master}" + +# Get the merge base +MERGE_BASE=$(git merge-base "$START_REF" "$END_REF") +unset START_REF + +# Get date range +START_DATE=$(git show -s --format=%cI "$MERGE_BASE") +END_DATE=$(git show -s --format=%cI "$END_REF") + +echo "Checking PRs merged between $START_DATE and $END_DATE" >&2 + +# Get all commits between merge base and HEAD +COMMITS=$(git rev-list "$MERGE_BASE..$END_REF") + +# Convert to set for fast lookup +declare -A commit_set +for commit in $COMMITS; do + commit_set["$commit"]=1 +done + +# Get the current changelog +LOG_DONE="$(changelog-d doc/manual/rl-next)" +is_done(){ + local nr="$1" + echo "$LOG_DONE" | grep -E "^- .*/pull/$nr)" +} + +# Query merged PRs in date range +gh pr list \ + --repo NixOS/nix \ + --state merged \ + --limit 1000 \ + --json number,title,author,mergeCommit \ + --search "merged:$START_DATE..$END_DATE" | \ +jq -r '.[] | [.number, .mergeCommit.oid, .title, .author.login] | @tsv' | \ +while IFS=$'\t' read -r pr_num merge_commit _title author; do + # Check if this PR's merge commit is in our branch + if [[ -n "${commit_set[$merge_commit]:-}" ]]; then + # Full detail, not suitable for comment due to mass ping and duplicate title + # echo "- #$pr_num $_title (@$author)" + echo "- #$pr_num ($author)" + if is_done "$pr_num" + then + echo " - [x] has note" + else + echo " - [ ] has note" + fi + echo " - [ ] skip" + fi +done diff --git a/maintainers/release-process.md b/maintainers/release-process.md index 37b38fb9f..68b7d8e00 100644 --- a/maintainers/release-process.md +++ 
b/maintainers/release-process.md @@ -24,6 +24,12 @@ release: * In a checkout of the Nix repo, make sure you're on `master` and run `git pull`. +* Compile a release notes to-do list by running + + ```console + $ ./maintainers/release-notes-todo PREV_RELEASE HEAD + ``` + * Compile the release notes by running ```console From b853994e7a53dea60679b29faedbd50673cabf4a Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 22 Aug 2025 03:04:59 +0300 Subject: [PATCH 242/382] libutil: Replace hand-rolled enumerate with std::views::{zip,iota} It would have been nice to use std::views::enumerate here, but it uses a signed difference type for the value_type: > value_type = std::tuple> zip + iota has the same semantics as the code used to have, so there's no behavior change here. --- src/libutil/include/nix/util/util.hh | 53 ++++------------------------ 1 file changed, 6 insertions(+), 47 deletions(-) diff --git a/src/libutil/include/nix/util/util.hh b/src/libutil/include/nix/util/util.hh index dd6294c2a..561550c41 100644 --- a/src/libutil/include/nix/util/util.hh +++ b/src/libutil/include/nix/util/util.hh @@ -4,13 +4,13 @@ #include "nix/util/types.hh" #include "nix/util/error.hh" #include "nix/util/logging.hh" +#include "nix/util/strings.hh" #include #include #include #include - -#include "nix/util/strings.hh" +#include namespace nix { @@ -300,53 +300,12 @@ struct MaintainCount /** * A Rust/Python-like enumerate() iterator adapter. - * - * Borrowed from http://reedbeta.com/blog/python-like-enumerate-in-cpp17. */ -template< - typename T, - typename TIter = decltype(std::begin(std::declval())), - typename = decltype(std::end(std::declval()))> -constexpr auto enumerate(T && iterable) +template +constexpr auto enumerate(R && range) { - struct iterator - { - size_t i; - TIter iter; - - constexpr bool operator!=(const iterator & other) const - { - return iter != other.iter; - } - - constexpr void operator++() - { - ++i; - ++iter; - } - - constexpr auto operator*() const - { - return std::tie(i, *iter); - } - }; - - struct iterable_wrapper - { - T iterable; - - constexpr auto begin() - { - return iterator{0, std::begin(iterable)}; - } - - constexpr auto end() - { - return iterator{0, std::end(iterable)}; - } - }; - - return iterable_wrapper{std::forward(iterable)}; + /* Not std::views::enumerate because it uses difference_type for the index. */ + return std::views::zip(std::views::iota(size_t{0}), std::forward(range)); } /** From 0c46c2c37f1cce36bb313a4036f498dd1cd50f32 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 21 Aug 2025 23:17:12 -0400 Subject: [PATCH 243/382] Mention experimental SHA-256 git hashing in release notes --- doc/manual/source/release-notes/rl-2.31.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/manual/source/release-notes/rl-2.31.md b/doc/manual/source/release-notes/rl-2.31.md index dc8963920..390706ce6 100644 --- a/doc/manual/source/release-notes/rl-2.31.md +++ b/doc/manual/source/release-notes/rl-2.31.md @@ -51,6 +51,8 @@ For example, if a flake has a lock for `a` and `a/b`, and we change the flakeref for `a`, previously Nix would fetch the latest version of `b` rather than using the lock for `b` from `a`. +- The experimental support for [Git-hashing](@docroot@/development/experimental-features.md#xp-feature-git-hashing) store objects now also includes support for SHA-256, not just SHA-1, in line up with upstream Git. 
+ ## Contributors This release was made possible by the following 34 contributors: From c0246460a0351375500810d5bd84045f1260dc9e Mon Sep 17 00:00:00 2001 From: Glen Huang Date: Fri, 22 Aug 2025 15:39:18 +0800 Subject: [PATCH 244/382] doc: Fix typo --- doc/manual/source/store/derivation/index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/source/store/derivation/index.md b/doc/manual/source/store/derivation/index.md index 1687ad8c0..0e12b4d5e 100644 --- a/doc/manual/source/store/derivation/index.md +++ b/doc/manual/source/store/derivation/index.md @@ -9,7 +9,7 @@ This is where Nix distinguishes itself. ## Store Derivation {#store-derivation} -A derivation is a specification for running an executable on precisely defined input to produce on more [store objects][store object]. +A derivation is a specification for running an executable on precisely defined input to produce one or more [store objects][store object]. These store objects are known as the derivation's *outputs*. Derivations are *built*, in which case the process is spawned according to the spec, and when it exits, required to leave behind files which will (after post-processing) become the outputs of the derivation. From 82d3662f09cdb358b0fda6e79a118e330d9435ba Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 22 Aug 2025 11:03:05 +0200 Subject: [PATCH 245/382] Tweak --- doc/manual/source/release-notes/rl-2.31.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/manual/source/release-notes/rl-2.31.md b/doc/manual/source/release-notes/rl-2.31.md index 390706ce6..8b9d44c83 100644 --- a/doc/manual/source/release-notes/rl-2.31.md +++ b/doc/manual/source/release-notes/rl-2.31.md @@ -51,7 +51,9 @@ For example, if a flake has a lock for `a` and `a/b`, and we change the flakeref for `a`, previously Nix would fetch the latest version of `b` rather than using the lock for `b` from `a`. -- The experimental support for [Git-hashing](@docroot@/development/experimental-features.md#xp-feature-git-hashing) store objects now also includes support for SHA-256, not just SHA-1, in line up with upstream Git. +- Implement support for Git hashing with SHA-256 [#13543](https://github.com/NixOS/nix/pull/13543) + + The experimental support for [Git-hashing](@docroot@/development/experimental-features.md#xp-feature-git-hashing) store objects now also includes support for SHA-256, not just SHA-1, in line with upstream Git. 
## Contributors From 4083eff0c01d8d71d7a9bf46a7144befd166fac2 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 22 Aug 2025 12:02:02 -0400 Subject: [PATCH 246/382] `decodeQuery` Take `std::string_view` not string ref --- src/libflake/flakeref.cc | 2 +- src/libutil/url.cc | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libflake/flakeref.cc b/src/libflake/flakeref.cc index 5b1c3e8b2..a562e29d2 100644 --- a/src/libflake/flakeref.cc +++ b/src/libflake/flakeref.cc @@ -82,7 +82,7 @@ std::pair parsePathFlakeRefWithFragment( auto succeeds = std::regex_match(url, match, pathFlakeRegex); assert(succeeds); auto path = match[1].str(); - auto query = decodeQuery(match[3]); + auto query = decodeQuery(match[3].str()); auto fragment = percentDecode(match[5].str()); if (baseDir) { diff --git a/src/libutil/url.cc b/src/libutil/url.cc index cdfba8a83..07f4b29ea 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -179,7 +179,7 @@ try { .scheme = scheme, .authority = authority, .path = path, - .query = decodeQuery(std::string(query)), + .query = decodeQuery(query), .fragment = fragment, }; } catch (boost::system::system_error & e) { @@ -201,7 +201,7 @@ std::string percentEncode(std::string_view s, std::string_view keep) s, [keep](char c) { return boost::urls::unreserved_chars(c) || keep.find(c) != keep.npos; }); } -StringMap decodeQuery(const std::string & query) +StringMap decodeQuery(std::string_view query) try { /* For back-compat unescaped characters are allowed. */ auto fixedEncodedQuery = percentEncodeCharSet(query, extraAllowedCharsInQuery); From 72a548ed6aa4677d66602c97f26a0b13d6729298 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 22 Aug 2025 12:26:11 -0400 Subject: [PATCH 247/382] Limit to lenient parsing of non-standard URLs only where needed This allows us to put `parseURL` in more spots without furthering technical debt. 
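The effect of the new flag is easiest to see in the `url.cc` unit test adjusted below. Condensed into a standalone sketch (the test fixture and includes are assumed), strict parsing now rejects the legacy unescaped characters, while `lenient = true` keeps the historical Nix behaviour:

```cpp
// Condensed from the libutil-tests change in this patch: strict parsing
// rejects the legacy unescaped characters, lenient parsing still accepts them.
#include <gtest/gtest.h>
#include "nix/util/url.hh"

namespace nix {

TEST(parseURL, lenientVsStrict)
{
    auto s = "http://www.example.org/file.tar.gz?query \"= 123\"#shevron^quote\"space ";

    // Strictly per RFC 3986, the unescaped '"', ' ' and '^' are an error.
    EXPECT_THROW(parseURL(s), Error);

    // Lenient mode preserves the old behaviour for back-compat.
    auto url = parseURL(s, /*lenient=*/true);
    EXPECT_EQ(url.fragment, "shevron^quote\"space ");
}

} // namespace nix
```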
--- src/libexpr/primops/fetchClosure.cc | 2 +- src/libflake-tests/url-name.cc | 7 +-- src/libflake/flakeref.cc | 6 +-- src/libstore/store-reference.cc | 4 +- src/libutil-tests/url.cc | 11 +++-- src/libutil/include/nix/util/url.hh | 12 +++-- src/libutil/url.cc | 68 +++++++++++++++++------------ src/nix/profile.cc | 17 +++++--- 8 files changed, 76 insertions(+), 51 deletions(-) diff --git a/src/libexpr/primops/fetchClosure.cc b/src/libexpr/primops/fetchClosure.cc index 469459818..63da53aa9 100644 --- a/src/libexpr/primops/fetchClosure.cc +++ b/src/libexpr/primops/fetchClosure.cc @@ -185,7 +185,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value ** args {.msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"), .pos = state.positions[pos]}); - auto parsedURL = parseURL(*fromStoreUrl); + auto parsedURL = parseURL(*fromStoreUrl, /*lenient=*/true); if (parsedURL.scheme != "http" && parsedURL.scheme != "https" && !(getEnv("_NIX_IN_TEST").has_value() && parsedURL.scheme == "file")) diff --git a/src/libflake-tests/url-name.cc b/src/libflake-tests/url-name.cc index 78de34458..81ba516c8 100644 --- a/src/libflake-tests/url-name.cc +++ b/src/libflake-tests/url-name.cc @@ -13,8 +13,9 @@ TEST(getNameFromURL, getNameFromURL) ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#packages.x86_64-linux.Hello")), "Hello"); ASSERT_EQ(getNameFromURL(parseURL("path:.#nonStandardAttr.mylaptop")), "mylaptop"); ASSERT_EQ(getNameFromURL(parseURL("path:./repos/myflake#nonStandardAttr.mylaptop")), "mylaptop"); - ASSERT_EQ(getNameFromURL(parseURL("path:./nixpkgs#packages.x86_64-linux.complex^bin,man")), "complex"); - ASSERT_EQ(getNameFromURL(parseURL("path:./myproj#packages.x86_64-linux.default^*")), "myproj"); + ASSERT_EQ( + getNameFromURL(parseURL("path:./nixpkgs#packages.x86_64-linux.complex^bin,man", /*lenient=*/true)), "complex"); + ASSERT_EQ(getNameFromURL(parseURL("path:./myproj#packages.x86_64-linux.default^*", /*lenient=*/true)), "myproj"); ASSERT_EQ(getNameFromURL(parseURL("path:./myproj#defaultPackage.x86_64-linux")), "myproj"); ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello"); @@ -80,6 +81,6 @@ TEST(getNameFromURL, getNameFromURL) ASSERT_EQ(getNameFromURL(parseURL("path:.")), std::nullopt); ASSERT_EQ(getNameFromURL(parseURL("file:.#")), std::nullopt); ASSERT_EQ(getNameFromURL(parseURL("path:.#packages.x86_64-linux.default")), std::nullopt); - ASSERT_EQ(getNameFromURL(parseURL("path:.#packages.x86_64-linux.default^*")), std::nullopt); + ASSERT_EQ(getNameFromURL(parseURL("path:.#packages.x86_64-linux.default^*", /*lenient=*/true)), std::nullopt); } } // namespace nix diff --git a/src/libflake/flakeref.cc b/src/libflake/flakeref.cc index a562e29d2..070f4e483 100644 --- a/src/libflake/flakeref.cc +++ b/src/libflake/flakeref.cc @@ -82,7 +82,7 @@ std::pair parsePathFlakeRefWithFragment( auto succeeds = std::regex_match(url, match, pathFlakeRegex); assert(succeeds); auto path = match[1].str(); - auto query = decodeQuery(match[3].str()); + auto query = decodeQuery(match[3].str(), /*lenient=*/true); auto fragment = percentDecode(match[5].str()); if (baseDir) { @@ -210,7 +210,7 @@ std::optional> parseURLFlakeRef( bool isFlake) { try { - auto parsed = parseURL(url); + auto parsed = parseURL(url, /*lenient=*/true); if (baseDir && (parsed.scheme == "path" || parsed.scheme == "git+file") && !isAbsolute(parsed.path)) parsed.path = absPath(parsed.path, *baseDir); return fromParsedURL(fetchSettings, std::move(parsed), 
isFlake); @@ -289,7 +289,7 @@ FlakeRef FlakeRef::canonicalize() const filtering the `dir` query parameter from the URL. */ if (auto url = fetchers::maybeGetStrAttr(flakeRef.input.attrs, "url")) { try { - auto parsed = parseURL(*url); + auto parsed = parseURL(*url, /*lenient=*/true); if (auto dir2 = get(parsed.query, "dir")) { if (flakeRef.subdir != "" && flakeRef.subdir == *dir2) parsed.query.erase("dir"); diff --git a/src/libstore/store-reference.cc b/src/libstore/store-reference.cc index 2b8305072..adc60b391 100644 --- a/src/libstore/store-reference.cc +++ b/src/libstore/store-reference.cc @@ -45,7 +45,7 @@ StoreReference StoreReference::parse(const std::string & uri, const StoreReferen { auto params = extraParams; try { - auto parsedUri = parseURL(uri); + auto parsedUri = parseURL(uri, /*lenient=*/true); params.insert(parsedUri.query.begin(), parsedUri.query.end()); auto baseURI = parsedUri.authority.value_or(ParsedURL::Authority{}).to_string() + parsedUri.path; @@ -107,7 +107,7 @@ std::pair splitUriAndParams(const std::stri StoreReference::Params params; auto q = uri.find('?'); if (q != std::string::npos) { - params = decodeQuery(uri.substr(q + 1)); + params = decodeQuery(uri.substr(q + 1), /*lenient=*/true); uri = uri_.substr(0, q); } return {uri, params}; diff --git a/src/libutil-tests/url.cc b/src/libutil-tests/url.cc index 0dfb5f463..b248421b3 100644 --- a/src/libutil-tests/url.cc +++ b/src/libutil-tests/url.cc @@ -221,15 +221,20 @@ TEST(parseURL, parsedUrlsWithUnescapedChars) * 2. Unescaped spaces and quotes in query. */ auto s = "http://www.example.org/file.tar.gz?query \"= 123\"#shevron^quote\"space "; - auto url = parseURL(s); - ASSERT_EQ(url.fragment, "shevron^quote\"space "); + /* Without leniency for back compat, this should throw. */ + EXPECT_THROW(parseURL(s), Error); + + /* With leniency for back compat, this should parse. */ + auto url = parseURL(s, /*lenient=*/true); + + EXPECT_EQ(url.fragment, "shevron^quote\"space "); auto query = StringMap{ {"query \"", " 123\""}, }; - ASSERT_EQ(url.query, query); + EXPECT_EQ(url.query, query); } TEST(parseURL, parseFTPUrl) diff --git a/src/libutil/include/nix/util/url.hh b/src/libutil/include/nix/util/url.hh index cd20a08c6..3262b44b7 100644 --- a/src/libutil/include/nix/util/url.hh +++ b/src/libutil/include/nix/util/url.hh @@ -96,14 +96,18 @@ MakeError(BadURL, Error); std::string percentDecode(std::string_view in); std::string percentEncode(std::string_view s, std::string_view keep = ""); -StringMap decodeQuery(const std::string & query); +/** + * @param lenient @see parseURL + */ +StringMap decodeQuery(std::string_view query, bool lenient = false); std::string encodeQuery(const StringMap & query); /** - * Parse a Nix URL into a ParsedURL. + * Parse a URL into a ParsedURL. * - * Nix URI is mostly compliant with RFC3986, but with some deviations: + * @parm lenient Also allow some long-supported Nix URIs that are not quite compliant with RFC3986. + * Here are the deviations: * - Fragments can contain unescaped (not URL encoded) '^', '"' or space literals. * - Queries may contain unescaped '"' or spaces. 
* @@ -111,7 +115,7 @@ std::string encodeQuery(const StringMap & query); * * @throws BadURL */ -ParsedURL parseURL(std::string_view url); +ParsedURL parseURL(std::string_view url, bool lenient = false); /** * Although that’s not really standardized anywhere, an number of tools diff --git a/src/libutil/url.cc b/src/libutil/url.cc index 07f4b29ea..b7f1eff30 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -108,41 +108,48 @@ static std::string percentEncodeCharSet(std::string_view s, auto charSet) return res; } -ParsedURL parseURL(std::string_view url) +ParsedURL parseURL(std::string_view url, bool lenient) try { /* Account for several non-standard properties of nix urls (for back-compat): * - Allow unescaped spaces ' ' and '"' characters in queries. * - Allow '"', ' ' and '^' characters in the fragment component. * We could write our own grammar for this, but fixing it up here seems * more concise, since the deviation is rather minor. + * + * If `!lenient` don't bother initializing, because we can just + * parse `url` directly`. */ - std::string fixedEncodedUrl = [&]() { - std::string fixed; - std::string_view view = url; + std::string fixedEncodedUrl; - if (auto beforeQuery = splitPrefixTo(view, '?')) { - fixed += *beforeQuery; - fixed += '?'; - auto fragmentStart = view.find('#'); - auto queryView = view.substr(0, fragmentStart); - auto fixedQuery = percentEncodeCharSet(queryView, extraAllowedCharsInQuery); - fixed += fixedQuery; - view.remove_prefix(std::min(fragmentStart, view.size())); - } + if (lenient) { + fixedEncodedUrl = [&] { + std::string fixed; + std::string_view view = url; - if (auto beforeFragment = splitPrefixTo(view, '#')) { - fixed += *beforeFragment; - fixed += '#'; - auto fixedFragment = percentEncodeCharSet(view, extraAllowedCharsInFragment); - fixed += fixedFragment; + if (auto beforeQuery = splitPrefixTo(view, '?')) { + fixed += *beforeQuery; + fixed += '?'; + auto fragmentStart = view.find('#'); + auto queryView = view.substr(0, fragmentStart); + auto fixedQuery = percentEncodeCharSet(queryView, extraAllowedCharsInQuery); + fixed += fixedQuery; + view.remove_prefix(std::min(fragmentStart, view.size())); + } + + if (auto beforeFragment = splitPrefixTo(view, '#')) { + fixed += *beforeFragment; + fixed += '#'; + auto fixedFragment = percentEncodeCharSet(view, extraAllowedCharsInFragment); + fixed += fixedFragment; + return fixed; + } + + fixed += view; return fixed; - } + }(); + } - fixed += view; - return fixed; - }(); - - auto urlView = boost::urls::url_view(fixedEncodedUrl); + auto urlView = boost::urls::url_view(lenient ? fixedEncodedUrl : url); if (!urlView.has_scheme()) throw BadURL("'%s' doesn't have a scheme", url); @@ -179,7 +186,7 @@ try { .scheme = scheme, .authority = authority, .path = path, - .query = decodeQuery(query), + .query = decodeQuery(query, lenient), .fragment = fragment, }; } catch (boost::system::system_error & e) { @@ -201,14 +208,17 @@ std::string percentEncode(std::string_view s, std::string_view keep) s, [keep](char c) { return boost::urls::unreserved_chars(c) || keep.find(c) != keep.npos; }); } -StringMap decodeQuery(std::string_view query) +StringMap decodeQuery(std::string_view query, bool lenient) try { - /* For back-compat unescaped characters are allowed. */ - auto fixedEncodedQuery = percentEncodeCharSet(query, extraAllowedCharsInQuery); + /* When `lenient = true`, for back-compat unescaped characters are allowed. 
*/ + std::string fixedEncodedQuery; + if (lenient) { + fixedEncodedQuery = percentEncodeCharSet(query, extraAllowedCharsInQuery); + } StringMap result; - auto encodedQuery = boost::urls::params_encoded_view(fixedEncodedQuery); + auto encodedQuery = boost::urls::params_encoded_view(lenient ? fixedEncodedQuery : query); for (auto && [key, value, value_specified] : encodedQuery) { if (!value_specified) { warn("dubious URI query '%s' is missing equal sign '%s', ignoring", std::string_view(key), "="); diff --git a/src/nix/profile.cc b/src/nix/profile.cc index df92d888e..0ed1face5 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -105,7 +105,8 @@ std::string getNameFromElement(const ProfileElement & element) { std::optional result = std::nullopt; if (element.source) { - result = getNameFromURL(parseURL(element.source->to_string())); + // Seems to be for Flake URLs + result = getNameFromURL(parseURL(element.source->to_string(), /*lenient=*/true)); } return result.value_or(element.identifier()); } @@ -160,11 +161,15 @@ struct ProfileManifest e["outputs"].get()}; } - std::string name = - elems.is_object() ? elem.key() - : element.source - ? getNameFromURL(parseURL(element.source->to_string())).value_or(element.identifier()) - : element.identifier(); + std::string name = [&] { + if (elems.is_object()) + return elem.key(); + if (element.source) { + if (auto optName = getNameFromURL(parseURL(element.source->to_string(), /*lenient=*/true))) + return *optName; + } + return element.identifier(); + }(); addElement(name, std::move(element)); } From 3e86d75c9daf04a497fd182ac14dfc06886a8e71 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 20 Aug 2025 22:41:46 -0400 Subject: [PATCH 248/382] Make more URLs parsed, most notably `FileTransferRequest::url` Trying to gradually replace the use of strings with better types in ways that makes sense. 
--- src/libfetchers/git-lfs-fetch.cc | 4 ++-- src/libfetchers/github.cc | 8 ++++---- src/libfetchers/tarball.cc | 4 ++-- src/libstore-tests/s3.cc | 6 +++--- src/libstore/builtins/fetchurl.cc | 2 +- src/libstore/filetransfer.cc | 17 +++++++++-------- src/libstore/http-binary-cache-store.cc | 4 ++-- src/libstore/include/nix/store/filetransfer.hh | 8 ++++++-- src/libstore/include/nix/store/s3.hh | 2 +- src/libstore/s3.cc | 8 +++----- src/nix/prefetch.cc | 2 +- src/nix/upgrade-nix.cc | 2 +- 12 files changed, 35 insertions(+), 32 deletions(-) diff --git a/src/libfetchers/git-lfs-fetch.cc b/src/libfetchers/git-lfs-fetch.cc index a68cdf832..f555a9a4c 100644 --- a/src/libfetchers/git-lfs-fetch.cc +++ b/src/libfetchers/git-lfs-fetch.cc @@ -25,7 +25,7 @@ static void downloadToSink( std::string sha256Expected, size_t sizeExpected) { - FileTransferRequest request(url); + FileTransferRequest request(parseURL(url)); Headers headers; if (authHeader.has_value()) headers.push_back({"Authorization", *authHeader}); @@ -207,7 +207,7 @@ std::vector Fetch::fetchUrls(const std::vector & pointe auto api = lfs::getLfsApi(this->url); auto url = api.endpoint + "/objects/batch"; const auto & authHeader = api.authHeader; - FileTransferRequest request(url); + FileTransferRequest request(parseURL(url)); request.post = true; Headers headers; if (authHeader.has_value()) diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 841a9c2df..b3749b01a 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -19,7 +19,7 @@ namespace nix::fetchers { struct DownloadUrl { - std::string url; + ParsedURL url; Headers headers; }; @@ -420,7 +420,7 @@ struct GitHubInputScheme : GitArchiveInputScheme const auto url = fmt(urlFmt, host, getOwner(input), getRepo(input), input.getRev()->to_string(HashFormat::Base16, false)); - return DownloadUrl{url, headers}; + return DownloadUrl{parseURL(url), headers}; } void clone(const Input & input, const Path & destDir) const override @@ -500,7 +500,7 @@ struct GitLabInputScheme : GitArchiveInputScheme input.getRev()->to_string(HashFormat::Base16, false)); Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input); - return DownloadUrl{url, headers}; + return DownloadUrl{parseURL(url), headers}; } void clone(const Input & input, const Path & destDir) const override @@ -592,7 +592,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme input.getRev()->to_string(HashFormat::Base16, false)); Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input); - return DownloadUrl{url, headers}; + return DownloadUrl{parseURL(url), headers}; } void clone(const Input & input, const Path & destDir) const override diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index 309bbaf5a..b89cd99f1 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -43,7 +43,7 @@ DownloadFileResult downloadFile( if (cached && !cached->expired) return useCached(); - FileTransferRequest request(url); + FileTransferRequest request(parseURL(url)); request.headers = headers; if (cached) request.expectedETag = getStrAttr(cached->value, "etag"); @@ -153,7 +153,7 @@ static DownloadTarballResult downloadTarball_( auto _res = std::make_shared>(); auto source = sinkToSource([&](Sink & sink) { - FileTransferRequest req(url); + FileTransferRequest req(parseURL(url)); req.expectedETag = cached ? 
getStrAttr(cached->value, "etag") : ""; getFileTransfer()->download(std::move(req), sink, [_res](FileTransferResult r) { *_res->lock() = r; }); }); diff --git a/src/libstore-tests/s3.cc b/src/libstore-tests/s3.cc index b66005cb9..579cfdc55 100644 --- a/src/libstore-tests/s3.cc +++ b/src/libstore-tests/s3.cc @@ -21,7 +21,7 @@ class ParsedS3URLTest : public ::testing::WithParamInterfaceresult.data.append(data); }) { - result.urls.push_back(request.uri); + result.urls.push_back(request.uri.to_string()); requestHeaders = curl_slist_append(requestHeaders, "Accept-Encoding: zstd, br, gzip, deflate, bzip2, xz"); if (!request.expectedETag.empty()) @@ -350,7 +350,7 @@ struct curlFileTransfer : public FileTransfer curl_easy_setopt(req, CURLOPT_DEBUGFUNCTION, TransferItem::debugCallback); } - curl_easy_setopt(req, CURLOPT_URL, request.uri.c_str()); + curl_easy_setopt(req, CURLOPT_URL, request.uri.to_string().c_str()); curl_easy_setopt(req, CURLOPT_FOLLOWLOCATION, 1L); curl_easy_setopt(req, CURLOPT_MAXREDIRS, 10); curl_easy_setopt(req, CURLOPT_NOSIGNAL, 1); @@ -784,8 +784,8 @@ struct curlFileTransfer : public FileTransfer void enqueueItem(std::shared_ptr item) { - if (item->request.data && !hasPrefix(item->request.uri, "http://") && !hasPrefix(item->request.uri, "https://")) - throw nix::Error("uploading to '%s' is not supported", item->request.uri); + if (item->request.data && item->request.uri.scheme != "http" && item->request.uri.scheme != "https") + throw nix::Error("uploading to '%s' is not supported", item->request.uri.to_string()); { auto state(state_.lock()); @@ -801,7 +801,7 @@ struct curlFileTransfer : public FileTransfer void enqueueFileTransfer(const FileTransferRequest & request, Callback callback) override { /* Ugly hack to support s3:// URIs. */ - if (hasPrefix(request.uri, "s3://")) { + if (request.uri.scheme == "s3") { // FIXME: do this on a worker thread try { #if NIX_WITH_S3_SUPPORT @@ -820,10 +820,11 @@ struct curlFileTransfer : public FileTransfer if (!s3Res.data) throw FileTransferError(NotFound, {}, "S3 object '%s' does not exist", request.uri); res.data = std::move(*s3Res.data); - res.urls.push_back(request.uri); + res.urls.push_back(request.uri.to_string()); callback(std::move(res)); #else - throw nix::Error("cannot download '%s' because Nix is not built with S3 support", request.uri); + throw nix::Error( + "cannot download '%s' because Nix is not built with S3 support", request.uri.to_string()); #endif } catch (...) { callback.rethrow(); diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 2777b8827..940dcec2e 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -166,10 +166,10 @@ protected: `std::filesystem::path`'s equivalent operator, which properly combines the the URLs, whether the right is relative or absolute. */ - return FileTransferRequest( + return FileTransferRequest(parseURL( hasPrefix(path, "https://") || hasPrefix(path, "http://") || hasPrefix(path, "file://") ? 
path - : config->cacheUri.to_string() + "/" + path); + : config->cacheUri.to_string() + "/" + path)); } void getFile(const std::string & path, Sink & sink) override diff --git a/src/libstore/include/nix/store/filetransfer.hh b/src/libstore/include/nix/store/filetransfer.hh index 8ff0de5ef..8a04293bd 100644 --- a/src/libstore/include/nix/store/filetransfer.hh +++ b/src/libstore/include/nix/store/filetransfer.hh @@ -9,6 +9,7 @@ #include "nix/util/ref.hh" #include "nix/util/configuration.hh" #include "nix/util/serialise.hh" +#include "nix/util/url.hh" namespace nix { @@ -70,7 +71,7 @@ extern const unsigned int RETRY_TIME_MS_DEFAULT; struct FileTransferRequest { - std::string uri; + ParsedURL uri; Headers headers; std::string expectedETag; bool verifyTLS = true; @@ -84,7 +85,7 @@ struct FileTransferRequest std::string mimeType; std::function dataCallback; - FileTransferRequest(std::string_view uri) + FileTransferRequest(ParsedURL uri) : uri(uri) , parentAct(getCurActivity()) { @@ -111,6 +112,9 @@ struct FileTransferResult /** * All URLs visited in the redirect chain. + * + * @note Intentionally strings and not `ParsedURL`s so we faithfully + * return what cURL gave us. */ std::vector urls; diff --git a/src/libstore/include/nix/store/s3.hh b/src/libstore/include/nix/store/s3.hh index 517825952..3f38ef62f 100644 --- a/src/libstore/include/nix/store/s3.hh +++ b/src/libstore/include/nix/store/s3.hh @@ -74,7 +74,7 @@ struct ParsedS3URL endpoint); } - static ParsedS3URL parse(std::string_view uri); + static ParsedS3URL parse(const ParsedURL & uri); auto operator<=>(const ParsedS3URL & other) const = default; }; diff --git a/src/libstore/s3.cc b/src/libstore/s3.cc index 9ed4e7fd9..f605b45c1 100644 --- a/src/libstore/s3.cc +++ b/src/libstore/s3.cc @@ -8,10 +8,8 @@ using namespace std::string_view_literals; #if NIX_WITH_S3_SUPPORT -ParsedS3URL ParsedS3URL::parse(std::string_view uri) +ParsedS3URL ParsedS3URL::parse(const ParsedURL & parsed) try { - auto parsed = parseURL(uri); - if (parsed.scheme != "s3"sv) throw BadURL("URI scheme '%s' is not 's3'", parsed.scheme); @@ -43,7 +41,7 @@ try { auto endpoint = getOptionalParam("endpoint"); return ParsedS3URL{ - .bucket = std::move(parsed.authority->host), + .bucket = parsed.authority->host, .key = std::string{key}, .profile = getOptionalParam("profile"), .region = getOptionalParam("region"), @@ -62,7 +60,7 @@ try { }(), }; } catch (BadURL & e) { - e.addTrace({}, "while parsing S3 URI: '%s'", uri); + e.addTrace({}, "while parsing S3 URI: '%s'", parsed.to_string()); throw; } diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index b23b11d02..88a4717a0 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -105,7 +105,7 @@ std::tuple prefetchFile( FdSink sink(fd.get()); - FileTransferRequest req(url); + FileTransferRequest req(parseURL(url)); req.decompress = false; getFileTransfer()->download(std::move(req), sink); } diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc index f6668f6dc..48235a27f 100644 --- a/src/nix/upgrade-nix.cc +++ b/src/nix/upgrade-nix.cc @@ -156,7 +156,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand Activity act(*logger, lvlInfo, actUnknown, "querying latest Nix version"); // FIXME: use nixos.org? 
- auto req = FileTransferRequest((std::string &) settings.upgradeNixStorePathUrl); + auto req = FileTransferRequest(parseURL(settings.upgradeNixStorePathUrl.get())); auto res = getFileTransfer()->download(req); auto state = std::make_unique(LookupPath{}, store, fetchSettings, evalSettings); From 7b8ceb5d2dc26c924c4a24c8b22bbb10c440e85f Mon Sep 17 00:00:00 2001 From: Ethan Evans Date: Tue, 12 Aug 2025 23:34:49 -0700 Subject: [PATCH 249/382] libutil, libexpr: #10542 abstract over getrusage for getting cpuTime stat and implement windows version Update src/libutil/windows/current-process.cc Prefer `nullptr` over `NULL` Co-authored-by: Sergei Zimmerman Update src/libutil/unix/current-process.cc Prefer C++ type casts Co-authored-by: Sergei Zimmerman Update src/libutil/windows/current-process.cc Prefer C++ type casts Co-authored-by: Sergei Zimmerman Update src/libutil/unix/current-process.cc Don't allocate exception Co-authored-by: Sergei Zimmerman --- src/libexpr/eval.cc | 18 ++-------- .../include/nix/util/current-process.hh | 6 ++++ src/libutil/unix/current-process.cc | 23 ++++++++++++ src/libutil/unix/meson.build | 1 + src/libutil/windows/current-process.cc | 35 +++++++++++++++++++ src/libutil/windows/meson.build | 1 + 6 files changed, 69 insertions(+), 15 deletions(-) create mode 100644 src/libutil/unix/current-process.cc create mode 100644 src/libutil/windows/current-process.cc diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 293b05953..81a9afe63 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -21,6 +21,7 @@ #include "nix/fetchers/fetch-to-store.hh" #include "nix/fetchers/tarball.hh" #include "nix/fetchers/input-cache.hh" +#include "nix/util/current-process.hh" #include "parser-tab.hh" @@ -37,10 +38,6 @@ #include #include -#ifndef _WIN32 // TODO use portable implementation -# include -#endif - #include "nix/util/strings-inline.hh" using json = nlohmann::json; @@ -2888,11 +2885,8 @@ void EvalState::maybePrintStats() void EvalState::printStatistics() { -#ifndef _WIN32 // TODO use portable implementation - struct rusage buf; - getrusage(RUSAGE_SELF, &buf); - float cpuTime = buf.ru_utime.tv_sec + ((float) buf.ru_utime.tv_usec / 1000000); -#endif + std::chrono::microseconds cpuTimeDuration = getCpuUserTime(); + float cpuTime = std::chrono::duration_cast>(cpuTimeDuration).count(); uint64_t bEnvs = nrEnvs * sizeof(Env) + nrValuesInEnvs * sizeof(Value *); uint64_t bLists = nrListElems * sizeof(Value *); @@ -2914,18 +2908,12 @@ void EvalState::printStatistics() if (outPath != "-") fs.open(outPath, std::fstream::out); json topObj = json::object(); -#ifndef _WIN32 // TODO implement topObj["cpuTime"] = cpuTime; -#endif topObj["time"] = { -#ifndef _WIN32 // TODO implement {"cpu", cpuTime}, -#endif #if NIX_USE_BOEHMGC {GC_is_incremental_mode() ? "gcNonIncremental" : "gc", gcFullOnlyTime}, -# ifndef _WIN32 // TODO implement {GC_is_incremental_mode() ? "gcNonIncrementalFraction" : "gcFraction", gcFullOnlyTime / cpuTime}, -# endif #endif }; topObj["envs"] = { diff --git a/src/libutil/include/nix/util/current-process.hh b/src/libutil/include/nix/util/current-process.hh index 364493137..c4a952581 100644 --- a/src/libutil/include/nix/util/current-process.hh +++ b/src/libutil/include/nix/util/current-process.hh @@ -2,6 +2,7 @@ ///@file #include +#include #ifndef _WIN32 # include @@ -11,6 +12,11 @@ namespace nix { +/** + * Get the current process's user space CPU time. 
+ */ +std::chrono::microseconds getCpuUserTime(); + /** * If cgroups are active, attempt to calculate the number of CPUs available. * If cgroups are unavailable or if cpu.max is set to "max", return 0. diff --git a/src/libutil/unix/current-process.cc b/src/libutil/unix/current-process.cc new file mode 100644 index 000000000..eaa2424ab --- /dev/null +++ b/src/libutil/unix/current-process.cc @@ -0,0 +1,23 @@ +#include "nix/util/current-process.hh" +#include "nix/util/error.hh" +#include + +#include + +namespace nix { + +std::chrono::microseconds getCpuUserTime() +{ + struct rusage buf; + + if (getrusage(RUSAGE_SELF, &buf) != 0) { + throw SysError("failed to get CPU time"); + } + + std::chrono::seconds seconds(buf.ru_utime.tv_sec); + std::chrono::microseconds microseconds(buf.ru_utime.tv_usec); + + return seconds + microseconds; +} + +} // namespace nix diff --git a/src/libutil/unix/meson.build b/src/libutil/unix/meson.build index 13bb380b4..8f89b65ab 100644 --- a/src/libutil/unix/meson.build +++ b/src/libutil/unix/meson.build @@ -49,6 +49,7 @@ config_unix_priv_h = configure_file( sources += config_unix_priv_h sources += files( + 'current-process.cc', 'environment-variables.cc', 'file-descriptor.cc', 'file-path.cc', diff --git a/src/libutil/windows/current-process.cc b/src/libutil/windows/current-process.cc new file mode 100644 index 000000000..4bc866bb3 --- /dev/null +++ b/src/libutil/windows/current-process.cc @@ -0,0 +1,35 @@ +#include "nix/util/current-process.hh" +#include "nix/util/windows-error.hh" +#include + +#ifdef _WIN32 +# define WIN32_LEAN_AND_MEAN +# include + +namespace nix { + +std::chrono::microseconds getCpuUserTime() +{ + FILETIME creationTime; + FILETIME exitTime; + FILETIME kernelTime; + FILETIME userTime; + + if (!GetProcessTimes(GetCurrentProcess(), &creationTime, &exitTime, &kernelTime, &userTime)) { + auto lastError = GetLastError(); + throw windows::WinError(lastError, "failed to get CPU time"); + } + + ULARGE_INTEGER uLargeInt; + uLargeInt.LowPart = userTime.dwLowDateTime; + uLargeInt.HighPart = userTime.dwHighDateTime; + + // FILETIME stores units of 100 nanoseconds. + // Dividing by 10 gives microseconds. 
+ std::chrono::microseconds microseconds(uLargeInt.QuadPart / 10); + + return microseconds; +} + +} // namespace nix +#endif // ifdef _WIN32 diff --git a/src/libutil/windows/meson.build b/src/libutil/windows/meson.build index 0c1cec49c..fb4de2017 100644 --- a/src/libutil/windows/meson.build +++ b/src/libutil/windows/meson.build @@ -1,4 +1,5 @@ sources += files( + 'current-process.cc', 'environment-variables.cc', 'file-descriptor.cc', 'file-path.cc', From f67daa4a8700d616e40cf7a942cd6eb051552bc9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 25 Aug 2025 10:27:46 +0200 Subject: [PATCH 250/382] Bump version --- .version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version b/.version index bafceb320..7cca401c7 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.31.0 +2.32.0 From f5e09d9b589cf194c38de949bc3d66e92a65f304 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 25 Aug 2025 10:28:47 +0200 Subject: [PATCH 251/382] Update mergify.yml --- .mergify.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/.mergify.yml b/.mergify.yml index f49144113..1c220045a 100644 --- a/.mergify.yml +++ b/.mergify.yml @@ -161,3 +161,14 @@ pull_request_rules: labels: - automatic backport - merge-queue + + - name: backport patches to 2.31 + conditions: + - label=backport 2.31-maintenance + actions: + backport: + branches: + - "2.31-maintenance" + labels: + - automatic backport + - merge-queue From adec28bf85048fa9f54214eecf2cc818de4745a0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 25 Aug 2025 10:30:21 +0200 Subject: [PATCH 252/382] Update release-process.md --- maintainers/release-process.md | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/maintainers/release-process.md b/maintainers/release-process.md index d2dba76d3..790618b7f 100644 --- a/maintainers/release-process.md +++ b/maintainers/release-process.md @@ -133,6 +133,8 @@ release: Commit and push this to the maintenance branch. +* Create a backport label. + * Bump the version of `master`: ```console @@ -140,6 +142,7 @@ release: $ git pull $ NEW_VERSION=2.13.0 $ echo $NEW_VERSION > .version + $ ... edit .mergify.yml to add the previous version ... $ git checkout -b bump-$NEW_VERSION $ git commit -a -m 'Bump version' $ git push --set-upstream origin bump-$NEW_VERSION @@ -147,10 +150,6 @@ release: Make a pull request and auto-merge it. -* Create a backport label. - -* Add the new backport label to `.mergify.yml`. - * Post an [announcement on Discourse](https://discourse.nixos.org/c/announcements/8), including the contents of `rl-$VERSION.md`. 
From 5985d6790678898cda4fb4dea1e2504b4b1073ef Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Mon, 25 Aug 2025 17:09:07 +0000 Subject: [PATCH 253/382] feat(libstore/s3): add toHttpsUrl This is extracted from the work in #13752 --- src/libstore-tests/s3.cc | 110 +++++++++++++++++++++++++-- src/libstore/include/nix/store/s3.hh | 6 ++ src/libstore/s3.cc | 38 +++++++++ 3 files changed, 149 insertions(+), 5 deletions(-) diff --git a/src/libstore-tests/s3.cc b/src/libstore-tests/s3.cc index 579cfdc55..df61c04c1 100644 --- a/src/libstore-tests/s3.cc +++ b/src/libstore-tests/s3.cc @@ -35,7 +35,8 @@ INSTANTIATE_TEST_SUITE_P( .bucket = "my-bucket", .key = "my-key.txt", }, - "basic_s3_bucket"}, + "basic_s3_bucket", + }, ParsedS3URLTestCase{ "s3://prod-cache/nix/store/abc123.nar.xz?region=eu-west-1", { @@ -43,7 +44,8 @@ INSTANTIATE_TEST_SUITE_P( .key = "nix/store/abc123.nar.xz", .region = "eu-west-1", }, - "with_region"}, + "with_region", + }, ParsedS3URLTestCase{ "s3://bucket/key?region=us-west-2&profile=prod&endpoint=custom.s3.com&scheme=https®ion=us-east-1", { @@ -54,7 +56,8 @@ INSTANTIATE_TEST_SUITE_P( .scheme = "https", .endpoint = ParsedURL::Authority{.host = "custom.s3.com"}, }, - "complex"}, + "complex", + }, ParsedS3URLTestCase{ "s3://cache/file.txt?profile=production®ion=ap-southeast-2", { @@ -63,7 +66,8 @@ INSTANTIATE_TEST_SUITE_P( .profile = "production", .region = "ap-southeast-2", }, - "with_profile_and_region"}, + "with_profile_and_region", + }, ParsedS3URLTestCase{ "s3://bucket/key?endpoint=https://minio.local&scheme=http", { @@ -77,7 +81,8 @@ INSTANTIATE_TEST_SUITE_P( .authority = ParsedURL::Authority{.host = "minio.local"}, }, }, - "with_absolute_endpoint_uri"}), + "with_absolute_endpoint_uri", + }), [](const ::testing::TestParamInfo & info) { return info.param.description; }); TEST(InvalidParsedS3URLTest, parseS3URLErrors) @@ -91,6 +96,101 @@ TEST(InvalidParsedS3URLTest, parseS3URLErrors) ASSERT_THAT([]() { ParsedS3URL::parse(parseURL("s3://127.0.0.1")); }, invalidBucketMatcher); } +// Parameterized test for s3ToHttpsUrl conversion +struct S3ToHttpsConversionTestCase +{ + ParsedS3URL input; + ParsedURL expected; + std::string description; +}; + +class S3ToHttpsConversionTest : public ::testing::WithParamInterface, + public ::testing::Test +{}; + +TEST_P(S3ToHttpsConversionTest, ConvertsCorrectly) +{ + const auto & testCase = GetParam(); + auto result = testCase.input.toHttpsUrl(); + EXPECT_EQ(result, testCase.expected) << "Failed for: " << testCase.description; +} + +INSTANTIATE_TEST_SUITE_P( + S3ToHttpsConversion, + S3ToHttpsConversionTest, + ::testing::Values( + S3ToHttpsConversionTestCase{ + ParsedS3URL{ + .bucket = "my-bucket", + .key = "my-key.txt", + }, + ParsedURL{ + .scheme = "https", + .authority = ParsedURL::Authority{.host = "s3.us-east-1.amazonaws.com"}, + .path = "/my-bucket/my-key.txt", + }, + "basic_s3_default_region", + }, + S3ToHttpsConversionTestCase{ + ParsedS3URL{ + .bucket = "prod-cache", + .key = "nix/store/abc123.nar.xz", + .region = "eu-west-1", + }, + ParsedURL{ + .scheme = "https", + .authority = ParsedURL::Authority{.host = "s3.eu-west-1.amazonaws.com"}, + .path = "/prod-cache/nix/store/abc123.nar.xz", + }, + "with_eu_west_1_region", + }, + S3ToHttpsConversionTestCase{ + ParsedS3URL{ + .bucket = "bucket", + .key = "key", + .scheme = "http", + .endpoint = ParsedURL::Authority{.host = "custom.s3.com"}, + }, + ParsedURL{ + .scheme = "http", + .authority = ParsedURL::Authority{.host = "custom.s3.com"}, + .path = "/bucket/key", + }, + 
"custom_endpoint_authority", + }, + S3ToHttpsConversionTestCase{ + ParsedS3URL{ + .bucket = "bucket", + .key = "key", + .endpoint = + ParsedURL{ + .scheme = "http", + .authority = ParsedURL::Authority{.host = "server", .port = 9000}, + }, + }, + ParsedURL{ + .scheme = "http", + .authority = ParsedURL::Authority{.host = "server", .port = 9000}, + .path = "/bucket/key", + }, + "custom_endpoint_with_port", + }, + S3ToHttpsConversionTestCase{ + ParsedS3URL{ + .bucket = "bucket", + .key = "path/to/file.txt", + .region = "ap-southeast-2", + .scheme = "https", + }, + ParsedURL{ + .scheme = "https", + .authority = ParsedURL::Authority{.host = "s3.ap-southeast-2.amazonaws.com"}, + .path = "/bucket/path/to/file.txt", + }, + "complex_path_and_region", + }), + [](const ::testing::TestParamInfo & info) { return info.param.description; }); + } // namespace nix #endif diff --git a/src/libstore/include/nix/store/s3.hh b/src/libstore/include/nix/store/s3.hh index 3f38ef62f..ec0cddf68 100644 --- a/src/libstore/include/nix/store/s3.hh +++ b/src/libstore/include/nix/store/s3.hh @@ -75,6 +75,12 @@ struct ParsedS3URL } static ParsedS3URL parse(const ParsedURL & uri); + + /** + * Convert this ParsedS3URL to HTTPS ParsedURL for use with curl's AWS SigV4 authentication + */ + ParsedURL toHttpsUrl() const; + auto operator<=>(const ParsedS3URL & other) const = default; }; diff --git a/src/libstore/s3.cc b/src/libstore/s3.cc index f605b45c1..e58006f03 100644 --- a/src/libstore/s3.cc +++ b/src/libstore/s3.cc @@ -1,6 +1,8 @@ #include "nix/store/s3.hh" #include "nix/util/split.hh" #include "nix/util/url.hh" +#include "nix/util/util.hh" +#include "nix/util/canon-path.hh" namespace nix { @@ -64,6 +66,42 @@ try { throw; } +ParsedURL ParsedS3URL::toHttpsUrl() const +{ + std::string regionStr = region.value_or("us-east-1"); + std::string schemeStr = scheme.value_or("https"); + + // Handle endpoint configuration using std::visit + return std::visit( + overloaded{ + [&](const std::monostate &) { + // No custom endpoint, use standard AWS S3 endpoint + return ParsedURL{ + .scheme = schemeStr, + .authority = ParsedURL::Authority{.host = "s3." 
+ regionStr + ".amazonaws.com"}, + .path = (CanonPath::root / bucket / CanonPath(key)).abs(), + }; + }, + [&](const ParsedURL::Authority & auth) { + // Endpoint is just an authority (hostname/port) + return ParsedURL{ + .scheme = schemeStr, + .authority = auth, + .path = (CanonPath::root / bucket / CanonPath(key)).abs(), + }; + }, + [&](const ParsedURL & endpointUrl) { + // Endpoint is already a ParsedURL (e.g., http://server:9000) + return ParsedURL{ + .scheme = endpointUrl.scheme, + .authority = endpointUrl.authority, + .path = (CanonPath(endpointUrl.path) / bucket / CanonPath(key)).abs(), + }; + }, + }, + endpoint); +} + #endif } // namespace nix From f0e4af436552b18d3ad61375510917cfdf2db1b0 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 25 Aug 2025 22:09:18 +0300 Subject: [PATCH 254/382] libexpr: Fix weird formatting after treewide reformat --- src/libexpr/include/nix/expr/nixexpr.hh | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/src/libexpr/include/nix/expr/nixexpr.hh b/src/libexpr/include/nix/expr/nixexpr.hh index 49bd7a3b6..3c3c5e6f9 100644 --- a/src/libexpr/include/nix/expr/nixexpr.hh +++ b/src/libexpr/include/nix/expr/nixexpr.hh @@ -595,12 +595,17 @@ struct ExprOpNot : Expr { \ return pos; \ } \ - }; + } -MakeBinOp(ExprOpEq, "==") MakeBinOp(ExprOpNEq, "!=") MakeBinOp(ExprOpAnd, "&&") MakeBinOp(ExprOpOr, "||") - MakeBinOp(ExprOpImpl, "->") MakeBinOp(ExprOpUpdate, "//") MakeBinOp(ExprOpConcatLists, "++") +MakeBinOp(ExprOpEq, "=="); +MakeBinOp(ExprOpNEq, "!="); +MakeBinOp(ExprOpAnd, "&&"); +MakeBinOp(ExprOpOr, "||"); +MakeBinOp(ExprOpImpl, "->"); +MakeBinOp(ExprOpUpdate, "//"); +MakeBinOp(ExprOpConcatLists, "++"); - struct ExprConcatStrings : Expr +struct ExprConcatStrings : Expr { PosIdx pos; bool forceString; From e4e8a615fada3f9a098c3dac09ba392d8ef7353d Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 25 Aug 2025 15:31:40 -0400 Subject: [PATCH 255/382] `ParsedS3URL::toHttpsUrl` Slight optimize I didn't want to block that PR on further code review while I figured out these new (to us) C++23 goodies. --- src/libstore/s3.cc | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/libstore/s3.cc b/src/libstore/s3.cc index e58006f03..739de2532 100644 --- a/src/libstore/s3.cc +++ b/src/libstore/s3.cc @@ -68,8 +68,10 @@ try { ParsedURL ParsedS3URL::toHttpsUrl() const { - std::string regionStr = region.value_or("us-east-1"); - std::string schemeStr = scheme.value_or("https"); + auto toView = [](const auto & x) { return std::string_view{x}; }; + + auto regionStr = region.transform(toView).value_or("us-east-1"); + auto schemeStr = scheme.transform(toView).value_or("https"); // Handle endpoint configuration using std::visit return std::visit( @@ -77,7 +79,7 @@ ParsedURL ParsedS3URL::toHttpsUrl() const [&](const std::monostate &) { // No custom endpoint, use standard AWS S3 endpoint return ParsedURL{ - .scheme = schemeStr, + .scheme = std::string{schemeStr}, .authority = ParsedURL::Authority{.host = "s3." 
+ regionStr + ".amazonaws.com"}, .path = (CanonPath::root / bucket / CanonPath(key)).abs(), }; @@ -85,7 +87,7 @@ ParsedURL ParsedS3URL::toHttpsUrl() const [&](const ParsedURL::Authority & auth) { // Endpoint is just an authority (hostname/port) return ParsedURL{ - .scheme = schemeStr, + .scheme = std::string{schemeStr}, .authority = auth, .path = (CanonPath::root / bucket / CanonPath(key)).abs(), }; From e492c64c8e2d905dd97dc9e9870f0eb18f4a8313 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 22 Jul 2025 12:18:52 +0200 Subject: [PATCH 256/382] SQLite: fsync db.sqlite-shm before opening the database This is a workaround for https://github.com/NixOS/nix/issues/13515 (opening the SQLite DB randomly taking a couple of seconds on ZFS). (cherry picked from commit a7fceb5eec404eabf461d4f1281bf4163c5d8ad0) --- src/libstore/sqlite.cc | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc index 56a69470a..5f0b3ce51 100644 --- a/src/libstore/sqlite.cc +++ b/src/libstore/sqlite.cc @@ -4,6 +4,10 @@ #include "nix/util/url.hh" #include "nix/util/signals.hh" +#ifdef __linux__ +# include +#endif + #include #include @@ -60,6 +64,28 @@ static void traceSQL(void * x, const char * sql) SQLite::SQLite(const std::filesystem::path & path, SQLiteOpenMode mode) { + // Work around a ZFS issue where SQLite's truncate() call on + // db.sqlite-shm can randomly take up to a few seconds. See + // https://github.com/openzfs/zfs/issues/14290#issuecomment-3074672917. + // Remove this workaround when a fix is widely installed, perhaps 2027? Candidate: + // https://github.com/search?q=repo%3Aopenzfs%2Fzfs+%22Linux%3A+zfs_putpage%3A+complete+async+page+writeback+immediately%22&type=commits +#ifdef __linux__ + try { + auto shmFile = path; + shmFile += "-shm"; + AutoCloseFD fd = open(shmFile.string().c_str(), O_RDWR | O_CLOEXEC); + if (fd) { + struct statfs fs; + if (fstatfs(fd.get(), &fs)) + throw SysError("statfs() on '%s'", shmFile); + if (fs.f_type == /* ZFS_SUPER_MAGIC */ 801189825 && fdatasync(fd.get()) != 0) + throw SysError("fsync() on '%s'", shmFile); + } + } catch (...) { + throw; + } +#endif + // useSQLiteWAL also indicates what virtual file system we need. Using // `unix-dotfile` is needed on NFS file systems and on Windows' Subsystem // for Linux (WSL) where useSQLiteWAL should be false by default. From 0250d50df3d40159f4495d04cd1c03fc14ed890e Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 20 Aug 2025 19:11:28 -0400 Subject: [PATCH 257/382] Move `runPostBuildHook` out of `DerivationBuilder` It is supposed to be "post build" not "during the build" after all. Its location now matches that for the hook case (see elsewhere in `DerivationBuildingGoal`). It was in a try-catch before, and now it isn't, but I believe that it is impossible for it to throw `BuildError`, which is sufficient for this code motion to be correct.
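For reference, a minimal sketch of where the call ends up (lifted from the hunk below; `DerivationBuildingGoal::tryToBuild()` now runs this once the builder reports success):

    StorePathSet outputPaths;
    for (auto & [_, output] : *builtOutputs)
        outputPaths.insert(output.outPath);
    runPostBuildHook(worker.store, *logger, drvPath, outputPaths);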
--- src/libstore/build/derivation-building-goal.cc | 10 +++++++++- .../nix/store/build/derivation-building-misc.hh | 3 --- .../include/nix/store/build/derivation-goal.hh | 3 --- src/libstore/unix/build/derivation-builder.cc | 5 ----- 4 files changed, 9 insertions(+), 12 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index a82f7f928..6e2fa445c 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -148,6 +148,9 @@ std::string showKnownOutputs(const StoreDirConfig & store, const Derivation & dr return msg; } +static void runPostBuildHook( + const StoreDirConfig & store, Logger & logger, const StorePath & drvPath, const StorePathSet & outputPaths); + /* At least one of the output paths could not be produced using a substitute. So we have to build instead. */ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() @@ -810,6 +813,11 @@ Goal::Co DerivationBuildingGoal::tryToBuild() outputLocks.unlock(); co_return done(std::move(ste->first), {}, std::move(ste->second)); } else if (auto * builtOutputs = std::get_if<1>(&res)) { + StorePathSet outputPaths; + for (auto & [_, output] : *builtOutputs) + outputPaths.insert(output.outPath); + runPostBuildHook(worker.store, *logger, drvPath, outputPaths); + /* It is now safe to delete the lock files, since all future lockers will see that the output paths are valid; they will not create new lock files with the same names as the old @@ -823,7 +831,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() #endif } -void runPostBuildHook( +static void runPostBuildHook( const StoreDirConfig & store, Logger & logger, const StorePath & drvPath, const StorePathSet & outputPaths) { auto hook = settings.postBuildHook; diff --git a/src/libstore/include/nix/store/build/derivation-building-misc.hh b/src/libstore/include/nix/store/build/derivation-building-misc.hh index f9e965104..2b68fa178 100644 --- a/src/libstore/include/nix/store/build/derivation-building-misc.hh +++ b/src/libstore/include/nix/store/build/derivation-building-misc.hh @@ -49,9 +49,6 @@ struct InitialOutput std::optional known; }; -void runPostBuildHook( - const StoreDirConfig & store, Logger & logger, const StorePath & drvPath, const StorePathSet & outputPaths); - /** * Format the known outputs of a derivation for use in error messages. */ diff --git a/src/libstore/include/nix/store/build/derivation-goal.hh b/src/libstore/include/nix/store/build/derivation-goal.hh index 589c3fd58..d9042d136 100644 --- a/src/libstore/include/nix/store/build/derivation-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-goal.hh @@ -14,9 +14,6 @@ namespace nix { using std::map; -/** Used internally */ -void runPostBuildHook(Store & store, Logger & logger, const StorePath & drvPath, const StorePathSet & outputPaths); - /** * A goal for realising a single output of a derivation. Various sorts of * fetching (which will be done by other goal types) is tried, and if none of diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 15c99e3c0..51b44719d 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -506,11 +506,6 @@ std::variant, SingleDrvOutputs> Derivation being valid. 
*/ auto builtOutputs = registerOutputs(); - StorePathSet outputPaths; - for (auto & [_, output] : builtOutputs) - outputPaths.insert(output.outPath); - runPostBuildHook(store, *logger, drvPath, outputPaths); - /* Delete unused redirected outputs (when doing hash rewriting). */ for (auto & i : redirectedOutputs) deletePath(store.Store::toRealPath(i.second)); From afade27123191fdb58a8cbf588610624fbc9082a Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 26 Aug 2025 00:50:12 +0200 Subject: [PATCH 258/382] Update work meeting time in README --- maintainers/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/maintainers/README.md b/maintainers/README.md index 6553cd048..722a920b7 100644 --- a/maintainers/README.md +++ b/maintainers/README.md @@ -46,7 +46,7 @@ The team meets twice a week (times are denoted in the [Europe/Amsterdam](https:/ - mark it as draft if it is blocked on the contributor - escalate it back to the team by moving it to To discuss, and leaving a comment as to why the issue needs to be discussed again. -- Work meeting: Mondays 14:00-16:00 Europe/Amsterdam see [calendar](https://calendar.google.com/calendar/u/0/embed?src=b9o52fobqjak8oq8lfkhg3t0qg@group.calendar.google.com). +- Work meeting: Mondays 18:00-20:00 Europe/Amsterdam; see [calendar](https://calendar.google.com/calendar/u/0/embed?src=b9o52fobqjak8oq8lfkhg3t0qg@group.calendar.google.com). 1. Code review on pull requests from [In review](#in-review). 2. Other chores and tasks. From 7989e3192d24b60b74ec322b8221e7208165ca68 Mon Sep 17 00:00:00 2001 From: Leandro Reina Date: Tue, 26 Aug 2025 17:41:27 +0200 Subject: [PATCH 259/382] Handle empty ports --- src/libutil-tests/url.cc | 17 +++++++++++++++++ src/libutil/url.cc | 2 +- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/src/libutil-tests/url.cc b/src/libutil-tests/url.cc index b248421b3..ae383eb65 100644 --- a/src/libutil-tests/url.cc +++ b/src/libutil-tests/url.cc @@ -273,6 +273,23 @@ TEST(parseURL, emptyStringIsInvalidURL) ASSERT_THROW(parseURL(""), Error); } +TEST(parseURL, parsesHttpUrlWithEmptyPort) +{ + auto s = "http://www.example.org:/file.tar.gz?foo=bar"; + auto parsed = parseURL(s); + + ParsedURL expected{ + .scheme = "http", + .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, + .path = "/file.tar.gz", + .query = (StringMap) {{"foo", "bar"}}, + .fragment = "", + }; + + ASSERT_EQ(parsed, expected); + ASSERT_EQ("http://www.example.org/file.tar.gz?foo=bar", parsed.to_string()); +} + /* ---------------------------------------------------------------------------- * decodeQuery * --------------------------------------------------------------------------*/ diff --git a/src/libutil/url.cc b/src/libutil/url.cc index b7f1eff30..73e8cc181 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -33,7 +33,7 @@ ParsedURL::Authority ParsedURL::Authority::parse(std::string_view encodedAuthori }(); auto port = [&]() -> std::optional { - if (!parsed->has_port()) + if (!parsed->has_port() || parsed->port() == "") return std::nullopt; /* If the port number is non-zero and representable. */ if (auto portNumber = parsed->port_number()) From cc4aa70e6e652724256039a1e5e7940d7e1e7564 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 26 Aug 2025 14:37:13 -0400 Subject: [PATCH 260/382] Better `stringSplit` I need this for some `ParseURL` improvements, but I figure this is better to send as its own PR. I changed the tests willy-nilly to sometimes use `std::list` instead of `Strings` (which is `std::list`). 
Co-Authored-By: Sergei Zimmerman --- src/libutil-tests/strings.cc | 94 ++++++++++--------- .../include/nix/util/strings-inline.hh | 17 +++- 2 files changed, 66 insertions(+), 45 deletions(-) diff --git a/src/libutil-tests/strings.cc b/src/libutil-tests/strings.cc index bf1f66025..bd740ce0c 100644 --- a/src/libutil-tests/strings.cc +++ b/src/libutil-tests/strings.cc @@ -2,6 +2,7 @@ #include #include "nix/util/strings.hh" +#include "nix/util/strings-inline.hh" #include "nix/util/error.hh" namespace nix { @@ -271,113 +272,122 @@ TEST(tokenizeString, tokenizeSepEmpty) * splitString * --------------------------------------------------------------------------*/ -TEST(splitString, empty) -{ - Strings expected = {""}; +using SplitStringTestContainerTypes = ::testing:: + Types, std::vector, std::list, std::list>; - ASSERT_EQ(splitString("", " \t\n\r"), expected); +template +class splitStringTest : public ::testing::Test +{}; + +TYPED_TEST_SUITE(splitStringTest, SplitStringTestContainerTypes); + +TYPED_TEST(splitStringTest, empty) +{ + TypeParam expected = {""}; + + EXPECT_EQ(splitString("", " \t\n\r"), expected); } -TEST(splitString, oneSep) +TYPED_TEST(splitStringTest, oneSep) { - Strings expected = {"", ""}; + TypeParam expected = {"", ""}; - ASSERT_EQ(splitString(" ", " \t\n\r"), expected); + EXPECT_EQ(splitString(" ", " \t\n\r"), expected); } -TEST(splitString, twoSep) +TYPED_TEST(splitStringTest, twoSep) { - Strings expected = {"", "", ""}; + TypeParam expected = {"", "", ""}; - ASSERT_EQ(splitString(" \n", " \t\n\r"), expected); + EXPECT_EQ(splitString(" \n", " \t\n\r"), expected); } -TEST(splitString, tokenizeSpacesWithSpaces) +TYPED_TEST(splitStringTest, tokenizeSpacesWithSpaces) { auto s = "foo bar baz"; - Strings expected = {"foo", "bar", "baz"}; + TypeParam expected = {"foo", "bar", "baz"}; - ASSERT_EQ(splitString(s, " \t\n\r"), expected); + EXPECT_EQ(splitString(s, " \t\n\r"), expected); } -TEST(splitString, tokenizeTabsWithDefaults) +TYPED_TEST(splitStringTest, tokenizeTabsWithDefaults) { auto s = "foo\tbar\tbaz"; // Using it like this is weird, but shows the difference with tokenizeString, which also has this test - Strings expected = {"foo", "bar", "baz"}; + TypeParam expected = {"foo", "bar", "baz"}; - ASSERT_EQ(splitString(s, " \t\n\r"), expected); + EXPECT_EQ(splitString(s, " \t\n\r"), expected); } -TEST(splitString, tokenizeTabsSpacesWithDefaults) +TYPED_TEST(splitStringTest, tokenizeTabsSpacesWithDefaults) { auto s = "foo\t bar\t baz"; // Using it like this is weird, but shows the difference with tokenizeString, which also has this test - Strings expected = {"foo", "", "bar", "", "baz"}; + TypeParam expected = {"foo", "", "bar", "", "baz"}; - ASSERT_EQ(splitString(s, " \t\n\r"), expected); + EXPECT_EQ(splitString(s, " \t\n\r"), expected); } -TEST(splitString, tokenizeTabsSpacesNewlineWithDefaults) +TYPED_TEST(splitStringTest, tokenizeTabsSpacesNewlineWithDefaults) { auto s = "foo\t\n bar\t\n baz"; // Using it like this is weird, but shows the difference with tokenizeString, which also has this test - Strings expected = {"foo", "", "", "bar", "", "", "baz"}; + TypeParam expected = {"foo", "", "", "bar", "", "", "baz"}; - ASSERT_EQ(splitString(s, " \t\n\r"), expected); + EXPECT_EQ(splitString(s, " \t\n\r"), expected); } -TEST(splitString, tokenizeTabsSpacesNewlineRetWithDefaults) +TYPED_TEST(splitStringTest, tokenizeTabsSpacesNewlineRetWithDefaults) { auto s = "foo\t\n\r bar\t\n\r baz"; // Using it like this is weird, but shows the difference with tokenizeString, which also has 
this test - Strings expected = {"foo", "", "", "", "bar", "", "", "", "baz"}; + TypeParam expected = {"foo", "", "", "", "bar", "", "", "", "baz"}; - ASSERT_EQ(splitString(s, " \t\n\r"), expected); + EXPECT_EQ(splitString(s, " \t\n\r"), expected); auto s2 = "foo \t\n\r bar \t\n\r baz"; - Strings expected2 = {"foo", "", "", "", "", "bar", "", "", "", "", "baz"}; + TypeParam expected2 = {"foo", "", "", "", "", "bar", "", "", "", "", "baz"}; - ASSERT_EQ(splitString(s2, " \t\n\r"), expected2); + EXPECT_EQ(splitString(s2, " \t\n\r"), expected2); } -TEST(splitString, tokenizeWithCustomSep) +TYPED_TEST(splitStringTest, tokenizeWithCustomSep) { auto s = "foo\n,bar\n,baz\n"; - Strings expected = {"foo\n", "bar\n", "baz\n"}; + TypeParam expected = {"foo\n", "bar\n", "baz\n"}; - ASSERT_EQ(splitString(s, ","), expected); + EXPECT_EQ(splitString(s, ","), expected); } -TEST(splitString, tokenizeSepAtStart) +TYPED_TEST(splitStringTest, tokenizeSepAtStart) { auto s = ",foo,bar,baz"; - Strings expected = {"", "foo", "bar", "baz"}; + TypeParam expected = {"", "foo", "bar", "baz"}; - ASSERT_EQ(splitString(s, ","), expected); + EXPECT_EQ(splitString(s, ","), expected); } -TEST(splitString, tokenizeSepAtEnd) +TYPED_TEST(splitStringTest, tokenizeSepAtEnd) { auto s = "foo,bar,baz,"; - Strings expected = {"foo", "bar", "baz", ""}; + TypeParam expected = {"foo", "bar", "baz", ""}; - ASSERT_EQ(splitString(s, ","), expected); + EXPECT_EQ(splitString(s, ","), expected); } -TEST(splitString, tokenizeSepEmpty) +TYPED_TEST(splitStringTest, tokenizeSepEmpty) { auto s = "foo,,baz"; - Strings expected = {"foo", "", "baz"}; + TypeParam expected = {"foo", "", "baz"}; - ASSERT_EQ(splitString(s, ","), expected); + EXPECT_EQ(splitString(s, ","), expected); } // concatStringsSep sep . splitString sep = id if sep is 1 char -RC_GTEST_PROP(splitString, recoveredByConcatStringsSep, (const std::string & s)) +RC_GTEST_TYPED_FIXTURE_PROP(splitStringTest, recoveredByConcatStringsSep, (const std::string & s)) { - RC_ASSERT(concatStringsSep("/", splitString(s, "/")) == s); - RC_ASSERT(concatStringsSep("a", splitString(s, "a")) == s); + RC_ASSERT(concatStringsSep("/", splitString(s, "/")) == s); + RC_ASSERT(concatStringsSep("a", splitString(s, "a")) == s); } /* ---------------------------------------------------------------------------- diff --git a/src/libutil/include/nix/util/strings-inline.hh b/src/libutil/include/nix/util/strings-inline.hh index d99b686fc..61bddfeda 100644 --- a/src/libutil/include/nix/util/strings-inline.hh +++ b/src/libutil/include/nix/util/strings-inline.hh @@ -26,18 +26,29 @@ C tokenizeString(std::string_view s, std::string_view separators) } template -C basicSplitString(std::basic_string_view s, std::basic_string_view separators) +void basicSplitStringInto(C & accum, std::basic_string_view s, std::basic_string_view separators) { - C result; size_t pos = 0; while (pos <= s.size()) { auto end = s.find_first_of(separators, pos); if (end == s.npos) end = s.size(); - result.insert(result.end(), std::basic_string(s, pos, end - pos)); + accum.insert(accum.end(), typename C::value_type{s.substr(pos, end - pos)}); pos = end + 1; } +} +template +void splitStringInto(C & accum, std::string_view s, std::string_view separators) +{ + basicSplitStringInto(accum, s, separators); +} + +template +C basicSplitString(std::basic_string_view s, std::basic_string_view separators) +{ + C result; + basicSplitStringInto(result, s, separators); return result; } From 625477a7df3850fa98073db6190c4785784d08c2 Mon Sep 17 00:00:00 2001 From: 
Sergei Zimmerman Date: Wed, 27 Aug 2025 01:14:44 +0300 Subject: [PATCH 261/382] flake: Update nixpkgs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/cd32a774ac52caaa03bcfc9e7591ac8c18617ced?narHash=sha256-VtMQg02B3kt1oejwwrGn50U9Xbjgzfbb5TV5Wtx8dKI%3D' (2025-08-17) → 'github:NixOS/nixpkgs/d98ce345cdab58477ca61855540999c86577d19d?narHash=sha256-O2CIn7HjZwEGqBrwu9EU76zlmA5dbmna7jL1XUmAId8%3D' (2025-08-26) This update contains d1266642a8722f2a05e311fa151c1413d2b9653c, which is necessary for the TOML timestamps to get tested via the nixpkgsLibTests job. --- flake.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index 073e371f5..cc2b2f27e 100644 --- a/flake.lock +++ b/flake.lock @@ -63,11 +63,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1755442223, - "narHash": "sha256-VtMQg02B3kt1oejwwrGn50U9Xbjgzfbb5TV5Wtx8dKI=", + "lastModified": 1756178832, + "narHash": "sha256-O2CIn7HjZwEGqBrwu9EU76zlmA5dbmna7jL1XUmAId8=", + "owner": "NixOS", "repo": "nixpkgs", - "rev": "cd32a774ac52caaa03bcfc9e7591ac8c18617ced", + "rev": "d98ce345cdab58477ca61855540999c86577d19d", "type": "github" }, "original": { From e82210b3b20b5193d90475bd59319c3196e7a407 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 24 Aug 2025 14:30:53 -0400 Subject: [PATCH 262/382] Implement `parseURLRelative`, use in `HttpBinaryCacheStore` This allows us to replace some very hacky and not correct string concatenation in `HttpBinaryCacheStore`. It will especially be useful with #13752, when today's hacks started to cause problems in practice, not just theory. Also make `fixGitURL` return a `ParsedURL`. --- src/libexpr/primops/fetchTree.cc | 4 +- src/libfetchers/git-lfs-fetch.cc | 2 +- src/libfetchers/git.cc | 4 +- src/libstore/http-binary-cache-store.cc | 21 +-- src/libutil-tests/url.cc | 188 ++++++++++++++++++++++++ src/libutil/include/nix/util/url.hh | 12 +- src/libutil/url.cc | 77 ++++++++-- 7 files changed, 278 insertions(+), 30 deletions(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 274f758a7..d58d76d75 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -113,7 +113,7 @@ static void fetchTree( auto s = state.coerceToString(attr.pos, *attr.value, context, "", false, false).toOwned(); attrs.emplace( state.symbols[attr.name], - params.isFetchGit && state.symbols[attr.name] == "url" ?
fixGitURL(s).to_string() : s); } else if (attr.value->type() == nBool) attrs.emplace(state.symbols[attr.name], Explicit{attr.value->boolean()}); else if (attr.value->type() == nInt) { @@ -175,7 +175,7 @@ static void fetchTree( if (params.isFetchGit) { fetchers::Attrs attrs; attrs.emplace("type", "git"); - attrs.emplace("url", fixGitURL(url)); + attrs.emplace("url", fixGitURL(url).to_string()); if (!attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) { attrs.emplace("exportIgnore", Explicit{true}); diff --git a/src/libfetchers/git-lfs-fetch.cc b/src/libfetchers/git-lfs-fetch.cc index f555a9a4c..bd9752711 100644 --- a/src/libfetchers/git-lfs-fetch.cc +++ b/src/libfetchers/git-lfs-fetch.cc @@ -179,7 +179,7 @@ Fetch::Fetch(git_repository * repo, git_oid rev) const auto remoteUrl = lfs::getLfsEndpointUrl(repo); - this->url = nix::parseURL(nix::fixGitURL(remoteUrl)).canonicalise(); + this->url = nix::fixGitURL(remoteUrl).canonicalise(); } bool Fetch::shouldFetch(const CanonPath & path) const diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index bd1e1fffe..c19e8d7db 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -233,9 +233,7 @@ struct GitInputScheme : InputScheme Input input{settings}; input.attrs = attrs; - auto url = fixGitURL(getStrAttr(attrs, "url")); - parseURL(url); - input.attrs["url"] = url; + input.attrs["url"] = fixGitURL(getStrAttr(attrs, "url")).to_string(); getShallowAttr(input); getSubmodulesAttr(input); getAllRefsAttr(input); diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 940dcec2e..ab799617e 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -154,22 +154,17 @@ protected: FileTransferRequest makeRequest(const std::string & path) { - /* FIXME path is not a path, but a full relative or absolute + /* Otherwise the last path fragment will get discarded. */ + auto cacheUriWithTrailingSlash = config->cacheUri; + if (!cacheUriWithTrailingSlash.path.empty()) + cacheUriWithTrailingSlash.path += "/"; + + /* path is not a path, but a full relative or absolute URL, e.g. we've seen in the wild NARINFO files have a URL field which is `nar/15f99rdaf26k39knmzry4xd0d97wp6yfpnfk1z9avakis7ipb9yg.nar?hash=zphkqn2wg8mnvbkixnl2aadkbn0rcnfj` - (note the query param) and that gets passed here. - - What should actually happen is that we have two parsed URLs - (if we support relative URLs), and then we combined them with - a URL `operator/` which would be like - `std::filesystem::path`'s equivalent operator, which properly - combines the the URLs, whether the right is relative or - absolute. */ - return FileTransferRequest(parseURL( - hasPrefix(path, "https://") || hasPrefix(path, "http://") || hasPrefix(path, "file://") - ? path - : config->cacheUri.to_string() + "/" + path)); + (note the query param) and that gets passed here. 
*/ + return FileTransferRequest(parseURLRelative(path, cacheUriWithTrailingSlash)); } void getFile(const std::string & path, Sink & sink) override diff --git a/src/libutil-tests/url.cc b/src/libutil-tests/url.cc index ae383eb65..b776ba671 100644 --- a/src/libutil-tests/url.cc +++ b/src/libutil-tests/url.cc @@ -290,6 +290,194 @@ TEST(parseURL, parsesHttpUrlWithEmptyPort) ASSERT_EQ("http://www.example.org/file.tar.gz?foo=bar", parsed.to_string()); } +/* ---------------------------------------------------------------------------- + * parseURLRelative + * --------------------------------------------------------------------------*/ + +TEST(parseURLRelative, resolvesRelativePath) +{ + ParsedURL base = parseURL("http://example.org/dir/page.html"); + auto parsed = parseURLRelative("subdir/file.txt", base); + ParsedURL expected{ + .scheme = "http", + .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "example.org"}, + .path = "/dir/subdir/file.txt", + .query = {}, + .fragment = "", + }; + ASSERT_EQ(parsed, expected); +} + +TEST(parseURLRelative, baseUrlIpv6AddressWithoutZoneId) +{ + ParsedURL base = parseURL("http://[fe80::818c:da4d:8975:415c]/dir/page.html"); + auto parsed = parseURLRelative("subdir/file.txt", base); + ParsedURL expected{ + .scheme = "http", + .authority = ParsedURL::Authority{.hostType = HostType::IPv6, .host = "fe80::818c:da4d:8975:415c"}, + .path = "/dir/subdir/file.txt", + .query = {}, + .fragment = "", + }; + ASSERT_EQ(parsed, expected); +} + +TEST(parseURLRelative, resolvesRelativePathIpv6AddressWithZoneId) +{ + ParsedURL base = parseURL("http://[fe80::818c:da4d:8975:415c\%25enp0s25]:8080/dir/page.html"); + auto parsed = parseURLRelative("subdir/file2.txt", base); + ParsedURL expected{ + .scheme = "http", + .authority = Authority{.hostType = HostType::IPv6, .host = "fe80::818c:da4d:8975:415c\%enp0s25", .port = 8080}, + .path = "/dir/subdir/file2.txt", + .query = {}, + .fragment = "", + }; + + ASSERT_EQ(parsed, expected); +} + +TEST(parseURLRelative, resolvesRelativePathWithDot) +{ + ParsedURL base = parseURL("http://example.org/dir/page.html"); + auto parsed = parseURLRelative("./subdir/file.txt", base); + ParsedURL expected{ + .scheme = "http", + .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "example.org"}, + .path = "/dir/subdir/file.txt", + .query = {}, + .fragment = "", + }; + ASSERT_EQ(parsed, expected); +} + +TEST(parseURLRelative, resolvesParentDirectory) +{ + ParsedURL base = parseURL("http://example.org:234/dir/page.html"); + auto parsed = parseURLRelative("../up.txt", base); + ParsedURL expected{ + .scheme = "http", + .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "example.org", .port = 234}, + .path = "/up.txt", + .query = {}, + .fragment = "", + }; + ASSERT_EQ(parsed, expected); +} + +TEST(parseURLRelative, replacesPathWithAbsoluteRelative) +{ + ParsedURL base = parseURL("http://example.org/dir/page.html"); + auto parsed = parseURLRelative("/rooted.txt", base); + ParsedURL expected{ + .scheme = "http", + .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "example.org"}, + .path = "/rooted.txt", + .query = {}, + .fragment = "", + }; + ASSERT_EQ(parsed, expected); +} + +TEST(parseURLRelative, keepsQueryAndFragmentFromRelative) +{ + // But discard query params on base URL + ParsedURL base = parseURL("https://www.example.org/path/index.html?z=3"); + auto parsed = parseURLRelative("other.html?x=1&y=2#frag", base); + ParsedURL expected{ + .scheme = "https", + .authority = 
ParsedURL::Authority{.hostType = HostType::Name, .host = "www.example.org"}, + .path = "/path/other.html", + .query = {{"x", "1"}, {"y", "2"}}, + .fragment = "frag", + }; + ASSERT_EQ(parsed, expected); +} + +TEST(parseURLRelative, absOverride) +{ + ParsedURL base = parseURL("http://example.org/path/page.html"); + std::string_view abs = "https://127.0.0.1.org/secure"; + auto parsed = parseURLRelative(abs, base); + auto parsedAbs = parseURL(abs); + ASSERT_EQ(parsed, parsedAbs); +} + +TEST(parseURLRelative, absOverrideWithZoneId) +{ + ParsedURL base = parseURL("http://example.org/path/page.html"); + std::string_view abs = "https://[fe80::818c:da4d:8975:415c\%25enp0s25]/secure?foo=bar"; + auto parsed = parseURLRelative(abs, base); + auto parsedAbs = parseURL(abs); + ASSERT_EQ(parsed, parsedAbs); +} + +TEST(parseURLRelative, bothWithoutAuthority) +{ + ParsedURL base = parseURL("mailto:mail-base@bar.baz?bcc=alice@asdf.com"); + std::string_view over = "mailto:mail-override@foo.bar?subject=url-testing"; + auto parsed = parseURLRelative(over, base); + auto parsedOverride = parseURL(over); + ASSERT_EQ(parsed, parsedOverride); +} + +TEST(parseURLRelative, emptyRelative) +{ + ParsedURL base = parseURL("https://www.example.org/path/index.html?a\%20b=5\%206&x\%20y=34#frag"); + auto parsed = parseURLRelative("", base); + ParsedURL expected{ + .scheme = "https", + .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "www.example.org"}, + .path = "/path/index.html", + .query = {{"a b", "5 6"}, {"x y", "34"}}, + .fragment = "", + }; + EXPECT_EQ(base.fragment, "frag"); + EXPECT_EQ(parsed, expected); +} + +TEST(parseURLRelative, fragmentRelative) +{ + ParsedURL base = parseURL("https://www.example.org/path/index.html?a\%20b=5\%206&x\%20y=34#frag"); + auto parsed = parseURLRelative("#frag2", base); + ParsedURL expected{ + .scheme = "https", + .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "www.example.org"}, + .path = "/path/index.html", + .query = {{"a b", "5 6"}, {"x y", "34"}}, + .fragment = "frag2", + }; + EXPECT_EQ(parsed, expected); +} + +TEST(parseURLRelative, queryRelative) +{ + ParsedURL base = parseURL("https://www.example.org/path/index.html?a\%20b=5\%206&x\%20y=34#frag"); + auto parsed = parseURLRelative("?asdf\%20qwer=1\%202\%203", base); + ParsedURL expected{ + .scheme = "https", + .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "www.example.org"}, + .path = "/path/index.html", + .query = {{"asdf qwer", "1 2 3"}}, + .fragment = "", + }; + EXPECT_EQ(parsed, expected); +} + +TEST(parseURLRelative, queryFragmentRelative) +{ + ParsedURL base = parseURL("https://www.example.org/path/index.html?a\%20b=5\%206&x\%20y=34#frag"); + auto parsed = parseURLRelative("?asdf\%20qwer=1\%202\%203#frag2", base); + ParsedURL expected{ + .scheme = "https", + .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "www.example.org"}, + .path = "/path/index.html", + .query = {{"asdf qwer", "1 2 3"}}, + .fragment = "frag2", + }; + EXPECT_EQ(parsed, expected); +} + /* ---------------------------------------------------------------------------- * decodeQuery * --------------------------------------------------------------------------*/ diff --git a/src/libutil/include/nix/util/url.hh b/src/libutil/include/nix/util/url.hh index 3262b44b7..54bd1e533 100644 --- a/src/libutil/include/nix/util/url.hh +++ b/src/libutil/include/nix/util/url.hh @@ -117,6 +117,16 @@ std::string encodeQuery(const StringMap & query); */ ParsedURL parseURL(std::string_view 
url, bool lenient = false); +/** + * Like `parseURL`, but also accepts relative URLs, which are resolved + * against the given base URL. + * + * This is specified in [IETF RFC 3986, section 5](https://datatracker.ietf.org/doc/html/rfc3986#section-5) + * + * Behavior should also match the `new URL(url, base)` JavaScript constructor. + */ +ParsedURL parseURLRelative(std::string_view url, const ParsedURL & base); + /** * Although that’s not really standardized anywhere, an number of tools * use a scheme of the form 'x+y' in urls, where y is the “transport layer” @@ -136,7 +146,7 @@ ParsedUrlScheme parseUrlScheme(std::string_view scheme); /* Detects scp-style uris (e.g. git@github.com:NixOS/nix) and fixes them by removing the `:` and assuming a scheme of `ssh://`. Also changes absolute paths into file:// URLs. */ -std::string fixGitURL(const std::string & url); +ParsedURL fixGitURL(const std::string & url); /** * Whether a string is valid as RFC 3986 scheme name. diff --git a/src/libutil/url.cc b/src/libutil/url.cc index 73e8cc181..ff0b7a71b 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -108,6 +108,8 @@ static std::string percentEncodeCharSet(std::string_view s, auto charSet) return res; } +static ParsedURL fromBoostUrlView(boost::urls::url_view url, bool lenient); + ParsedURL parseURL(std::string_view url, bool lenient) try { /* Account for several non-standard properties of nix urls (for back-compat): @@ -149,10 +151,15 @@ try { }(); } - auto urlView = boost::urls::url_view(lenient ? fixedEncodedUrl : url); + return fromBoostUrlView(boost::urls::url_view(lenient ? fixedEncodedUrl : url), lenient); +} catch (boost::system::system_error & e) { + throw BadURL("'%s' is not a valid URL: %s", url, e.code().message()); +} +static ParsedURL fromBoostUrlView(boost::urls::url_view urlView, bool lenient) +{ if (!urlView.has_scheme()) - throw BadURL("'%s' doesn't have a scheme", url); + throw BadURL("'%s' doesn't have a scheme", urlView.buffer()); auto scheme = urlView.scheme(); auto authority = [&]() -> std::optional { @@ -170,7 +177,7 @@ try { * scheme considers a missing authority or empty host invalid. 
*/ auto transportIsFile = parseUrlScheme(scheme).transport == "file"; if (authority && authority->host.size() && transportIsFile) - throw BadURL("file:// URL '%s' has unexpected authority '%s'", url, *authority); + throw BadURL("file:// URL '%s' has unexpected authority '%s'", urlView.buffer(), *authority); auto path = urlView.path(); /* Does pct-decoding */ auto fragment = urlView.fragment(); /* Does pct-decoding */ @@ -189,8 +196,58 @@ try { .query = decodeQuery(query, lenient), .fragment = fragment, }; -} catch (boost::system::system_error & e) { - throw BadURL("'%s' is not a valid URL: %s", url, e.code().message()); +} + +ParsedURL parseURLRelative(std::string_view urlS, const ParsedURL & base) +try { + + boost::urls::url resolved; + + try { + resolved.set_scheme(base.scheme); + if (base.authority) { + auto & authority = *base.authority; + resolved.set_host_address(authority.host); + if (authority.user) + resolved.set_user(*authority.user); + if (authority.password) + resolved.set_password(*authority.password); + if (authority.port) + resolved.set_port_number(*authority.port); + } + resolved.set_path(base.path); + resolved.set_encoded_query(encodeQuery(base.query)); + resolved.set_fragment(base.fragment); + } catch (boost::system::system_error & e) { + throw BadURL("'%s' is not a valid URL: %s", base.to_string(), e.code().message()); + } + + boost::urls::url_view url; + try { + url = urlS; + resolved.resolve(url).value(); + } catch (boost::system::system_error & e) { + throw BadURL("'%s' is not a valid URL: %s", urlS, e.code().message()); + } + + auto ret = fromBoostUrlView(resolved, /*lenient=*/false); + + /* Hack: Boost `url_view` supports Zone IDs, but `url` does not. + Just manually take the authority from the original URL to work + around it. See https://github.com/boostorg/url/issues/919 for + details. */ + if (!url.has_authority()) { + ret.authority = base.authority; + } + + /* Hack, work around fragment of base URL improperly being preserved + https://github.com/boostorg/url/issues/920 */ + ret.fragment = url.has_fragment() ? std::string{url.fragment()} : ""; + + return ret; +} catch (BadURL & e) { + e.addTrace({}, "while resolving possibly-relative url '%s' against base URL '%s'", urlS, base); + throw; } std::string percentDecode(std::string_view in) @@ -287,17 +344,17 @@ ParsedUrlScheme parseUrlScheme(std::string_view scheme) }; } -std::string fixGitURL(const std::string & url) +ParsedURL fixGitURL(const std::string & url) { std::regex scpRegex("([^/]*)@(.*):(.*)"); if (!hasPrefix(url, "/") && std::regex_match(url, scpRegex)) - return std::regex_replace(url, scpRegex, "ssh://$1@$2/$3"); + return parseURL(std::regex_replace(url, scpRegex, "ssh://$1@$2/$3")); if (hasPrefix(url, "file:")) - return url; + return parseURL(url); if (url.find("://") == std::string::npos) { - return (ParsedURL{.scheme = "file", .authority = ParsedURL::Authority{}, .path = url}).to_string(); + return (ParsedURL{.scheme = "file", .authority = ParsedURL::Authority{}, .path = url}); } - return url; + return parseURL(url); } // https://www.rfc-editor.org/rfc/rfc3986#section-3.1 From 725a2f379fcd76ff1137132fee48dffba9c0c396 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Wed, 27 Aug 2025 09:29:47 +0200 Subject: [PATCH 263/382] don't include derivation name in temporary build directories With the migration to /nix/var/nix/builds we now have failing builds when the derivation name is too long. 
This change removes the derivation name from the temporary build directory to give it a predictable prefix length. Also see: https://github.com/NixOS/infra/pull/764 for context. --- doc/manual/rl-next/shorter-build-dir-names.md | 6 ++++++ src/libstore/unix/build/derivation-builder.cc | 2 +- tests/functional/check.sh | 6 +++--- tests/nixos/user-sandboxing/default.nix | 8 ++++---- 4 files changed, 14 insertions(+), 8 deletions(-) create mode 100644 doc/manual/rl-next/shorter-build-dir-names.md diff --git a/doc/manual/rl-next/shorter-build-dir-names.md b/doc/manual/rl-next/shorter-build-dir-names.md new file mode 100644 index 000000000..e87fa5d04 --- /dev/null +++ b/doc/manual/rl-next/shorter-build-dir-names.md @@ -0,0 +1,6 @@ +--- +synopsis: "Temporary build directories no longer include derivation names" +prs: [13839] +--- + +Temporary build directories created during derivation builds no longer include the derivation name in their path to avoid build failures when the derivation name is too long. This change ensures predictable prefix lengths for build directories under `/nix/var/nix/builds`. \ No newline at end of file diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 15c99e3c0..f94bb40cc 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -706,7 +706,7 @@ void DerivationBuilderImpl::startBuilder() /* Create a temporary directory where the build will take place. */ - topTmpDir = createTempDir(buildDir, "nix-build-" + std::string(drvPath.name()), 0700); + topTmpDir = createTempDir(buildDir, "nix", 0700); setBuildTmpDir(); assert(!tmpDir.empty()); diff --git a/tests/functional/check.sh b/tests/functional/check.sh index a1c6decf5..260506138 100755 --- a/tests/functional/check.sh +++ b/tests/functional/check.sh @@ -52,10 +52,10 @@ test_custom_build_dir() { nix-build check.nix -A failed --argstr checkBuildId "$checkBuildId" \ --no-out-link --keep-failed --option build-dir "$TEST_ROOT/custom-build-dir" 2> "$TEST_ROOT/log" || status=$?
[ "$status" = "100" ] - [[ 1 == "$(count "$customBuildDir/nix-build-"*)" ]] - local buildDir=("$customBuildDir/nix-build-"*) + [[ 1 == "$(count "$customBuildDir/nix-"*)" ]] + local buildDir=("$customBuildDir/nix-"*) if [[ "${#buildDir[@]}" -ne 1 ]]; then - echo "expected one nix-build-* directory, got: ${buildDir[*]}" >&2 + echo "expected one nix-* directory, got: ${buildDir[*]}" >&2 exit 1 fi if [[ -e ${buildDir[*]}/build ]]; then diff --git a/tests/nixos/user-sandboxing/default.nix b/tests/nixos/user-sandboxing/default.nix index 3f6b575b0..d6899140a 100644 --- a/tests/nixos/user-sandboxing/default.nix +++ b/tests/nixos/user-sandboxing/default.nix @@ -104,8 +104,8 @@ in # Wait for the build to be ready # This is OK because it runs as root, so we can access everything - machine.wait_until_succeeds("stat /nix/var/nix/builds/nix-build-open-build-dir.drv-*/build/syncPoint") - dir = machine.succeed("ls -d /nix/var/nix/builds/nix-build-open-build-dir.drv-*").strip() + machine.wait_until_succeeds("stat /nix/var/nix/builds/nix-*/build/syncPoint") + dir = machine.succeed("ls -d /nix/var/nix/builds/nix-*").strip() # But Alice shouldn't be able to access the build directory machine.fail(f"su alice -c 'ls {dir}/build'") @@ -125,8 +125,8 @@ in args = [ (builtins.storePath "${create-hello-world}") ]; }' >&2 & """.strip()) - machine.wait_until_succeeds("stat /nix/var/nix/builds/nix-build-innocent.drv-*/build/syncPoint") - dir = machine.succeed("ls -d /nix/var/nix/builds/nix-build-innocent.drv-*").strip() + machine.wait_until_succeeds("stat /nix/var/nix/builds/nix-*/build/syncPoint") + dir = machine.succeed("ls -d /nix/var/nix/builds/nix-*").strip() # The build ran as `nixbld1` (which is the only build user on the # machine), but a process running as `nixbld1` outside the sandbox From f5f9e32f5498309e7862e48a394bf7a146dbce91 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 21 Aug 2025 13:15:19 -0400 Subject: [PATCH 264/382] No more `DerivationBuilderParams:` constructor! I am not sure how/why this started working. C++23? --- .../build/derivation-building-goal.cc | 20 +++++++------- .../nix/store/build/derivation-builder.hh | 26 ------------------- 2 files changed, 10 insertions(+), 36 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index a82f7f928..24a53c27b 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -758,16 +758,16 @@ Goal::Co DerivationBuildingGoal::tryToBuild() *localStoreP, std::make_unique(*this, builder), DerivationBuilderParams{ - drvPath, - buildMode, - buildResult, - *drv, - *drvOptions, - inputPaths, - initialOutputs, - std::move(defaultPathsInChroot), - std::move(finalEnv), - std::move(extraFiles), + .drvPath = drvPath, + .buildResult = buildResult, + .drv = *drv, + .drvOptions = *drvOptions, + .inputPaths = inputPaths, + .initialOutputs = initialOutputs, + .buildMode = buildMode, + .defaultPathsInChroot = std::move(defaultPathsInChroot), + .finalEnv = std::move(finalEnv), + .extraFiles = std::move(extraFiles), }); } diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index 144ca27b1..48ad06e17 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -93,32 +93,6 @@ struct DerivationBuilderParams * `EnvEntry::nameOfPassAsFile` above. 
*/ StringMap extraFiles; - - DerivationBuilderParams( - const StorePath & drvPath, - const BuildMode & buildMode, - BuildResult & buildResult, - const Derivation & drv, - const DerivationOptions & drvOptions, - const StorePathSet & inputPaths, - std::map & initialOutputs, - PathsInChroot defaultPathsInChroot, - std::map> finalEnv, - StringMap extraFiles) - : drvPath{drvPath} - , buildResult{buildResult} - , drv{drv} - , drvOptions{drvOptions} - , inputPaths{inputPaths} - , initialOutputs{initialOutputs} - , buildMode{buildMode} - , defaultPathsInChroot{std::move(defaultPathsInChroot)} - , finalEnv{std::move(finalEnv)} - , extraFiles{std::move(extraFiles)} - { - } - - DerivationBuilderParams(DerivationBuilderParams &&) = default; }; /** From f4a0161cb131840c21fa54721b1b243b7319e71a Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 21 Aug 2025 13:15:19 -0400 Subject: [PATCH 265/382] Create `StringSet DerivationBuilderParams::systemFeatures` Do this to avoid checking "system features" from the store config directly, because we rather not have `DerivationBuilder` depend on `Store`. --- src/libstore/build/derivation-building-goal.cc | 1 + .../include/nix/store/build/derivation-builder.hh | 8 ++++++++ src/libstore/unix/build/linux-derivation-builder.cc | 2 +- 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 24a53c27b..50c1d5055 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -766,6 +766,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() .initialOutputs = initialOutputs, .buildMode = buildMode, .defaultPathsInChroot = std::move(defaultPathsInChroot), + .systemFeatures = worker.store.config.systemFeatures.get(), .finalEnv = std::move(finalEnv), .extraFiles = std::move(extraFiles), }); diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index 48ad06e17..f00d4db25 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -65,6 +65,14 @@ struct DerivationBuilderParams */ PathsInChroot defaultPathsInChroot; + /** + * May be used to control various platform-specific functionality. + * + * For example, on Linux, the `kvm` system feature controls whether + * `/dev/kvm` should be exposed to the builder within the sandbox. 
+ */ + StringSet systemFeatures; + struct EnvEntry { /** diff --git a/src/libstore/unix/build/linux-derivation-builder.cc b/src/libstore/unix/build/linux-derivation-builder.cc index 0d9dc4a85..b92d05607 100644 --- a/src/libstore/unix/build/linux-derivation-builder.cc +++ b/src/libstore/unix/build/linux-derivation-builder.cc @@ -492,7 +492,7 @@ struct ChrootLinuxDerivationBuilder : ChrootDerivationBuilder, LinuxDerivationBu createDirs(chrootRootDir + "/dev/shm"); createDirs(chrootRootDir + "/dev/pts"); ss.push_back("/dev/full"); - if (store.Store::config.systemFeatures.get().count("kvm") && pathExists("/dev/kvm")) + if (systemFeatures.count("kvm") && pathExists("/dev/kvm")) ss.push_back("/dev/kvm"); ss.push_back("/dev/null"); ss.push_back("/dev/random"); From d1bdaef04e7ca46949b36f0eb0aa76c89014a3fa Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 27 Aug 2025 15:31:46 -0400 Subject: [PATCH 266/382] Factor out `checkOutputs` We currently just use this during the build of a derivation, but there is no reason we wouldn't want to use it elsewhere, e.g. to check the outputs of someone else's build after the fact. Moreover, I like pulling things out of `DerivationBuilder` that are simple and don't need access to all that state. While `DerivationBuilder` is unix-only, this refactor also make the code more portable "for free". The header is private, at Eelco's request. --- src/libstore/build/derivation-check.cc | 156 ++++++++++++++++++ src/libstore/build/derivation-check.hh | 23 +++ src/libstore/meson.build | 1 + src/libstore/unix/build/derivation-builder.cc | 153 +---------------- 4 files changed, 182 insertions(+), 151 deletions(-) create mode 100644 src/libstore/build/derivation-check.cc create mode 100644 src/libstore/build/derivation-check.hh diff --git a/src/libstore/build/derivation-check.cc b/src/libstore/build/derivation-check.cc new file mode 100644 index 000000000..7473380fa --- /dev/null +++ b/src/libstore/build/derivation-check.cc @@ -0,0 +1,156 @@ +#include + +#include "nix/store/store-api.hh" + +#include "derivation-check.hh" + +namespace nix { + +void checkOutputs( + Store & store, + const StorePath & drvPath, + const decltype(DerivationOptions::outputChecks) & outputChecks, + const std::map & outputs) +{ + std::map outputsByPath; + for (auto & output : outputs) + outputsByPath.emplace(store.printStorePath(output.second.path), output.second); + + for (auto & output : outputs) { + auto & outputName = output.first; + auto & info = output.second; + + /* Compute the closure and closure size of some output. This + is slightly tricky because some of its references (namely + other outputs) may not be valid yet. 
*/ + auto getClosure = [&](const StorePath & path) { + uint64_t closureSize = 0; + StorePathSet pathsDone; + std::queue pathsLeft; + pathsLeft.push(path); + + while (!pathsLeft.empty()) { + auto path = pathsLeft.front(); + pathsLeft.pop(); + if (!pathsDone.insert(path).second) + continue; + + auto i = outputsByPath.find(store.printStorePath(path)); + if (i != outputsByPath.end()) { + closureSize += i->second.narSize; + for (auto & ref : i->second.references) + pathsLeft.push(ref); + } else { + auto info = store.queryPathInfo(path); + closureSize += info->narSize; + for (auto & ref : info->references) + pathsLeft.push(ref); + } + } + + return std::make_pair(std::move(pathsDone), closureSize); + }; + + auto applyChecks = [&](const DerivationOptions::OutputChecks & checks) { + if (checks.maxSize && info.narSize > *checks.maxSize) + throw BuildError( + "path '%s' is too large at %d bytes; limit is %d bytes", + store.printStorePath(info.path), + info.narSize, + *checks.maxSize); + + if (checks.maxClosureSize) { + uint64_t closureSize = getClosure(info.path).second; + if (closureSize > *checks.maxClosureSize) + throw BuildError( + "closure of path '%s' is too large at %d bytes; limit is %d bytes", + store.printStorePath(info.path), + closureSize, + *checks.maxClosureSize); + } + + auto checkRefs = [&](const StringSet & value, bool allowed, bool recursive) { + /* Parse a list of reference specifiers. Each element must + either be a store path, or the symbolic name of the output + of the derivation (such as `out'). */ + StorePathSet spec; + for (auto & i : value) { + if (store.isStorePath(i)) + spec.insert(store.parseStorePath(i)); + else if (auto output = get(outputs, i)) + spec.insert(output->path); + else { + std::string outputsListing = + concatMapStringsSep(", ", outputs, [](auto & o) { return o.first; }); + throw BuildError( + "derivation '%s' output check for '%s' contains an illegal reference specifier '%s'," + " expected store path or output name (one of [%s])", + store.printStorePath(drvPath), + outputName, + i, + outputsListing); + } + } + + auto used = recursive ? getClosure(info.path).first : info.references; + + if (recursive && checks.ignoreSelfRefs) + used.erase(info.path); + + StorePathSet badPaths; + + for (auto & i : used) + if (allowed) { + if (!spec.count(i)) + badPaths.insert(i); + } else { + if (spec.count(i)) + badPaths.insert(i); + } + + if (!badPaths.empty()) { + std::string badPathsStr; + for (auto & i : badPaths) { + badPathsStr += "\n "; + badPathsStr += store.printStorePath(i); + } + throw BuildError( + "output '%s' is not allowed to refer to the following paths:%s", + store.printStorePath(info.path), + badPathsStr); + } + }; + + /* Mandatory check: absent whitelist, and present but empty + whitelist mean very different things. */ + if (auto & refs = checks.allowedReferences) { + checkRefs(*refs, true, false); + } + if (auto & refs = checks.allowedRequisites) { + checkRefs(*refs, true, true); + } + + /* Optimization: don't need to do anything when + disallowed and empty set. 
*/ + if (!checks.disallowedReferences.empty()) { + checkRefs(checks.disallowedReferences, false, false); + } + if (!checks.disallowedRequisites.empty()) { + checkRefs(checks.disallowedRequisites, false, true); + } + }; + + std::visit( + overloaded{ + [&](const DerivationOptions::OutputChecks & checks) { applyChecks(checks); }, + [&](const std::map & checksPerOutput) { + if (auto outputChecks = get(checksPerOutput, outputName)) + + applyChecks(*outputChecks); + }, + }, + outputChecks); + } +} + +} // namespace nix diff --git a/src/libstore/build/derivation-check.hh b/src/libstore/build/derivation-check.hh new file mode 100644 index 000000000..249e176c5 --- /dev/null +++ b/src/libstore/build/derivation-check.hh @@ -0,0 +1,23 @@ +#include "nix/store/derivations.hh" +#include "nix/store/derivation-options.hh" +#include "nix/store/path-info.hh" + +namespace nix { + +/** + * Check that outputs meets the requirements specified by the + * 'outputChecks' attribute (or the legacy + * '{allowed,disallowed}{References,Requisites}' attributes). + * + * The outputs may not be valid yet, hence outputs needs to contain all + * needed info like the NAR size. However, the external (not other + * output) references of the output must be valid, so we can compute the + * closure size. + */ +void checkOutputs( + Store & store, + const StorePath & drvPath, + const decltype(DerivationOptions::outputChecks) & drvOptions, + const std::map & outputs); + +} // namespace nix diff --git a/src/libstore/meson.build b/src/libstore/meson.build index ad130945e..ca8eac12b 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -265,6 +265,7 @@ sources = files( 'binary-cache-store.cc', 'build-result.cc', 'build/derivation-building-goal.cc', + 'build/derivation-check.cc', 'build/derivation-goal.cc', 'build/derivation-trampoline-goal.cc', 'build/drv-output-substitution-goal.cc', diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 51b44719d..bd5f975fb 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -42,6 +42,7 @@ #include "nix/util/signals.hh" #include "store-config-private.hh" +#include "build/derivation-check.hh" namespace nix { @@ -335,13 +336,6 @@ private: */ SingleDrvOutputs registerOutputs(); - /** - * Check that an output meets the requirements specified by the - * 'outputChecks' attribute (or the legacy - * '{allowed,disallowed}{References,Requisites}' attributes). - */ - void checkOutputs(const std::map & outputs); - public: void deleteTmpDir(bool force) override; @@ -1810,7 +1804,7 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() } /* Apply output checks. */ - checkOutputs(infos); + checkOutputs(store, drvPath, drvOptions.outputChecks, infos); /* Register each output path as valid, and register the sets of paths referenced by each of them. If there are cycles in the @@ -1849,149 +1843,6 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() return builtOutputs; } -void DerivationBuilderImpl::checkOutputs(const std::map & outputs) -{ - std::map outputsByPath; - for (auto & output : outputs) - outputsByPath.emplace(store.printStorePath(output.second.path), output.second); - - for (auto & output : outputs) { - auto & outputName = output.first; - auto & info = output.second; - - /* Compute the closure and closure size of some output. This - is slightly tricky because some of its references (namely - other outputs) may not be valid yet. 
*/ - auto getClosure = [&](const StorePath & path) { - uint64_t closureSize = 0; - StorePathSet pathsDone; - std::queue pathsLeft; - pathsLeft.push(path); - - while (!pathsLeft.empty()) { - auto path = pathsLeft.front(); - pathsLeft.pop(); - if (!pathsDone.insert(path).second) - continue; - - auto i = outputsByPath.find(store.printStorePath(path)); - if (i != outputsByPath.end()) { - closureSize += i->second.narSize; - for (auto & ref : i->second.references) - pathsLeft.push(ref); - } else { - auto info = store.queryPathInfo(path); - closureSize += info->narSize; - for (auto & ref : info->references) - pathsLeft.push(ref); - } - } - - return std::make_pair(std::move(pathsDone), closureSize); - }; - - auto applyChecks = [&](const DerivationOptions::OutputChecks & checks) { - if (checks.maxSize && info.narSize > *checks.maxSize) - throw BuildError( - "path '%s' is too large at %d bytes; limit is %d bytes", - store.printStorePath(info.path), - info.narSize, - *checks.maxSize); - - if (checks.maxClosureSize) { - uint64_t closureSize = getClosure(info.path).second; - if (closureSize > *checks.maxClosureSize) - throw BuildError( - "closure of path '%s' is too large at %d bytes; limit is %d bytes", - store.printStorePath(info.path), - closureSize, - *checks.maxClosureSize); - } - - auto checkRefs = [&](const StringSet & value, bool allowed, bool recursive) { - /* Parse a list of reference specifiers. Each element must - either be a store path, or the symbolic name of the output - of the derivation (such as `out'). */ - StorePathSet spec; - for (auto & i : value) { - if (store.isStorePath(i)) - spec.insert(store.parseStorePath(i)); - else if (auto output = get(outputs, i)) - spec.insert(output->path); - else { - std::string outputsListing = - concatMapStringsSep(", ", outputs, [](auto & o) { return o.first; }); - throw BuildError( - "derivation '%s' output check for '%s' contains an illegal reference specifier '%s'," - " expected store path or output name (one of [%s])", - store.printStorePath(drvPath), - outputName, - i, - outputsListing); - } - } - - auto used = recursive ? getClosure(info.path).first : info.references; - - if (recursive && checks.ignoreSelfRefs) - used.erase(info.path); - - StorePathSet badPaths; - - for (auto & i : used) - if (allowed) { - if (!spec.count(i)) - badPaths.insert(i); - } else { - if (spec.count(i)) - badPaths.insert(i); - } - - if (!badPaths.empty()) { - std::string badPathsStr; - for (auto & i : badPaths) { - badPathsStr += "\n "; - badPathsStr += store.printStorePath(i); - } - throw BuildError( - "output '%s' is not allowed to refer to the following paths:%s", - store.printStorePath(info.path), - badPathsStr); - } - }; - - /* Mandatory check: absent whitelist, and present but empty - whitelist mean very different things. */ - if (auto & refs = checks.allowedReferences) { - checkRefs(*refs, true, false); - } - if (auto & refs = checks.allowedRequisites) { - checkRefs(*refs, true, true); - } - - /* Optimization: don't need to do anything when - disallowed and empty set. 
*/ - if (!checks.disallowedReferences.empty()) { - checkRefs(checks.disallowedReferences, false, false); - } - if (!checks.disallowedRequisites.empty()) { - checkRefs(checks.disallowedRequisites, false, true); - } - }; - - std::visit( - overloaded{ - [&](const DerivationOptions::OutputChecks & checks) { applyChecks(checks); }, - [&](const std::map & checksPerOutput) { - if (auto outputChecks = get(checksPerOutput, outputName)) - - applyChecks(*outputChecks); - }, - }, - drvOptions.outputChecks); - } -} - void DerivationBuilderImpl::deleteTmpDir(bool force) { if (topTmpDir != "") { From 241abcca8640dc81e057f7398e9d860083db2d3f Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Wed, 27 Aug 2025 21:13:59 +0000 Subject: [PATCH 267/382] refactor(libstore/http-binary-cache-store): pragma once --- src/libstore/include/nix/store/http-binary-cache-store.hh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/libstore/include/nix/store/http-binary-cache-store.hh b/src/libstore/include/nix/store/http-binary-cache-store.hh index e0f6ce42f..4102c858f 100644 --- a/src/libstore/include/nix/store/http-binary-cache-store.hh +++ b/src/libstore/include/nix/store/http-binary-cache-store.hh @@ -1,3 +1,6 @@ +#pragma once +///@file + #include "nix/util/url.hh" #include "nix/store/binary-cache-store.hh" From 0590b131565311d6f48ea31412d11a61e0a85303 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 27 Aug 2025 19:03:49 -0400 Subject: [PATCH 268/382] Revert "Add a crude tracing mechansim for the build results" The commit says it was added for CA testing --- manual I assume, since there is no use of this in the test suite. I don't think we need it any more, and I am not sure whether it was ever supposed to have made it to `master` either. This reverts commit 2eec2f765a86b8954f3a74ff148bc70a2d32be27. --- .../build/derivation-building-goal.cc | 7 ---- src/libstore/build/derivation-goal.cc | 7 ---- .../include/nix/store/build-result.hh | 41 ------------------- 3 files changed, 55 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 997cd6140..c290852fc 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -1368,13 +1368,6 @@ DerivationBuildingGoal::done(BuildResult::Status status, SingleDrvOutputs builtO worker.updateProgress(); - auto traceBuiltOutputsFile = getEnv("_NIX_TRACE_BUILT_OUTPUTS").value_or(""); - if (traceBuiltOutputsFile != "") { - std::fstream fs; - fs.open(traceBuiltOutputsFile, std::fstream::out); - fs << worker.store.printStorePath(drvPath) << "\t" << buildResult.toString() << std::endl; - } - return amDone(buildResult.success() ? ecSuccess : ecFailed, std::move(ex)); } diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 883121d94..dc28225b5 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -364,13 +364,6 @@ DerivationGoal::done(BuildResult::Status status, std::optional buil worker.updateProgress(); - auto traceBuiltOutputsFile = getEnv("_NIX_TRACE_BUILT_OUTPUTS").value_or(""); - if (traceBuiltOutputsFile != "") { - std::fstream fs; - fs.open(traceBuiltOutputsFile, std::fstream::out); - fs << worker.store.printStorePath(drvPath) << "\t" << buildResult.toString() << std::endl; - } - return amDone(buildResult.success() ? 
ecSuccess : ecFailed, std::move(ex)); } diff --git a/src/libstore/include/nix/store/build-result.hh b/src/libstore/include/nix/store/build-result.hh index 3b70b781f..58138ed45 100644 --- a/src/libstore/include/nix/store/build-result.hh +++ b/src/libstore/include/nix/store/build-result.hh @@ -46,47 +46,6 @@ struct BuildResult */ std::string errorMsg; - std::string toString() const - { - auto strStatus = [&]() { - switch (status) { - case Built: - return "Built"; - case Substituted: - return "Substituted"; - case AlreadyValid: - return "AlreadyValid"; - case PermanentFailure: - return "PermanentFailure"; - case InputRejected: - return "InputRejected"; - case OutputRejected: - return "OutputRejected"; - case TransientFailure: - return "TransientFailure"; - case CachedFailure: - return "CachedFailure"; - case TimedOut: - return "TimedOut"; - case MiscFailure: - return "MiscFailure"; - case DependencyFailed: - return "DependencyFailed"; - case LogLimitExceeded: - return "LogLimitExceeded"; - case NotDeterministic: - return "NotDeterministic"; - case ResolvesToAlreadyValid: - return "ResolvesToAlreadyValid"; - case NoSubstituters: - return "NoSubstituters"; - default: - return "Unknown"; - }; - }(); - return strStatus + ((errorMsg == "") ? "" : " : " + errorMsg); - } - /** * How many times this build was performed. */ From 169033001d8f9ca44d7324446cfc93932c380295 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 27 Aug 2025 18:58:58 -0400 Subject: [PATCH 269/382] Simplify handling of statuses for build errors Instead of passing them around separately, or doing finicky logic in a try-catch block to recover them, just make `BuildError` always contain a status, and make it the thrower's responsibility to set it. This is much more simple and explicit. Once that change is done, split the `done` functions of `DerivationGoal` and `DerivationBuildingGoal` into separate success and failure functions, which ends up being easier to understand and hardly any duplication. Also, change the handling of failures in resolved cases to use `BuildResult::DependencyFailed` and a new message. This is because the underlying derivation will also get its message printed --- which is good, because in general the resolved derivation is not unique. One dyn drv test had to be updated, but CA (and dyn drv) is experimental, so I do not mind. Finally, delete `SubstError` because it is unused. 
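As a minimal sketch of the new convention (illustrative only, adapted from
the hunks below), a throw site attaches the status directly:

    throw BuildError(BuildResult::OutputRejected, "invalid ownership on file '%1%'", path);

and a goal that catches the exception can fail without reconstructing it:

    } catch (BuildError & e) {
        // the status now travels with the exception
        co_return doneFailure(std::move(e));
    }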
--- .../build/derivation-building-goal.cc | 83 +++++++++++-------- src/libstore/build/derivation-check.cc | 5 ++ src/libstore/build/derivation-goal.cc | 50 ++++++----- src/libstore/derivation-options.cc | 3 +- .../include/nix/store/build-result.hh | 26 +++++- .../nix/store/build/derivation-builder.hh | 2 +- .../store/build/derivation-building-goal.hh | 4 +- .../nix/store/build/derivation-goal.hh | 10 +-- src/libstore/include/nix/store/store-api.hh | 5 -- src/libstore/local-store.cc | 5 +- src/libstore/misc.cc | 5 +- src/libstore/posix-fs-canonicalise.cc | 2 +- src/libstore/store-api.cc | 1 + src/libstore/unix/build/derivation-builder.cc | 40 ++++++--- .../unix/build/linux-derivation-builder.cc | 2 +- tests/functional/dyn-drv/failing-outer.sh | 2 +- 16 files changed, 153 insertions(+), 92 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index c290852fc..b732c6094 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -118,7 +118,7 @@ void DerivationBuildingGoal::timedOut(Error && ex) killChild(); // We're not inside a coroutine, hence we can't use co_return here. // Thus we ignore the return value. - [[maybe_unused]] Done _ = done(BuildResult::TimedOut, {}, std::move(ex)); + [[maybe_unused]] Done _ = doneFailure({BuildResult::TimedOut, std::move(ex)}); } /** @@ -258,7 +258,7 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() nrFailed, nrFailed == 1 ? "dependency" : "dependencies"); msg += showKnownOutputs(worker.store, *drv); - co_return done(BuildResult::DependencyFailed, {}, Error(msg)); + co_return doneFailure(BuildError(BuildResult::DependencyFailed, msg)); } /* Gather information necessary for computing the closure and/or @@ -359,9 +359,9 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() auto resolvedResult = resolvedDrvGoal->buildResult; - SingleDrvOutputs builtOutputs; - if (resolvedResult.success()) { + SingleDrvOutputs builtOutputs; + auto resolvedHashes = staticOutputHashes(worker.store, drvResolved); StorePathSet outputPaths; @@ -411,13 +411,19 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() } runPostBuildHook(worker.store, *logger, drvPath, outputPaths); + + auto status = resolvedResult.status; + if (status == BuildResult::AlreadyValid) + status = BuildResult::ResolvesToAlreadyValid; + + co_return doneSuccess(status, std::move(builtOutputs)); + } else { + co_return doneFailure({ + BuildResult::DependencyFailed, + "build of resolved derivation '%s' failed", + worker.store.printStorePath(pathResolved), + }); } - - auto status = resolvedResult.status; - if (status == BuildResult::AlreadyValid) - status = BuildResult::ResolvesToAlreadyValid; - - co_return done(status, std::move(builtOutputs)); } /* If we get this far, we know no dynamic drvs inputs */ @@ -542,7 +548,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() debug("skipping build of derivation '%s', someone beat us to it", worker.store.printStorePath(drvPath)); outputLocks.setDeletion(true); outputLocks.unlock(); - co_return done(BuildResult::AlreadyValid, std::move(validOutputs)); + co_return doneSuccess(BuildResult::AlreadyValid, std::move(validOutputs)); } /* If any of the outputs already exist but are not valid, delete @@ -752,7 +758,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() } catch (BuildError & e) { outputLocks.unlock(); worker.permanentFailure = true; - co_return done(BuildResult::InputRejected, {}, std::move(e)); + co_return 
doneFailure(std::move(e)); } /* If we have to wait and retry (see below), then `builder` will @@ -800,7 +806,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() builder.reset(); outputLocks.unlock(); worker.permanentFailure = true; - co_return done(BuildResult::InputRejected, {}, std::move(e)); + co_return doneFailure(std::move(e)); // InputRejected } started(); @@ -812,7 +818,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() // N.B. cannot use `std::visit` with co-routine return if (auto * ste = std::get_if<0>(&res)) { outputLocks.unlock(); - co_return done(std::move(ste->first), {}, std::move(ste->second)); + co_return doneFailure(std::move(*ste)); } else if (auto * builtOutputs = std::get_if<1>(&res)) { StorePathSet outputPaths; for (auto & [_, output] : *builtOutputs) @@ -825,7 +831,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() (unlinked) lock files. */ outputLocks.setDeletion(true); outputLocks.unlock(); - co_return done(BuildResult::Built, std::move(*builtOutputs)); + co_return doneSuccess(BuildResult::Built, std::move(*builtOutputs)); } else { unreachable(); } @@ -970,7 +976,7 @@ Goal::Co DerivationBuildingGoal::hookDone() /* TODO (once again) support fine-grained error codes, see issue #12641. */ - co_return done(BuildResult::MiscFailure, {}, BuildError(msg)); + co_return doneFailure(BuildError{BuildResult::MiscFailure, msg}); } /* Compute the FS closure of the outputs and register them as @@ -997,7 +1003,7 @@ Goal::Co DerivationBuildingGoal::hookDone() outputLocks.setDeletion(true); outputLocks.unlock(); - co_return done(BuildResult::Built, std::move(builtOutputs)); + co_return doneSuccess(BuildResult::Built, std::move(builtOutputs)); } HookReply DerivationBuildingGoal::tryBuildHook() @@ -1179,10 +1185,11 @@ void DerivationBuildingGoal::handleChildOutput(Descriptor fd, std::string_view d killChild(); // We're not inside a coroutine, hence we can't use co_return here. // Thus we ignore the return value. 
- [[maybe_unused]] Done _ = done( + [[maybe_unused]] Done _ = doneFailure(BuildError( BuildResult::LogLimitExceeded, - {}, - Error("%s killed after writing more than %d bytes of log output", getName(), settings.maxLogSize)); + "%s killed after writing more than %d bytes of log output", + getName(), + settings.maxLogSize)); return; } @@ -1343,13 +1350,27 @@ SingleDrvOutputs DerivationBuildingGoal::assertPathValidity() return validOutputs; } -Goal::Done -DerivationBuildingGoal::done(BuildResult::Status status, SingleDrvOutputs builtOutputs, std::optional ex) +Goal::Done DerivationBuildingGoal::doneSuccess(BuildResult::Status status, SingleDrvOutputs builtOutputs) { - outputLocks.unlock(); buildResult.status = status; - if (ex) - buildResult.errorMsg = fmt("%s", Uncolored(ex->info().msg)); + + assert(buildResult.success()); + + mcRunningBuilds.reset(); + + buildResult.builtOutputs = std::move(builtOutputs); + if (status == BuildResult::Built) + worker.doneBuilds++; + + worker.updateProgress(); + + return amDone(ecSuccess, std::nullopt); +} + +Goal::Done DerivationBuildingGoal::doneFailure(BuildError ex) +{ + buildResult.status = ex.status; + buildResult.errorMsg = fmt("%s", Uncolored(ex.info().msg)); if (buildResult.status == BuildResult::TimedOut) worker.timedOut = true; if (buildResult.status == BuildResult::PermanentFailure) @@ -1357,18 +1378,12 @@ DerivationBuildingGoal::done(BuildResult::Status status, SingleDrvOutputs builtO mcRunningBuilds.reset(); - if (buildResult.success()) { - buildResult.builtOutputs = std::move(builtOutputs); - if (status == BuildResult::Built) - worker.doneBuilds++; - } else { - if (status != BuildResult::DependencyFailed) - worker.failedBuilds++; - } + if (ex.status != BuildResult::DependencyFailed) + worker.failedBuilds++; worker.updateProgress(); - return amDone(buildResult.success() ? 
ecSuccess : ecFailed, std::move(ex)); + return amDone(ecFailed, {std::move(ex)}); } } // namespace nix diff --git a/src/libstore/build/derivation-check.cc b/src/libstore/build/derivation-check.cc index 7473380fa..c5b489b23 100644 --- a/src/libstore/build/derivation-check.cc +++ b/src/libstore/build/derivation-check.cc @@ -1,6 +1,7 @@ #include #include "nix/store/store-api.hh" +#include "nix/store/build-result.hh" #include "derivation-check.hh" @@ -54,6 +55,7 @@ void checkOutputs( auto applyChecks = [&](const DerivationOptions::OutputChecks & checks) { if (checks.maxSize && info.narSize > *checks.maxSize) throw BuildError( + BuildResult::OutputRejected, "path '%s' is too large at %d bytes; limit is %d bytes", store.printStorePath(info.path), info.narSize, @@ -63,6 +65,7 @@ void checkOutputs( uint64_t closureSize = getClosure(info.path).second; if (closureSize > *checks.maxClosureSize) throw BuildError( + BuildResult::OutputRejected, "closure of path '%s' is too large at %d bytes; limit is %d bytes", store.printStorePath(info.path), closureSize, @@ -83,6 +86,7 @@ void checkOutputs( std::string outputsListing = concatMapStringsSep(", ", outputs, [](auto & o) { return o.first; }); throw BuildError( + BuildResult::OutputRejected, "derivation '%s' output check for '%s' contains an illegal reference specifier '%s'," " expected store path or output name (one of [%s])", store.printStorePath(drvPath), @@ -115,6 +119,7 @@ void checkOutputs( badPathsStr += store.printStorePath(i); } throw BuildError( + BuildResult::OutputRejected, "output '%s' is not allowed to refer to the following paths:%s", store.printStorePath(info.path), badPathsStr); diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index dc28225b5..b9046744a 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -94,7 +94,7 @@ Goal::Co DerivationGoal::haveDerivation() /* If they are all valid, then we're done. 
*/ if (checkResult && checkResult->second == PathStatus::Valid && buildMode == bmNormal) { - co_return done(BuildResult::AlreadyValid, checkResult->first); + co_return doneSuccess(BuildResult::AlreadyValid, checkResult->first); } Goals waitees; @@ -122,12 +122,10 @@ Goal::Co DerivationGoal::haveDerivation() assert(!drv->type().isImpure()); if (nrFailed > 0 && nrFailed > nrNoSubstituters && !settings.tryFallback) { - co_return done( + co_return doneFailure(BuildError( BuildResult::TransientFailure, - {}, - Error( - "some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ", - worker.store.printStorePath(drvPath))); + "some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ", + worker.store.printStorePath(drvPath))); } nrFailed = nrNoSubstituters = 0; @@ -137,7 +135,7 @@ Goal::Co DerivationGoal::haveDerivation() bool allValid = checkResult && checkResult->second == PathStatus::Valid; if (buildMode == bmNormal && allValid) { - co_return done(BuildResult::Substituted, checkResult->first); + co_return doneSuccess(BuildResult::Substituted, checkResult->first); } if (buildMode == bmRepair && allValid) { co_return repairClosure(); @@ -281,7 +279,7 @@ Goal::Co DerivationGoal::repairClosure() "some paths in the output closure of derivation '%s' could not be repaired", worker.store.printStorePath(drvPath)); } - co_return done(BuildResult::AlreadyValid, assertPathValidity()); + co_return doneSuccess(BuildResult::AlreadyValid, assertPathValidity()); } std::optional> DerivationGoal::checkPathValidity() @@ -339,12 +337,27 @@ Realisation DerivationGoal::assertPathValidity() return checkResult->first; } -Goal::Done -DerivationGoal::done(BuildResult::Status status, std::optional builtOutput, std::optional ex) +Goal::Done DerivationGoal::doneSuccess(BuildResult::Status status, Realisation builtOutput) { buildResult.status = status; - if (ex) - buildResult.errorMsg = fmt("%s", Uncolored(ex->info().msg)); + + assert(buildResult.success()); + + mcExpectedBuilds.reset(); + + buildResult.builtOutputs = {{wantedOutput, std::move(builtOutput)}}; + if (status == BuildResult::Built) + worker.doneBuilds++; + + worker.updateProgress(); + + return amDone(ecSuccess, std::nullopt); +} + +Goal::Done DerivationGoal::doneFailure(BuildError ex) +{ + buildResult.status = ex.status; + buildResult.errorMsg = fmt("%s", Uncolored(ex.info().msg)); if (buildResult.status == BuildResult::TimedOut) worker.timedOut = true; if (buildResult.status == BuildResult::PermanentFailure) @@ -352,19 +365,12 @@ DerivationGoal::done(BuildResult::Status status, std::optional buil mcExpectedBuilds.reset(); - if (buildResult.success()) { - assert(builtOutput); - buildResult.builtOutputs = {{wantedOutput, std::move(*builtOutput)}}; - if (status == BuildResult::Built) - worker.doneBuilds++; - } else { - if (status != BuildResult::DependencyFailed) - worker.failedBuilds++; - } + if (ex.status != BuildResult::DependencyFailed) + worker.failedBuilds++; worker.updateProgress(); - return amDone(buildResult.success() ? 
ecSuccess : ecFailed, std::move(ex)); + return amDone(ecFailed, {std::move(ex)}); } } // namespace nix diff --git a/src/libstore/derivation-options.cc b/src/libstore/derivation-options.cc index 1acb9dc03..630159629 100644 --- a/src/libstore/derivation-options.cc +++ b/src/libstore/derivation-options.cc @@ -265,7 +265,8 @@ DerivationOptions::getParsedExportReferencesGraph(const StoreDirConfig & store) StorePathSet storePaths; for (auto & storePathS : ss) { if (!store.isInStore(storePathS)) - throw BuildError("'exportReferencesGraph' contains a non-store path '%1%'", storePathS); + throw BuildError( + BuildResult::InputRejected, "'exportReferencesGraph' contains a non-store path '%1%'", storePathS); storePaths.insert(store.toStorePath(storePathS).first); } res.insert_or_assign(fileName, storePaths); diff --git a/src/libstore/include/nix/store/build-result.hh b/src/libstore/include/nix/store/build-result.hh index 58138ed45..a743aa387 100644 --- a/src/libstore/include/nix/store/build-result.hh +++ b/src/libstore/include/nix/store/build-result.hh @@ -1,13 +1,13 @@ #pragma once ///@file -#include "nix/store/realisation.hh" -#include "nix/store/derived-path.hh" - #include #include #include +#include "nix/store/derived-path.hh" +#include "nix/store/realisation.hh" + namespace nix { struct BuildResult @@ -90,6 +90,26 @@ struct BuildResult } }; +/** + * denotes a permanent build failure + */ +struct BuildError : public Error +{ + BuildResult::Status status; + + BuildError(BuildResult::Status status, BuildError && error) + : Error{std::move(error)} + , status{status} + { + } + + BuildError(BuildResult::Status status, auto &&... args) + : Error{args...} + , status{status} + { + } +}; + /** * A `BuildResult` together with its "primary key". */ diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index f00d4db25..a82fc98ea 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -191,7 +191,7 @@ struct DerivationBuilder : RestrictionContext * more information. The second case indicates success, and * realisations for each output of the derivation are returned. */ - virtual std::variant, SingleDrvOutputs> unprepareBuild() = 0; + virtual std::variant unprepareBuild() = 0; /** * Stop the in-process nix daemon thread. diff --git a/src/libstore/include/nix/store/build/derivation-building-goal.hh b/src/libstore/include/nix/store/build/derivation-building-goal.hh index 95949649c..38f0fc7bf 100644 --- a/src/libstore/include/nix/store/build/derivation-building-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-building-goal.hh @@ -170,7 +170,9 @@ struct DerivationBuildingGoal : public Goal void started(); - Done done(BuildResult::Status status, SingleDrvOutputs builtOutputs = {}, std::optional ex = {}); + Done doneSuccess(BuildResult::Status status, SingleDrvOutputs builtOutputs); + + Done doneFailure(BuildError ex); void appendLogTailErrorMsg(std::string & msg); diff --git a/src/libstore/include/nix/store/build/derivation-goal.hh b/src/libstore/include/nix/store/build/derivation-goal.hh index d9042d136..85b471e28 100644 --- a/src/libstore/include/nix/store/build/derivation-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-goal.hh @@ -99,13 +99,9 @@ private: Co repairClosure(); - /** - * @param builtOutput Must be set if `status` is successful. 
- */ - Done done( - BuildResult::Status status, - std::optional builtOutput = std::nullopt, - std::optional ex = {}); + Done doneSuccess(BuildResult::Status status, Realisation builtOutput); + + Done doneFailure(BuildError ex); }; } // namespace nix diff --git a/src/libstore/include/nix/store/store-api.hh b/src/libstore/include/nix/store/store-api.hh index 987ed4d48..7d019ea21 100644 --- a/src/libstore/include/nix/store/store-api.hh +++ b/src/libstore/include/nix/store/store-api.hh @@ -24,11 +24,6 @@ namespace nix { -MakeError(SubstError, Error); -/** - * denotes a permanent build failure - */ -MakeError(BuildError, Error); MakeError(InvalidPath, Error); MakeError(Unsupported, Error); MakeError(SubstituteGone, Error); diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index a66a97866..7872d4f93 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1002,7 +1002,10 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos) }}, {[&](const StorePath & path, const StorePath & parent) { return BuildError( - "cycle detected in the references of '%s' from '%s'", printStorePath(path), printStorePath(parent)); + BuildResult::OutputRejected, + "cycle detected in the references of '%s' from '%s'", + printStorePath(path), + printStorePath(parent)); }}); txn.commit(); diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index c794f8d06..8de41fe19 100644 --- a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -322,7 +322,10 @@ StorePaths Store::topoSortPaths(const StorePathSet & paths) }}, {[&](const StorePath & path, const StorePath & parent) { return BuildError( - "cycle detected in the references of '%s' from '%s'", printStorePath(path), printStorePath(parent)); + BuildResult::OutputRejected, + "cycle detected in the references of '%s' from '%s'", + printStorePath(path), + printStorePath(parent)); }}); } diff --git a/src/libstore/posix-fs-canonicalise.cc b/src/libstore/posix-fs-canonicalise.cc index a889938c9..b6a64e65b 100644 --- a/src/libstore/posix-fs-canonicalise.cc +++ b/src/libstore/posix-fs-canonicalise.cc @@ -98,7 +98,7 @@ static void canonicalisePathMetaData_( (i.e. "touch $out/foo; ln $out/foo $out/bar"). */ if (uidRange && (st.st_uid < uidRange->first || st.st_uid > uidRange->second)) { if (S_ISDIR(st.st_mode) || !inodesSeen.count(Inode(st.st_dev, st.st_ino))) - throw BuildError("invalid ownership on file '%1%'", path); + throw BuildError(BuildResult::OutputRejected, "invalid ownership on file '%1%'", path); mode_t mode = st.st_mode & ~S_IFMT; assert( S_ISLNK(st.st_mode) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index fad79a83e..d96be5965 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -770,6 +770,7 @@ StorePathSet Store::exportReferences(const StorePathSet & storePaths, const Stor for (auto & storePath : storePaths) { if (!inputPaths.count(storePath)) throw BuildError( + BuildResult::InputRejected, "cannot export references of path '%s' because it is not in the input closure of the derivation", printStorePath(storePath)); diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index bd5f975fb..3d55dcecd 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -46,7 +46,13 @@ namespace nix { -MakeError(NotDeterministic, BuildError); +struct NotDeterministic : BuildError +{ + NotDeterministic(auto &&... args) + : BuildError(BuildResult::NotDeterministic, args...) 
+ { + } +}; /** * This class represents the state for building locally. @@ -185,7 +191,7 @@ public: void startBuilder() override; - std::variant, SingleDrvOutputs> unprepareBuild() override; + std::variant unprepareBuild() override; protected: @@ -420,7 +426,7 @@ bool DerivationBuilderImpl::prepareBuild() return true; } -std::variant, SingleDrvOutputs> DerivationBuilderImpl::unprepareBuild() +std::variant DerivationBuilderImpl::unprepareBuild() { // FIXME: get rid of this, rely on RAII. Finally releaseBuildUser([&]() { @@ -493,7 +499,10 @@ std::variant, SingleDrvOutputs> Derivation if (diskFull) msg += "\nnote: build failure may have been caused by lack of free disk space"; - throw BuildError(msg); + throw BuildError( + !derivationType.isSandboxed() || diskFull ? BuildResult::TransientFailure + : BuildResult::PermanentFailure, + msg); } /* Compute the FS closure of the outputs and register them as @@ -509,12 +518,7 @@ std::variant, SingleDrvOutputs> Derivation return std::move(builtOutputs); } catch (BuildError & e) { - BuildResult::Status st = dynamic_cast(&e) ? BuildResult::NotDeterministic - : statusOk(status) ? BuildResult::OutputRejected - : !derivationType.isSandboxed() || diskFull ? BuildResult::TransientFailure - : BuildResult::PermanentFailure; - - return std::pair{std::move(st), std::move(e)}; + return std::move(e); } } @@ -682,7 +686,7 @@ void DerivationBuilderImpl::startBuilder() fmt("\nNote: run `%s` to run programs for x86_64-darwin", Magenta("/usr/sbin/softwareupdate --install-rosetta && launchctl stop org.nixos.nix-daemon")); - throw BuildError(msg); + throw BuildError(BuildResult::InputRejected, msg); } auto buildDir = store.config->getBuildDir(); @@ -1378,6 +1382,7 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() auto optSt = maybeLstat(actualPath.c_str()); if (!optSt) throw BuildError( + BuildResult::OutputRejected, "builder for '%s' failed to produce output path for output '%s' at '%s'", store.printStorePath(drvPath), outputName, @@ -1392,6 +1397,7 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() if ((!S_ISLNK(st.st_mode) && (st.st_mode & (S_IWGRP | S_IWOTH))) || (buildUser && st.st_uid != buildUser->getUID())) throw BuildError( + BuildResult::OutputRejected, "suspicious ownership or permission on '%s' for output '%s'; rejecting this build output", actualPath, outputName); @@ -1428,7 +1434,11 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() {[&](const std::string & name) { auto orifu = get(outputReferencesIfUnregistered, name); if (!orifu) - throw BuildError("no output reference for '%s' in build of '%s'", name, store.printStorePath(drvPath)); + throw BuildError( + BuildResult::OutputRejected, + "no output reference for '%s' in build of '%s'", + name, + store.printStorePath(drvPath)); return std::visit( overloaded{ /* Since we'll use the already installed versions of these, we @@ -1450,6 +1460,7 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() {[&](const std::string & path, const std::string & parent) { // TODO with more -vvvv also show the temporary paths for manual inspection. 
return BuildError( + BuildResult::OutputRejected, "cycle detected in build of '%s' in the references of output '%s' from output '%s'", store.printStorePath(drvPath), path, @@ -1543,11 +1554,12 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() auto newInfoFromCA = [&](const DerivationOutput::CAFloating outputHash) -> ValidPathInfo { auto st = get(outputStats, outputName); if (!st) - throw BuildError("output path %1% without valid stats info", actualPath); + throw BuildError(BuildResult::OutputRejected, "output path %1% without valid stats info", actualPath); if (outputHash.method.getFileIngestionMethod() == FileIngestionMethod::Flat) { /* The output path should be a regular file without execute permission. */ if (!S_ISREG(st->st_mode) || (st->st_mode & S_IXUSR) != 0) throw BuildError( + BuildResult::OutputRejected, "output path '%1%' should be a non-executable regular file " "since recursive hashing is not enabled (one of outputHashMode={flat,text} is true)", actualPath); @@ -1649,6 +1661,7 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() valid. */ miscMethods->noteHashMismatch(); delayedException = std::make_exception_ptr(BuildError( + BuildResult::OutputRejected, "hash mismatch in fixed-output derivation '%s':\n specified: %s\n got: %s", store.printStorePath(drvPath), wanted.to_string(HashFormat::SRI, true), @@ -1657,6 +1670,7 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() if (!newInfo0.references.empty()) { auto numViolations = newInfo.references.size(); delayedException = std::make_exception_ptr(BuildError( + BuildResult::OutputRejected, "fixed-output derivations must not reference store paths: '%s' references %d distinct paths, e.g. '%s'", store.printStorePath(drvPath), numViolations, diff --git a/src/libstore/unix/build/linux-derivation-builder.cc b/src/libstore/unix/build/linux-derivation-builder.cc index b92d05607..39b8f09ae 100644 --- a/src/libstore/unix/build/linux-derivation-builder.cc +++ b/src/libstore/unix/build/linux-derivation-builder.cc @@ -659,7 +659,7 @@ struct ChrootLinuxDerivationBuilder : ChrootDerivationBuilder, LinuxDerivationBu throw SysError("setuid failed"); } - std::variant, SingleDrvOutputs> unprepareBuild() override + std::variant unprepareBuild() override { sandboxMountNamespace = -1; sandboxUserNamespace = -1; diff --git a/tests/functional/dyn-drv/failing-outer.sh b/tests/functional/dyn-drv/failing-outer.sh index 3feda74fb..596efe43d 100644 --- a/tests/functional/dyn-drv/failing-outer.sh +++ b/tests/functional/dyn-drv/failing-outer.sh @@ -9,4 +9,4 @@ expected=100 if [[ -v NIX_DAEMON_PACKAGE ]]; then expected=1; fi # work around the daemon not returning a 100 status correctly expectStderr "$expected" nix-build ./text-hashed-output.nix -A failingWrapper --no-out-link \ - | grepQuiet "build of '.*use-dynamic-drv-in-non-dynamic-drv-wrong.drv' failed" + | grepQuiet "build of resolved derivation '.*use-dynamic-drv-in-non-dynamic-drv-wrong.drv' failed" From ff961fd9e29e242f7c01e98edc5c55eecf97751f Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 28 Aug 2025 11:01:34 -0400 Subject: [PATCH 270/382] Get rid of `DerivationBuilder::note*Mismatch` It's fine to set these worker flags a little later in the control flow, since we'll be sure to reach those points in the error cases. And doing that is much nicer than having these tangled callbacks. I originally made the callbacks to meticulously recreate the exact behavior which I didn't quite understand. 
Now, thanks to cleaning up the error handling, I do understand what is going on, so I can be confident that this change is safe to make. --- .../build/derivation-building-goal.cc | 30 ++++++++++++------- .../include/nix/store/build-result.hh | 4 +++ .../nix/store/build/derivation-builder.hh | 3 -- src/libstore/unix/build/derivation-builder.cc | 6 ++-- 4 files changed, 26 insertions(+), 17 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index b732c6094..c9b562817 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -652,16 +652,6 @@ Goal::Co DerivationBuildingGoal::tryToBuild() goal.worker.childTerminated(&goal); } - void noteHashMismatch() override - { - goal.worker.hashMismatch = true; - } - - void noteCheckMismatch() override - { - goal.worker.checkMismatch = true; - } - void markContentsGood(const StorePath & path) override { goal.worker.markContentsGood(path); @@ -818,6 +808,26 @@ Goal::Co DerivationBuildingGoal::tryToBuild() // N.B. cannot use `std::visit` with co-routine return if (auto * ste = std::get_if<0>(&res)) { outputLocks.unlock(); +// Allow selecting a subset of enum values +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Wswitch-enum" + switch (ste->status) { + case BuildResult::HashMismatch: + worker.hashMismatch = true; + /* See header, the protocols don't know about `HashMismatch` + yet, so change it to `OutputRejected`, which they expect + for this case (hash mismatch is a type of output + rejection). */ + ste->status = BuildResult::OutputRejected; + break; + case BuildResult::NotDeterministic: + worker.checkMismatch = true; + break; + default: + /* Other statuses need no adjusting */ + break; + } +# pragma GCC diagnostic pop co_return doneFailure(std::move(*ste)); } else if (auto * builtOutputs = std::get_if<1>(&res)) { StorePathSet outputPaths; diff --git a/src/libstore/include/nix/store/build-result.hh b/src/libstore/include/nix/store/build-result.hh index a743aa387..d7249d420 100644 --- a/src/libstore/include/nix/store/build-result.hh +++ b/src/libstore/include/nix/store/build-result.hh @@ -36,6 +36,10 @@ struct BuildResult NotDeterministic, ResolvesToAlreadyValid, NoSubstituters, + /// A certain type of `OutputRejected`. The protocols do not yet + /// know about this one, so change it back to `OutputRejected` + /// before serialization. + HashMismatch, } status = MiscFailure; /** diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index a82fc98ea..fd487c5fe 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -134,9 +134,6 @@ struct DerivationBuilderCallbacks */ virtual void childTerminated() = 0; - virtual void noteHashMismatch(void) = 0; - virtual void noteCheckMismatch(void) = 0; - virtual void markContentsGood(const StorePath & path) = 0; }; diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 3d55dcecd..710e5a2b2 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -1659,9 +1659,8 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() if (wanted != got) { /* Throw an error after registering the path as valid. 
*/ - miscMethods->noteHashMismatch(); delayedException = std::make_exception_ptr(BuildError( - BuildResult::OutputRejected, + BuildResult::HashMismatch, "hash mismatch in fixed-output derivation '%s':\n specified: %s\n got: %s", store.printStorePath(drvPath), wanted.to_string(HashFormat::SRI, true), @@ -1670,7 +1669,7 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() if (!newInfo0.references.empty()) { auto numViolations = newInfo.references.size(); delayedException = std::make_exception_ptr(BuildError( - BuildResult::OutputRejected, + BuildResult::HashMismatch, "fixed-output derivations must not reference store paths: '%s' references %d distinct paths, e.g. '%s'", store.printStorePath(drvPath), numViolations, @@ -1746,7 +1745,6 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() continue; ValidPathInfo oldInfo(*store.queryPathInfo(newInfo.path)); if (newInfo.narHash != oldInfo.narHash) { - miscMethods->noteCheckMismatch(); if (settings.runDiffHook || settings.keepFailed) { auto dst = store.toRealPath(finalDestPath + ".check"); deletePath(dst); From 0b85b023d8ee3ab75e4c2511a0f391eb7361d569 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 28 Aug 2025 10:51:05 -0400 Subject: [PATCH 271/382] Get rid of `delayedException` in `DerivationBuilder` Instead of that funny business, the fixed output checks are not put in `checkOutputs`, with the other (newer) output checks, where they also better belong. The control flow is reworked (with comments!) so that `checkOutputs` also runs in the `bmCheck` case. Not only does this preserve existing behavior of `bmCheck` double-checking fixed output hashes with less tricky code, it also makes `bmCheck` better by also double-checking the other output checks, rather than just assuming they pass if the derivation is deterministic. --- src/libstore/build/derivation-check.cc | 35 +++- src/libstore/build/derivation-check.hh | 1 + src/libstore/unix/build/derivation-builder.cc | 177 ++++++++---------- 3 files changed, 109 insertions(+), 104 deletions(-) diff --git a/src/libstore/build/derivation-check.cc b/src/libstore/build/derivation-check.cc index c5b489b23..82e92e1f3 100644 --- a/src/libstore/build/derivation-check.cc +++ b/src/libstore/build/derivation-check.cc @@ -10,6 +10,7 @@ namespace nix { void checkOutputs( Store & store, const StorePath & drvPath, + const decltype(Derivation::outputs) & drvOutputs, const decltype(DerivationOptions::outputChecks) & outputChecks, const std::map & outputs) { @@ -17,9 +18,37 @@ void checkOutputs( for (auto & output : outputs) outputsByPath.emplace(store.printStorePath(output.second.path), output.second); - for (auto & output : outputs) { - auto & outputName = output.first; - auto & info = output.second; + for (auto & [outputName, info] : outputs) { + + auto * outputSpec = get(drvOutputs, outputName); + assert(outputSpec); + + if (const auto * dof = std::get_if(&outputSpec->raw)) { + auto & wanted = dof->ca.hash; + + /* Check wanted hash */ + assert(info.ca); + auto & got = info.ca->hash; + if (wanted != got) { + /* Throw an error after registering the path as + valid. 
*/ + throw BuildError( + BuildResult::HashMismatch, + "hash mismatch in fixed-output derivation '%s':\n specified: %s\n got: %s", + store.printStorePath(drvPath), + wanted.to_string(HashFormat::SRI, true), + got.to_string(HashFormat::SRI, true)); + } + if (!info.references.empty()) { + auto numViolations = info.references.size(); + throw BuildError( + BuildResult::HashMismatch, + "fixed-output derivations must not reference store paths: '%s' references %d distinct paths, e.g. '%s'", + store.printStorePath(drvPath), + numViolations, + store.printStorePath(*info.references.begin())); + } + } /* Compute the closure and closure size of some output. This is slightly tricky because some of its references (namely diff --git a/src/libstore/build/derivation-check.hh b/src/libstore/build/derivation-check.hh index 249e176c5..8f6b2b6b5 100644 --- a/src/libstore/build/derivation-check.hh +++ b/src/libstore/build/derivation-check.hh @@ -17,6 +17,7 @@ namespace nix { void checkOutputs( Store & store, const StorePath & drvPath, + const decltype(Derivation::outputs) & drvOutputs, const decltype(DerivationOptions::outputChecks) & drvOptions, const std::map & outputs); diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 710e5a2b2..c9b603db9 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -1327,8 +1327,6 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() outputs to allow hard links between outputs. */ InodesSeen inodesSeen; - std::exception_ptr delayedException; - /* The paths that can be referenced are the input closures, the output paths, and any paths that have been built via recursive Nix calls. */ @@ -1647,36 +1645,11 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() std::filesystem::rename(tmpOutput, actualPath); - auto newInfo0 = newInfoFromCA( + return newInfoFromCA( DerivationOutput::CAFloating{ .method = dof.ca.method, .hashAlgo = wanted.algo, }); - - /* Check wanted hash */ - assert(newInfo0.ca); - auto & got = newInfo0.ca->hash; - if (wanted != got) { - /* Throw an error after registering the path as - valid. */ - delayedException = std::make_exception_ptr(BuildError( - BuildResult::HashMismatch, - "hash mismatch in fixed-output derivation '%s':\n specified: %s\n got: %s", - store.printStorePath(drvPath), - wanted.to_string(HashFormat::SRI, true), - got.to_string(HashFormat::SRI, true))); - } - if (!newInfo0.references.empty()) { - auto numViolations = newInfo.references.size(); - delayedException = std::make_exception_ptr(BuildError( - BuildResult::HashMismatch, - "fixed-output derivations must not reference store paths: '%s' references %d distinct paths, e.g. 
'%s'", - store.printStorePath(drvPath), - numViolations, - store.printStorePath(*newInfo.references.begin()))); - } - - return newInfo0; }, [&](const DerivationOutput::CAFloating & dof) { return newInfoFromCA(dof); }, @@ -1740,84 +1713,91 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() } if (buildMode == bmCheck) { + /* Check against already registered outputs */ - if (!store.isValidPath(newInfo.path)) - continue; - ValidPathInfo oldInfo(*store.queryPathInfo(newInfo.path)); - if (newInfo.narHash != oldInfo.narHash) { - if (settings.runDiffHook || settings.keepFailed) { - auto dst = store.toRealPath(finalDestPath + ".check"); - deletePath(dst); - movePath(actualPath, dst); + if (store.isValidPath(newInfo.path)) { + ValidPathInfo oldInfo(*store.queryPathInfo(newInfo.path)); + if (newInfo.narHash != oldInfo.narHash) { + if (settings.runDiffHook || settings.keepFailed) { + auto dst = store.toRealPath(finalDestPath + ".check"); + deletePath(dst); + movePath(actualPath, dst); - handleDiffHook( - buildUser ? buildUser->getUID() : getuid(), - buildUser ? buildUser->getGID() : getgid(), - finalDestPath, - dst, - store.printStorePath(drvPath), - tmpDir); + handleDiffHook( + buildUser ? buildUser->getUID() : getuid(), + buildUser ? buildUser->getGID() : getgid(), + finalDestPath, + dst, + store.printStorePath(drvPath), + tmpDir); - throw NotDeterministic( - "derivation '%s' may not be deterministic: output '%s' differs from '%s'", - store.printStorePath(drvPath), - store.toRealPath(finalDestPath), - dst); - } else - throw NotDeterministic( - "derivation '%s' may not be deterministic: output '%s' differs", - store.printStorePath(drvPath), - store.toRealPath(finalDestPath)); + throw NotDeterministic( + "derivation '%s' may not be deterministic: output '%s' differs from '%s'", + store.printStorePath(drvPath), + store.toRealPath(finalDestPath), + dst); + } else + throw NotDeterministic( + "derivation '%s' may not be deterministic: output '%s' differs", + store.printStorePath(drvPath), + store.toRealPath(finalDestPath)); + } + + /* Since we verified the build, it's now ultimately trusted. */ + if (!oldInfo.ultimate) { + oldInfo.ultimate = true; + store.signPathInfo(oldInfo); + store.registerValidPaths({{oldInfo.path, oldInfo}}); + } + } + } else { + /* do tasks relating to registering these outputs */ + + /* For debugging, print out the referenced and unreferenced paths. */ + for (auto & i : inputPaths) { + if (references.count(i)) + debug("referenced input: '%1%'", store.printStorePath(i)); + else + debug("unreferenced input: '%1%'", store.printStorePath(i)); } - /* Since we verified the build, it's now ultimately trusted. */ - if (!oldInfo.ultimate) { - oldInfo.ultimate = true; - store.signPathInfo(oldInfo); - store.registerValidPaths({{oldInfo.path, oldInfo}}); - } + store.optimisePath(actualPath, NoRepair); // FIXME: combine with scanForReferences() + miscMethods->markContentsGood(newInfo.path); - continue; + newInfo.deriver = drvPath; + newInfo.ultimate = true; + store.signPathInfo(newInfo); + + finish(newInfo.path); + + /* If it's a CA path, register it right away. This is necessary if it + isn't statically known so that we can safely unlock the path before + the next iteration + + This is also good so that if a fixed-output produces the + wrong path, we still store the result (just don't consider + the derivation sucessful, so if someone fixes the problem by + just changing the wanted hash, the redownload (or whateer + possibly quite slow thing it was) doesn't have to be done + again. 
*/
+            if (newInfo.ca)
+                store.registerValidPaths({{newInfo.path, newInfo}});
+        }
 
-        /* For debugging, print out the referenced and unreferenced paths. */
-        for (auto & i : inputPaths) {
-            if (references.count(i))
-                debug("referenced input: '%1%'", store.printStorePath(i));
-            else
-                debug("unreferenced input: '%1%'", store.printStorePath(i));
-        }
-
-        store.optimisePath(actualPath, NoRepair); // FIXME: combine with scanForReferences()
-        miscMethods->markContentsGood(newInfo.path);
-
-        newInfo.deriver = drvPath;
-        newInfo.ultimate = true;
-        store.signPathInfo(newInfo);
-
-        finish(newInfo.path);
-
-        /* If it's a CA path, register it right away. This is necessary if it
-           isn't statically known so that we can safely unlock the path before
-           the next iteration */
-        if (newInfo.ca)
-            store.registerValidPaths({{newInfo.path, newInfo}});
-
+        /* Do this in both the check and non-check cases, because we
+           want `checkOutputs` below to work, which needs these path
+           infos. */
         infos.emplace(outputName, std::move(newInfo));
     }
 
+    /* Apply output checks. This includes checking of the wanted vs got
+       hash of fixed-outputs. */
+    checkOutputs(store, drvPath, drv.outputs, drvOptions.outputChecks, infos);
+
     if (buildMode == bmCheck) {
-        /* In case of fixed-output derivations, if there are
-           mismatches on `--check` an error must be thrown as this is
-           also a source for non-determinism. */
-        if (delayedException)
-            std::rethrow_exception(delayedException);
         return {};
     }
 
-    /* Apply output checks. */
-    checkOutputs(store, drvPath, drvOptions.outputChecks, infos);
-
     /* Register each output path as valid, and register the sets of
       paths referenced by each of them.  If there are cycles in the
       outputs, this will fail. */
@@ -1829,16 +1809,11 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs()
         store.registerValidPaths(infos2);
     }
 
-    /* In case of a fixed-output derivation hash mismatch, throw an
-       exception now that we have registered the output as valid. */
-    if (delayedException)
-        std::rethrow_exception(delayedException);
-
-    /* If we made it this far, we are sure the output matches the derivation
-       (since the delayedException would be a fixed output CA mismatch). That
-       means it's safe to link the derivation to the output hash. We must do
-       that for floating CA derivations, which otherwise couldn't be cached,
-       but it's fine to do in all cases. */
+    /* If we made it this far, we are sure the output matches the
+       derivation. That means it's safe to link the derivation to the
+       output hash. We must do that for floating CA derivations, which
+       otherwise couldn't be cached, but it's fine to do in all cases.
+     */
     SingleDrvOutputs builtOutputs;
 
     for (auto & [outputName, newInfo] : infos) {

From 374f8e79a195bbcf606b0c3452f7e7de67b68150 Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Thu, 28 Aug 2025 11:58:28 -0400
Subject: [PATCH 272/382] `DerivationBuilderImpl::unprepareBuild` Just throw error

After the previous simplifications, there is no reason to catch the
error and immediately return it with a `std::variant` --- just let the
caller catch it instead.
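
For illustration, here is a minimal, self-contained sketch of the
control-flow shape this change aims for (hypothetical, simplified
stand-in types, not the real Nix classes): the caller now catches a
thrown `BuildError` instead of unpacking a `std::variant` result.

    // Hypothetical stand-ins; only the try/catch shape matters here.
    #include <iostream>
    #include <map>
    #include <stdexcept>
    #include <string>

    struct BuildError : std::runtime_error
    {
        using std::runtime_error::runtime_error;
    };

    using SingleDrvOutputs = std::map<std::string, std::string>;

    // After the change: throw on failure, return outputs on success.
    SingleDrvOutputs unprepareBuild(bool ok)
    {
        if (!ok)
            throw BuildError("builder failed");
        return {{"out", "/nix/store/...-example"}};
    }

    int main()
    {
        try {
            for (auto & [name, path] : unprepareBuild(false))
                std::cout << name << " -> " << path << "\n";
        } catch (BuildError & e) {
            // Previously the caller used std::get_if on a variant;
            // now it is a plain catch.
            std::cerr << "build failed: " << e.what() << "\n";
            return 1;
        }
    }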
--- .../build/derivation-building-goal.cc | 22 +++--- .../nix/store/build/derivation-builder.hh | 4 +- src/libstore/unix/build/derivation-builder.cc | 67 +++++++++---------- .../unix/build/linux-derivation-builder.cc | 2 +- 4 files changed, 45 insertions(+), 50 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index c9b562817..d2752dfb5 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -804,21 +804,22 @@ Goal::Co DerivationBuildingGoal::tryToBuild() trace("build done"); - auto res = builder->unprepareBuild(); - // N.B. cannot use `std::visit` with co-routine return - if (auto * ste = std::get_if<0>(&res)) { + SingleDrvOutputs builtOutputs; + try { + builtOutputs = builder->unprepareBuild(); + } catch (BuildError & e) { outputLocks.unlock(); // Allow selecting a subset of enum values # pragma GCC diagnostic push # pragma GCC diagnostic ignored "-Wswitch-enum" - switch (ste->status) { + switch (e.status) { case BuildResult::HashMismatch: worker.hashMismatch = true; /* See header, the protocols don't know about `HashMismatch` yet, so change it to `OutputRejected`, which they expect for this case (hash mismatch is a type of output rejection). */ - ste->status = BuildResult::OutputRejected; + e.status = BuildResult::OutputRejected; break; case BuildResult::NotDeterministic: worker.checkMismatch = true; @@ -828,10 +829,11 @@ Goal::Co DerivationBuildingGoal::tryToBuild() break; } # pragma GCC diagnostic pop - co_return doneFailure(std::move(*ste)); - } else if (auto * builtOutputs = std::get_if<1>(&res)) { + co_return doneFailure(std::move(e)); + } + { StorePathSet outputPaths; - for (auto & [_, output] : *builtOutputs) + for (auto & [_, output] : builtOutputs) outputPaths.insert(output.outPath); runPostBuildHook(worker.store, *logger, drvPath, outputPaths); @@ -841,9 +843,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() (unlinked) lock files. */ outputLocks.setDeletion(true); outputLocks.unlock(); - co_return doneSuccess(BuildResult::Built, std::move(*builtOutputs)); - } else { - unreachable(); + co_return doneSuccess(BuildResult::Built, std::move(builtOutputs)); } #endif } diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index fd487c5fe..08708ec05 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -187,8 +187,10 @@ struct DerivationBuilder : RestrictionContext * processing. A status code and exception are returned, providing * more information. The second case indicates success, and * realisations for each output of the derivation are returned. + * + * @throws BuildError */ - virtual std::variant unprepareBuild() = 0; + virtual SingleDrvOutputs unprepareBuild() = 0; /** * Stop the in-process nix daemon thread. 
diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index c9b603db9..6a5b6934e 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -191,7 +191,7 @@ public: void startBuilder() override; - std::variant unprepareBuild() override; + SingleDrvOutputs unprepareBuild() override; protected: @@ -426,7 +426,7 @@ bool DerivationBuilderImpl::prepareBuild() return true; } -std::variant DerivationBuilderImpl::unprepareBuild() +SingleDrvOutputs DerivationBuilderImpl::unprepareBuild() { // FIXME: get rid of this, rely on RAII. Finally releaseBuildUser([&]() { @@ -477,49 +477,42 @@ std::variant DerivationBuilderImpl::unprepareBuild bool diskFull = false; - try { + /* Check the exit status. */ + if (!statusOk(status)) { - /* Check the exit status. */ - if (!statusOk(status)) { + diskFull |= decideWhetherDiskFull(); - diskFull |= decideWhetherDiskFull(); + cleanupBuild(); - cleanupBuild(); + auto msg = + fmt("Cannot build '%s'.\n" + "Reason: " ANSI_RED "builder %s" ANSI_NORMAL ".", + Magenta(store.printStorePath(drvPath)), + statusToString(status)); - auto msg = - fmt("Cannot build '%s'.\n" - "Reason: " ANSI_RED "builder %s" ANSI_NORMAL ".", - Magenta(store.printStorePath(drvPath)), - statusToString(status)); + msg += showKnownOutputs(store, drv); - msg += showKnownOutputs(store, drv); + miscMethods->appendLogTailErrorMsg(msg); - miscMethods->appendLogTailErrorMsg(msg); + if (diskFull) + msg += "\nnote: build failure may have been caused by lack of free disk space"; - if (diskFull) - msg += "\nnote: build failure may have been caused by lack of free disk space"; - - throw BuildError( - !derivationType.isSandboxed() || diskFull ? BuildResult::TransientFailure - : BuildResult::PermanentFailure, - msg); - } - - /* Compute the FS closure of the outputs and register them as - being valid. */ - auto builtOutputs = registerOutputs(); - - /* Delete unused redirected outputs (when doing hash rewriting). */ - for (auto & i : redirectedOutputs) - deletePath(store.Store::toRealPath(i.second)); - - deleteTmpDir(true); - - return std::move(builtOutputs); - - } catch (BuildError & e) { - return std::move(e); + throw BuildError( + !derivationType.isSandboxed() || diskFull ? BuildResult::TransientFailure : BuildResult::PermanentFailure, + msg); } + + /* Compute the FS closure of the outputs and register them as + being valid. */ + auto builtOutputs = registerOutputs(); + + /* Delete unused redirected outputs (when doing hash rewriting). 
*/
+    for (auto & i : redirectedOutputs)
+        deletePath(store.Store::toRealPath(i.second));
+
+    deleteTmpDir(true);
+
+    return builtOutputs;
 }
 
 void DerivationBuilderImpl::cleanupBuild()
diff --git a/src/libstore/unix/build/linux-derivation-builder.cc b/src/libstore/unix/build/linux-derivation-builder.cc
index 39b8f09ae..d474c001e 100644
--- a/src/libstore/unix/build/linux-derivation-builder.cc
+++ b/src/libstore/unix/build/linux-derivation-builder.cc
@@ -659,7 +659,7 @@ struct ChrootLinuxDerivationBuilder : ChrootDerivationBuilder, LinuxDerivationBu
             throw SysError("setuid failed");
         }
 
-    std::variant unprepareBuild() override
+    SingleDrvOutputs unprepareBuild() override
     {
         sandboxMountNamespace = -1;
         sandboxUserNamespace = -1;

From 8dd289099c787440c0eb9eeac550a199801f57ae Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Thu, 28 Aug 2025 12:06:06 -0400
Subject: [PATCH 273/382] Simplify `DerivationGoal::unprepareBuild::diskFull`

We only need it defined in the narrower scope.
---
 src/libstore/unix/build/derivation-builder.cc | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc
index 6a5b6934e..daaf0b964 100644
--- a/src/libstore/unix/build/derivation-builder.cc
+++ b/src/libstore/unix/build/derivation-builder.cc
@@ -475,12 +475,10 @@ SingleDrvOutputs DerivationBuilderImpl::unprepareBuild()
             ((double) buildResult.cpuSystem->count()) / 1000000);
     }
 
-    bool diskFull = false;
-
     /* Check the exit status. */
     if (!statusOk(status)) {
 
-        diskFull |= decideWhetherDiskFull();
+        bool diskFull = decideWhetherDiskFull();
 
         cleanupBuild();
 

From 4db6bf96b77d5027526f487bbda9966518d69187 Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Thu, 28 Aug 2025 12:18:40 -0400
Subject: [PATCH 274/382] Give `DerivationBuilderImpl::cleanupBuild` bool arg

Do this to match `DerivationBuilder::deleteTmpDir`, which we'll want to
combine it with next.

Also change one caller from `deleteTmpDir(true)` to `cleanupBuild(true)`
now that this is done, because it will not make a difference.

This should be a pure refactor with no behavioral change.
---
 src/libstore/unix/build/chroot-derivation-builder.cc |  7 +++++--
 src/libstore/unix/build/derivation-builder.cc        | 10 +++++-----
 2 files changed, 10 insertions(+), 7 deletions(-)

diff --git a/src/libstore/unix/build/chroot-derivation-builder.cc b/src/libstore/unix/build/chroot-derivation-builder.cc
index 887bb47f0..20a4bd6bf 100644
--- a/src/libstore/unix/build/chroot-derivation-builder.cc
+++ b/src/libstore/unix/build/chroot-derivation-builder.cc
@@ -166,9 +166,12 @@ struct ChrootDerivationBuilder : virtual DerivationBuilderImpl
         return !needsHashRewrite() ? chrootRootDir + p : store.toRealPath(p);
     }
 
-    void cleanupBuild() override
+    void cleanupBuild(bool force) override
     {
-        DerivationBuilderImpl::cleanupBuild();
+        DerivationBuilderImpl::cleanupBuild(force);
+
+        if (force)
+            return;
 
         /* Move paths out of the chroot for easier debugging of
            build failures. 
*/ diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index daaf0b964..bd6cac522 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -350,7 +350,7 @@ public: protected: - virtual void cleanupBuild(); + virtual void cleanupBuild(bool force); private: @@ -480,7 +480,7 @@ SingleDrvOutputs DerivationBuilderImpl::unprepareBuild() bool diskFull = decideWhetherDiskFull(); - cleanupBuild(); + cleanupBuild(false); auto msg = fmt("Cannot build '%s'.\n" @@ -508,14 +508,14 @@ SingleDrvOutputs DerivationBuilderImpl::unprepareBuild() for (auto & i : redirectedOutputs) deletePath(store.Store::toRealPath(i.second)); - deleteTmpDir(true); + cleanupBuild(true); return builtOutputs; } -void DerivationBuilderImpl::cleanupBuild() +void DerivationBuilderImpl::cleanupBuild(bool force) { - deleteTmpDir(false); + deleteTmpDir(force); } static void chmod_(const Path & path, mode_t mode) From 557bbe969e2d3caa71ac4c2c3075371b5df0c7de Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 28 Aug 2025 12:26:29 -0400 Subject: [PATCH 275/382] Combine `cleanupBuild` and `deleteTmpDir` It's hard to tell if I changed any behavior, but if I did, I think I made it better, because now we explicitly move stuff out of the chroot (if we were going to) before trying to delete the chroot. --- src/libstore/build/derivation-building-goal.cc | 2 +- .../include/nix/store/build/derivation-builder.hh | 5 ++++- .../unix/build/chroot-derivation-builder.cc | 14 +++----------- src/libstore/unix/build/derivation-builder.cc | 13 ++----------- 4 files changed, 10 insertions(+), 24 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index d2752dfb5..24244ebd4 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -66,7 +66,7 @@ DerivationBuildingGoal::~DerivationBuildingGoal() ignoreExceptionInDestructor(); } try { - builder->deleteTmpDir(false); + builder->cleanupBuild(false); } catch (...) { ignoreExceptionInDestructor(); } diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index 08708ec05..512d001e0 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -200,8 +200,11 @@ struct DerivationBuilder : RestrictionContext /** * Delete the temporary directory, if we have one. + * + * @param force We know the build suceeded, so don't attempt to + * preseve anything for debugging. 
*/ - virtual void deleteTmpDir(bool force) = 0; + virtual void cleanupBuild(bool force) = 0; /** * Kill any processes running under the build user UID or in the diff --git a/src/libstore/unix/build/chroot-derivation-builder.cc b/src/libstore/unix/build/chroot-derivation-builder.cc index 20a4bd6bf..8c9359533 100644 --- a/src/libstore/unix/build/chroot-derivation-builder.cc +++ b/src/libstore/unix/build/chroot-derivation-builder.cc @@ -22,13 +22,6 @@ struct ChrootDerivationBuilder : virtual DerivationBuilderImpl PathsInChroot pathsInChroot; - void deleteTmpDir(bool force) override - { - autoDelChroot.reset(); /* this runs the destructor */ - - DerivationBuilderImpl::deleteTmpDir(force); - } - bool needsHashRewrite() override { return false; @@ -170,12 +163,9 @@ struct ChrootDerivationBuilder : virtual DerivationBuilderImpl { DerivationBuilderImpl::cleanupBuild(force); - if (force) - return; - /* Move paths out of the chroot for easier debugging of build failures. */ - if (buildMode == bmNormal) + if (!force && buildMode == bmNormal) for (auto & [_, status] : initialOutputs) { if (!status.known) continue; @@ -185,6 +175,8 @@ struct ChrootDerivationBuilder : virtual DerivationBuilderImpl if (pathExists(chrootRootDir + p)) std::filesystem::rename((chrootRootDir + p), p); } + + autoDelChroot.reset(); /* this runs the destructor */ } std::pair addDependencyPrep(const StorePath & path) diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index bd6cac522..241d98ace 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -344,14 +344,10 @@ private: public: - void deleteTmpDir(bool force) override; + void cleanupBuild(bool force) override; void killSandbox(bool getStats) override; -protected: - - virtual void cleanupBuild(bool force); - private: bool decideWhetherDiskFull(); @@ -513,11 +509,6 @@ SingleDrvOutputs DerivationBuilderImpl::unprepareBuild() return builtOutputs; } -void DerivationBuilderImpl::cleanupBuild(bool force) -{ - deleteTmpDir(force); -} - static void chmod_(const Path & path, mode_t mode) { if (chmod(path.c_str(), mode) == -1) @@ -1821,7 +1812,7 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() return builtOutputs; } -void DerivationBuilderImpl::deleteTmpDir(bool force) +void DerivationBuilderImpl::cleanupBuild(bool force) { if (topTmpDir != "") { /* As an extra precaution, even in the event of `deletePath` failing to From 49da508f46a1a90b04faed88a7d865976ae7c6fb Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 28 Aug 2025 12:40:55 -0400 Subject: [PATCH 276/382] Write a destructor for `DerivationBuilderImpl` This allows `DerivationBuildingGoal` to know less. --- .../build/derivation-building-goal.cc | 14 ++------ .../nix/store/build/derivation-builder.hh | 14 -------- src/libstore/unix/build/derivation-builder.cc | 36 ++++++++++++++++--- 3 files changed, 33 insertions(+), 31 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 24244ebd4..61f726bf9 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -59,18 +59,8 @@ DerivationBuildingGoal::~DerivationBuildingGoal() ignoreExceptionInDestructor(); } #ifndef _WIN32 // TODO enable `DerivationBuilder` on Windows - if (builder) { - try { - builder->stopDaemon(); - } catch (...) { - ignoreExceptionInDestructor(); - } - try { - builder->cleanupBuild(false); - } catch (...) 
{ - ignoreExceptionInDestructor(); - } - } + if (builder) + builder.reset(); #endif try { closeLogFile(); diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index 512d001e0..65d044a79 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -192,20 +192,6 @@ struct DerivationBuilder : RestrictionContext */ virtual SingleDrvOutputs unprepareBuild() = 0; - /** - * Stop the in-process nix daemon thread. - * @see startDaemon - */ - virtual void stopDaemon() = 0; - - /** - * Delete the temporary directory, if we have one. - * - * @param force We know the build suceeded, so don't attempt to - * preseve anything for debugging. - */ - virtual void cleanupBuild(bool force) = 0; - /** * Kill any processes running under the build user UID or in the * cgroup of the build. diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 241d98ace..4678dae42 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -85,6 +85,22 @@ public: { } + ~DerivationBuilderImpl() + { + /* Careful: we should never ever throw an exception from a + destructor. */ + try { + stopDaemon(); + } catch (...) { + ignoreExceptionInDestructor(); + } + try { + cleanupBuild(false); + } catch (...) { + ignoreExceptionInDestructor(); + } + } + protected: /** @@ -285,9 +301,11 @@ private: */ void startDaemon(); -public: - - void stopDaemon() override; + /** + * Stop the in-process nix daemon thread. + * @see startDaemon + */ + void stopDaemon(); protected: @@ -342,9 +360,17 @@ private: */ SingleDrvOutputs registerOutputs(); -public: +protected: - void cleanupBuild(bool force) override; + /** + * Delete the temporary directory, if we have one. + * + * @param force We know the build suceeded, so don't attempt to + * preseve anything for debugging. + */ + virtual void cleanupBuild(bool force); + +public: void killSandbox(bool getStats) override; From 4388e3dcb588ef960c92128040242c80bfb10361 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 28 Aug 2025 13:43:26 -0400 Subject: [PATCH 277/382] Create `DerivationBuilder::killChild` Then the derivation building goal doesn't need to snoop around as much. --- .../build/derivation-building-goal.cc | 16 +----------- .../nix/store/build/derivation-builder.hh | 8 +++--- src/libstore/unix/build/derivation-builder.cc | 26 ++++++++++++++++++- 3 files changed, 31 insertions(+), 19 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 61f726bf9..5af385aed 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -84,22 +84,8 @@ void DerivationBuildingGoal::killChild() hook.reset(); #endif #ifndef _WIN32 // TODO enable `DerivationBuilder` on Windows - if (builder && builder->pid != -1) { + if (builder && builder->killChild()) worker.childTerminated(this); - - // FIXME: move this into DerivationBuilder. - - /* If we're using a build user, then there is a tricky race - condition: if we kill the build user before the child has - done its setuid() to the build user uid, then it won't be - killed, and we'll potentially lock up in pid.wait(). So - also send a conventional kill to the child. 
*/ - ::kill(-builder->pid, SIGKILL); /* ignore the result */ - - builder->killSandbox(true); - - builder->pid.wait(); - } #endif } diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index 65d044a79..3e8903e8a 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -193,10 +193,12 @@ struct DerivationBuilder : RestrictionContext virtual SingleDrvOutputs unprepareBuild() = 0; /** - * Kill any processes running under the build user UID or in the - * cgroup of the build. + * Forcibly kill the child process, if any. + * + * @returns whether the child was still alive and needed to be + * killed. */ - virtual void killSandbox(bool getStats) = 0; + virtual bool killChild() = 0; }; #ifndef _WIN32 // TODO enable `DerivationBuilder` on Windows diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 4678dae42..dff7d0eaa 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -370,9 +370,15 @@ protected: */ virtual void cleanupBuild(bool force); + /** + * Kill any processes running under the build user UID or in the + * cgroup of the build. + */ + virtual void killSandbox(bool getStats); + public: - void killSandbox(bool getStats) override; + bool killChild() override; private: @@ -435,6 +441,24 @@ void DerivationBuilderImpl::killSandbox(bool getStats) } } +bool DerivationBuilderImpl::killChild() +{ + bool ret = pid != -1; + if (ret) { + /* If we're using a build user, then there is a tricky race + condition: if we kill the build user before the child has + done its setuid() to the build user uid, then it won't be + killed, and we'll potentially lock up in pid.wait(). So + also send a conventional kill to the child. */ + ::kill(-pid, SIGKILL); /* ignore the result */ + + killSandbox(true); + + pid.wait(); + } + return ret; +} + bool DerivationBuilderImpl::prepareBuild() { if (useBuildUsers()) { From c632c823cee268c3efdd5251375434c976827370 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 28 Aug 2025 13:44:39 -0400 Subject: [PATCH 278/382] Take `DerivationBuilder::pid` private --- src/libstore/include/nix/store/build/derivation-builder.hh | 5 ----- src/libstore/unix/build/derivation-builder.cc | 5 +++++ 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index 3e8903e8a..d7f2058d1 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -150,11 +150,6 @@ struct DerivationBuilderCallbacks */ struct DerivationBuilder : RestrictionContext { - /** - * The process ID of the builder. - */ - Pid pid; - DerivationBuilder() = default; virtual ~DerivationBuilder() = default; diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index dff7d0eaa..b11eb383d 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -70,6 +70,11 @@ class DerivationBuilderImpl : public DerivationBuilder, public DerivationBuilder { protected: + /** + * The process ID of the builder. 
+     */
+    Pid pid;
+
     LocalStore & store;
 
     std::unique_ptr miscMethods;

From bde745cb3f8dab7a29fbc2c87eb599ff31384ab5 Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Thu, 28 Aug 2025 13:57:38 -0400
Subject: [PATCH 279/382] Move `killChild` call from `~DerivationBuildingGoal` to `~DerivationBuilder`

Sadly we cannot unexpose `DerivationBuilder::killChild` yet, because
`DerivationBuildingGoal` calls it elsewhere, but we can at least have a
better division of labor between the two destructors.
---
 src/libstore/build/derivation-building-goal.cc | 5 -----
 src/libstore/unix/build/derivation-builder.cc  | 5 +++++
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc
index 5af385aed..20a67008c 100644
--- a/src/libstore/build/derivation-building-goal.cc
+++ b/src/libstore/build/derivation-building-goal.cc
@@ -53,11 +53,6 @@ DerivationBuildingGoal::~DerivationBuildingGoal()
 {
     /* Careful: we should never ever throw an exception from a
        destructor. */
-    try {
-        killChild();
-    } catch (...) {
-        ignoreExceptionInDestructor();
-    }
 #ifndef _WIN32 // TODO enable `DerivationBuilder` on Windows
     if (builder)
         builder.reset();
diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc
index b11eb383d..bc48d4256 100644
--- a/src/libstore/unix/build/derivation-builder.cc
+++ b/src/libstore/unix/build/derivation-builder.cc
@@ -94,6 +94,11 @@ public:
     {
         /* Careful: we should never ever throw an exception from a
           destructor. */
+        try {
+            killChild();
+        } catch (...) {
+            ignoreExceptionInDestructor();
+        }
         try {
             stopDaemon();
         } catch (...) {

From 3e0b1705c13ab612ca1e4f524619d12a9733eeff Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Thu, 28 Aug 2025 14:54:11 -0400
Subject: [PATCH 280/382] Move `markContentsGood` to after `DerivationBuilder` finishes

I think this should be fine for repairing. If anything, it is better,
because it would be weird to "mark an output good" only for it to then
fail output checks.
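
As a rough illustration of the intended ordering on the goal side (a
hedged sketch with hypothetical, simplified stand-in types, not the
real goal/worker classes): outputs are only marked "contents good"
(for the sake of `bmRepair`) once the builder has returned
successfully, i.e. after all output checks have passed.

    #include <iostream>
    #include <map>
    #include <set>
    #include <string>

    using StorePath = std::string;
    using SingleDrvOutputs = std::map<std::string, StorePath>;

    struct Worker
    {
        std::set<StorePath> goodPaths;

        void markContentsGood(const StorePath & p)
        {
            goodPaths.insert(p);
        }
    };

    // Stands in for DerivationBuilder::unprepareBuild(), which now throws
    // if an output check fails, so callers never reach the loop below on
    // failure.
    SingleDrvOutputs runBuilder()
    {
        return {{"out", "/nix/store/...-example"}};
    }

    int main()
    {
        Worker worker;
        auto builtOutputs = runBuilder();
        for (auto & [name, path] : builtOutputs) {
            worker.markContentsGood(path); // only after the checks succeeded
            std::cout << name << " marked good: " << path << "\n";
        }
    }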
--- src/libstore/build/derivation-building-goal.cc | 10 ++++------ .../include/nix/store/build/derivation-builder.hh | 2 -- src/libstore/unix/build/derivation-builder.cc | 1 - 3 files changed, 4 insertions(+), 9 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 20a67008c..b1920cadb 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -623,11 +623,6 @@ Goal::Co DerivationBuildingGoal::tryToBuild() goal.worker.childTerminated(&goal); } - void markContentsGood(const StorePath & path) override - { - goal.worker.markContentsGood(path); - } - Path openLogFile() override { return goal.openLogFile(); @@ -804,8 +799,11 @@ Goal::Co DerivationBuildingGoal::tryToBuild() } { StorePathSet outputPaths; - for (auto & [_, output] : builtOutputs) + for (auto & [_, output] : builtOutputs) { + // for sake of `bmRepair` + worker.markContentsGood(output.outPath); outputPaths.insert(output.outPath); + } runPostBuildHook(worker.store, *logger, drvPath, outputPaths); /* It is now safe to delete the lock files, since all future diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index d7f2058d1..a373c4729 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -133,8 +133,6 @@ struct DerivationBuilderCallbacks * @todo this should be reworked */ virtual void childTerminated() = 0; - - virtual void markContentsGood(const StorePath & path) = 0; }; /** diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index bc48d4256..bf99c4c1a 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -1804,7 +1804,6 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() } store.optimisePath(actualPath, NoRepair); // FIXME: combine with scanForReferences() - miscMethods->markContentsGood(newInfo.path); newInfo.deriver = drvPath; newInfo.ultimate = true; From 6839f3de5522f9895b3f3fecaab818a6bb7ae30a Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 28 Aug 2025 01:11:37 +0300 Subject: [PATCH 281/382] libutil-tests: Add more URL tests --- src/libutil-tests/url.cc | 61 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/src/libutil-tests/url.cc b/src/libutil-tests/url.cc index b776ba671..71c416a3b 100644 --- a/src/libutil-tests/url.cc +++ b/src/libutil-tests/url.cc @@ -204,6 +204,67 @@ TEST(parseURL, parseFileURLWithQueryAndFragment) ASSERT_EQ(s, parsed.to_string()); } +TEST(parseURL, parseFileURL) +{ + auto s = "file:/none/of/your/business/"; + auto parsed = parseURL(s); + + ParsedURL expected{ + .scheme = "file", + .authority = std::nullopt, + .path = "/none/of/your/business/", + }; + + ASSERT_EQ(parsed, expected); + ASSERT_EQ(s, parsed.to_string()); +} + +TEST(parseURL, parseFileURLWithAuthority) +{ + auto s = "file://///of/your/business//"; + auto parsed = parseURL(s); + + ParsedURL expected{ + .scheme = "file", + .authority = Authority{.host = ""}, + .path = "///of/your/business//", + }; + + ASSERT_EQ(parsed.authority, expected.authority); + ASSERT_EQ(parsed, expected); + ASSERT_EQ(s, parsed.to_string()); +} + +TEST(parseURL, parseFileURLNoLeadingSlash) +{ + auto s = "file:none/of/your/business/"; + auto parsed = parseURL(s); + + ParsedURL expected{ + .scheme = "file", + 
.authority = std::nullopt, + .path = "none/of/your/business/", + }; + + ASSERT_EQ(parsed, expected); + ASSERT_EQ("file:none/of/your/business/", parsed.to_string()); +} + +TEST(parseURL, parseHttpTrailingSlash) +{ + auto s = "http://example.com/"; + auto parsed = parseURL(s); + + ParsedURL expected{ + .scheme = "http", + .authority = Authority{.host = "example.com"}, + .path = "/", + }; + + ASSERT_EQ(parsed, expected); + ASSERT_EQ(s, parsed.to_string()); +} + TEST(parseURL, parsedUrlsIsEqualToItself) { auto s = "http://www.example.org/file.tar.gz"; From c436b7a32afaf01d62f828697ddf5c49d4f8678c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Tue, 26 Aug 2025 12:49:28 +0200 Subject: [PATCH 282/382] Fix `ParsedURL` handling of `%2F` in URL paths See the new extensive doxygen in `url.hh`. This fixes fetching gitlab: flakes. Paths are now stored as a std::vector of individual path segments, which can themselves contain path separators '/' (%2F). This is necessary to make the Gitlab's /projects/ API work. Co-authored-by: John Ericson Co-authored-by: Sergei Zimmerman --- src/libfetchers/git-lfs-fetch.cc | 3 +- src/libfetchers/git.cc | 16 +- src/libfetchers/github.cc | 8 +- src/libfetchers/indirect.cc | 15 +- src/libfetchers/mercurial.cc | 4 +- src/libfetchers/path.cc | 4 +- src/libfetchers/tarball.cc | 29 +-- src/libflake/flakeref.cc | 20 +- src/libflake/url-name.cc | 11 +- src/libstore-tests/s3.cc | 39 ++-- src/libstore/filetransfer.cc | 2 +- src/libstore/http-binary-cache-store.cc | 6 +- src/libstore/include/nix/store/s3.hh | 7 +- .../include/nix/store/store-reference.hh | 10 + src/libstore/s3.cc | 28 ++- src/libstore/store-reference.cc | 4 +- src/libutil-tests/url.cc | 94 +++++++--- src/libutil/include/nix/util/url.hh | 175 +++++++++++++++++- src/libutil/url.cc | 88 ++++++++- 19 files changed, 446 insertions(+), 117 deletions(-) diff --git a/src/libfetchers/git-lfs-fetch.cc b/src/libfetchers/git-lfs-fetch.cc index bd9752711..9688daa4a 100644 --- a/src/libfetchers/git-lfs-fetch.cc +++ b/src/libfetchers/git-lfs-fetch.cc @@ -69,7 +69,8 @@ static LfsApiInfo getLfsApi(const ParsedURL & url) args.push_back("--"); args.push_back("git-lfs-authenticate"); - args.push_back(url.path); + // FIXME %2F encode slashes? Does this command take/accept percent encoding? + args.push_back(url.renderPath(/*encode=*/false)); args.push_back("download"); auto [status, output] = runProgram({.program = "ssh", .args = args}); diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index c19e8d7db..a7acc316e 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -462,8 +462,8 @@ struct GitInputScheme : InputScheme // Why are we checking for bare repository? // well if it's a bare repository we want to force a git fetch rather than copying the folder - bool isBareRepository = url.scheme == "file" && pathExists(url.path) && !pathExists(url.path + "/.git"); - // + auto isBareRepository = [](PathView path) { return pathExists(path) && !pathExists(path + "/.git"); }; + // FIXME: here we turn a possibly relative path into an absolute path. 
// This allows relative git flake inputs to be resolved against the // **current working directory** (as in POSIX), which tends to work out @@ -472,8 +472,10 @@ struct GitInputScheme : InputScheme // // See: https://discourse.nixos.org/t/57783 and #9708 // - if (url.scheme == "file" && !forceHttp && !isBareRepository) { - if (!isAbsolute(url.path)) { + if (url.scheme == "file" && !forceHttp && !isBareRepository(renderUrlPathEnsureLegal(url.path))) { + auto path = renderUrlPathEnsureLegal(url.path); + + if (!isAbsolute(path)) { warn( "Fetching Git repository '%s', which uses a path relative to the current directory. " "This is not supported and will stop working in a future release. " @@ -483,10 +485,10 @@ struct GitInputScheme : InputScheme // If we don't check here for the path existence, then we can give libgit2 any directory // and it will initialize them as git directories. - if (!pathExists(url.path)) { - throw Error("The path '%s' does not exist.", url.path); + if (!pathExists(path)) { + throw Error("The path '%s' does not exist.", path); } - repoInfo.location = std::filesystem::absolute(url.path); + repoInfo.location = std::filesystem::absolute(path); } else { if (url.scheme == "file") /* Query parameters are meaningless for file://, but diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index b3749b01a..e40757dec 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -38,7 +38,7 @@ struct GitArchiveInputScheme : InputScheme if (url.scheme != schemeName()) return {}; - auto path = tokenizeString>(url.path, "/"); + const auto & path = url.path; std::optional rev; std::optional ref; @@ -139,12 +139,12 @@ struct GitArchiveInputScheme : InputScheme auto repo = getStrAttr(input.attrs, "repo"); auto ref = input.getRef(); auto rev = input.getRev(); - auto path = owner + "/" + repo; + std::vector path{owner, repo}; assert(!(ref && rev)); if (ref) - path += "/" + *ref; + path.push_back(*ref); if (rev) - path += "/" + rev->to_string(HashFormat::Base16, false); + path.push_back(rev->to_string(HashFormat::Base16, false)); auto url = ParsedURL{ .scheme = std::string{schemeName()}, .path = path, diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc index 4bd4d890d..c5cbf156b 100644 --- a/src/libfetchers/indirect.cc +++ b/src/libfetchers/indirect.cc @@ -14,7 +14,7 @@ struct IndirectInputScheme : InputScheme if (url.scheme != "flake") return {}; - auto path = tokenizeString>(url.path, "/"); + const auto & path = url.path; std::optional rev; std::optional ref; @@ -82,16 +82,15 @@ struct IndirectInputScheme : InputScheme ParsedURL toURL(const Input & input) const override { - ParsedURL url; - url.scheme = "flake"; - url.path = getStrAttr(input.attrs, "id"); + ParsedURL url{ + .scheme = "flake", + .path = {getStrAttr(input.attrs, "id")}, + }; if (auto ref = input.getRef()) { - url.path += '/'; - url.path += *ref; + url.path.push_back(*ref); }; if (auto rev = input.getRev()) { - url.path += '/'; - url.path += rev->gitRev(); + url.path.push_back(rev->gitRev()); }; return url; } diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc index 9b17d675e..641b3d6a8 100644 --- a/src/libfetchers/mercurial.cc +++ b/src/libfetchers/mercurial.cc @@ -120,7 +120,7 @@ struct MercurialInputScheme : InputScheme { auto url = parseURL(getStrAttr(input.attrs, "url")); if (url.scheme == "file" && !input.getRef() && !input.getRev()) - return url.path; + return renderUrlPathEnsureLegal(url.path); return {}; } @@ -152,7 +152,7 @@ struct MercurialInputScheme 
: InputScheme { auto url = parseURL(getStrAttr(input.attrs, "url")); bool isLocal = url.scheme == "file"; - return {isLocal, isLocal ? url.path : url.to_string()}; + return {isLocal, isLocal ? renderUrlPathEnsureLegal(url.path) : url.to_string()}; } StorePath fetchToStore(ref store, Input & input) const diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index e5635ee75..b66459fb9 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -20,7 +20,7 @@ struct PathInputScheme : InputScheme Input input{settings}; input.attrs.insert_or_assign("type", "path"); - input.attrs.insert_or_assign("path", url.path); + input.attrs.insert_or_assign("path", renderUrlPathEnsureLegal(url.path)); for (auto & [name, value] : url.query) if (name == "rev" || name == "narHash") @@ -74,7 +74,7 @@ struct PathInputScheme : InputScheme query.erase("__final"); return ParsedURL{ .scheme = "path", - .path = getStrAttr(input.attrs, "path"), + .path = splitString>(getStrAttr(input.attrs, "path"), "/"), .query = query, }; } diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index b89cd99f1..c1b28f674 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -107,19 +107,19 @@ DownloadFileResult downloadFile( } static DownloadTarballResult downloadTarball_( - const Settings & settings, const std::string & url, const Headers & headers, const std::string & displayPrefix) + const Settings & settings, const std::string & urlS, const Headers & headers, const std::string & displayPrefix) { + auto url = parseURL(urlS); // Some friendly error messages for common mistakes. // Namely lets catch when the url is a local file path, but // it is not in fact a tarball. - if (url.rfind("file://", 0) == 0) { - // Remove "file://" prefix to get the local file path - std::string localPath = url.substr(7); - if (!std::filesystem::exists(localPath)) { + if (url.scheme == "file") { + std::filesystem::path localPath = renderUrlPathEnsureLegal(url.path); + if (!exists(localPath)) { throw Error("tarball '%s' does not exist.", localPath); } - if (std::filesystem::is_directory(localPath)) { + if (is_directory(localPath)) { if (std::filesystem::exists(localPath + "/.git")) { throw Error( "tarball '%s' is a git repository, not a tarball. Please use `git+file` as the scheme.", localPath); @@ -128,7 +128,7 @@ static DownloadTarballResult downloadTarball_( } } - Cache::Key cacheKey{"tarball", {{"url", url}}}; + Cache::Key cacheKey{"tarball", {{"url", urlS}}}; auto cached = settings.getCache()->lookupExpired(cacheKey); @@ -153,7 +153,7 @@ static DownloadTarballResult downloadTarball_( auto _res = std::make_shared>(); auto source = sinkToSource([&](Sink & sink) { - FileTransferRequest req(parseURL(url)); + FileTransferRequest req(url); req.expectedETag = cached ? getStrAttr(cached->value, "etag") : ""; getFileTransfer()->download(std::move(req), sink, [_res](FileTransferResult r) { *_res->lock() = r; }); }); @@ -166,7 +166,7 @@ static DownloadTarballResult downloadTarball_( /* Note: if the download is cached, `importTarball()` will receive no data, which causes it to import an empty tarball. */ - auto archive = hasSuffix(toLower(parseURL(url).path), ".zip") ? ({ + auto archive = !url.path.empty() && hasSuffix(toLower(url.path.back()), ".zip") ? ({ /* In streaming mode, libarchive doesn't handle symlinks in zip files correctly (#10649). 
So write the entire file to disk so libarchive can access it @@ -180,7 +180,7 @@ static DownloadTarballResult downloadTarball_( } TarArchive{path}; }) - : TarArchive{*source}; + : TarArchive{*source}; auto tarballCache = getTarballCache(); auto parseSink = tarballCache->getFileSystemObjectSink(); auto lastModified = unpackTarfileToSink(archive, *parseSink); @@ -234,8 +234,11 @@ struct CurlInputScheme : InputScheme { const StringSet transportUrlSchemes = {"file", "http", "https"}; - bool hasTarballExtension(std::string_view path) const + bool hasTarballExtension(const ParsedURL & url) const { + if (url.path.empty()) + return false; + const auto & path = url.path.back(); return hasSuffix(path, ".zip") || hasSuffix(path, ".tar") || hasSuffix(path, ".tgz") || hasSuffix(path, ".tar.gz") || hasSuffix(path, ".tar.xz") || hasSuffix(path, ".tar.bz2") || hasSuffix(path, ".tar.zst"); @@ -336,7 +339,7 @@ struct FileInputScheme : CurlInputScheme auto parsedUrlScheme = parseUrlScheme(url.scheme); return transportUrlSchemes.count(std::string(parsedUrlScheme.transport)) && (parsedUrlScheme.application ? parsedUrlScheme.application.value() == schemeName() - : (!requireTree && !hasTarballExtension(url.path))); + : (!requireTree && !hasTarballExtension(url))); } std::pair, Input> getAccessor(ref store, const Input & _input) const override @@ -373,7 +376,7 @@ struct TarballInputScheme : CurlInputScheme return transportUrlSchemes.count(std::string(parsedUrlScheme.transport)) && (parsedUrlScheme.application ? parsedUrlScheme.application.value() == schemeName() - : (requireTree || hasTarballExtension(url.path))); + : (requireTree || hasTarballExtension(url))); } std::pair, Input> getAccessor(ref store, const Input & _input) const override diff --git a/src/libflake/flakeref.cc b/src/libflake/flakeref.cc index 070f4e483..cd176f14b 100644 --- a/src/libflake/flakeref.cc +++ b/src/libflake/flakeref.cc @@ -143,7 +143,7 @@ std::pair parsePathFlakeRefWithFragment( auto parsedURL = ParsedURL{ .scheme = "git+file", .authority = ParsedURL::Authority{}, - .path = flakeRoot, + .path = splitString>(flakeRoot, "/"), .query = query, .fragment = fragment, }; @@ -172,7 +172,13 @@ std::pair parsePathFlakeRefWithFragment( return fromParsedURL( fetchSettings, - {.scheme = "path", .authority = ParsedURL::Authority{}, .path = path, .query = query, .fragment = fragment}, + { + .scheme = "path", + .authority = ParsedURL::Authority{}, + .path = splitString>(path, "/"), + .query = query, + .fragment = fragment, + }, isFlake); } @@ -193,7 +199,7 @@ parseFlakeIdRef(const fetchers::Settings & fetchSettings, const std::string & ur auto parsedURL = ParsedURL{ .scheme = "flake", .authority = ParsedURL::Authority{}, - .path = match[1], + .path = splitString>(match[1].str(), "/"), }; return std::make_pair( @@ -211,8 +217,12 @@ std::optional> parseURLFlakeRef( { try { auto parsed = parseURL(url, /*lenient=*/true); - if (baseDir && (parsed.scheme == "path" || parsed.scheme == "git+file") && !isAbsolute(parsed.path)) - parsed.path = absPath(parsed.path, *baseDir); + if (baseDir && (parsed.scheme == "path" || parsed.scheme == "git+file")) { + /* Here we know that the path must not contain encoded '/' or NUL bytes. 
*/ + auto path = renderUrlPathEnsureLegal(parsed.path); + if (!isAbsolute(path)) + parsed.path = splitString>(absPath(path, *baseDir), "/"); + } return fromParsedURL(fetchSettings, std::move(parsed), isFlake); } catch (BadURL &) { return std::nullopt; diff --git a/src/libflake/url-name.cc b/src/libflake/url-name.cc index b3eeca26a..3bba3692e 100644 --- a/src/libflake/url-name.cc +++ b/src/libflake/url-name.cc @@ -27,16 +27,21 @@ std::optional getNameFromURL(const ParsedURL & url) return match.str(2); } + /* This is not right, because special chars like slashes within the + path fragments should be percent encoded, but I don't think any + of the regexes above care. */ + auto path = concatStringsSep("/", url.path); + /* If this is a github/gitlab/sourcehut flake, use the repo name */ - if (std::regex_match(url.scheme, gitProviderRegex) && std::regex_match(url.path, match, secondPathSegmentRegex)) + if (std::regex_match(url.scheme, gitProviderRegex) && std::regex_match(path, match, secondPathSegmentRegex)) return match.str(1); /* If it is a regular git flake, use the directory name */ - if (std::regex_match(url.scheme, gitSchemeRegex) && std::regex_match(url.path, match, lastPathSegmentRegex)) + if (std::regex_match(url.scheme, gitSchemeRegex) && std::regex_match(path, match, lastPathSegmentRegex)) return match.str(1); /* If there is no fragment, take the last element of the path */ - if (std::regex_match(url.path, match, lastPathSegmentRegex)) + if (std::regex_match(path, match, lastPathSegmentRegex)) return match.str(1); /* If even that didn't work, the URL does not contain enough info to determine a useful name */ diff --git a/src/libstore-tests/s3.cc b/src/libstore-tests/s3.cc index df61c04c1..44a31ddc9 100644 --- a/src/libstore-tests/s3.cc +++ b/src/libstore-tests/s3.cc @@ -33,7 +33,7 @@ INSTANTIATE_TEST_SUITE_P( "s3://my-bucket/my-key.txt", { .bucket = "my-bucket", - .key = "my-key.txt", + .key = {"my-key.txt"}, }, "basic_s3_bucket", }, @@ -41,7 +41,7 @@ INSTANTIATE_TEST_SUITE_P( "s3://prod-cache/nix/store/abc123.nar.xz?region=eu-west-1", { .bucket = "prod-cache", - .key = "nix/store/abc123.nar.xz", + .key = {"nix", "store", "abc123.nar.xz"}, .region = "eu-west-1", }, "with_region", @@ -50,7 +50,7 @@ INSTANTIATE_TEST_SUITE_P( "s3://bucket/key?region=us-west-2&profile=prod&endpoint=custom.s3.com&scheme=https®ion=us-east-1", { .bucket = "bucket", - .key = "key", + .key = {"key"}, .profile = "prod", .region = "us-west-2", //< using the first parameter (decodeQuery ignores dupicates) .scheme = "https", @@ -62,7 +62,7 @@ INSTANTIATE_TEST_SUITE_P( "s3://cache/file.txt?profile=production®ion=ap-southeast-2", { .bucket = "cache", - .key = "file.txt", + .key = {"file.txt"}, .profile = "production", .region = "ap-southeast-2", }, @@ -72,13 +72,14 @@ INSTANTIATE_TEST_SUITE_P( "s3://bucket/key?endpoint=https://minio.local&scheme=http", { .bucket = "bucket", - .key = "key", + .key = {"key"}, /* TODO: Figure out what AWS SDK is doing when both endpointOverride and scheme are set. 
*/ .scheme = "http", .endpoint = ParsedURL{ .scheme = "https", .authority = ParsedURL::Authority{.host = "minio.local"}, + .path = {""}, }, }, "with_absolute_endpoint_uri", @@ -101,6 +102,7 @@ struct S3ToHttpsConversionTestCase { ParsedS3URL input; ParsedURL expected; + std::string expectedRendered; std::string description; }; @@ -113,6 +115,7 @@ TEST_P(S3ToHttpsConversionTest, ConvertsCorrectly) const auto & testCase = GetParam(); auto result = testCase.input.toHttpsUrl(); EXPECT_EQ(result, testCase.expected) << "Failed for: " << testCase.description; + EXPECT_EQ(result.to_string(), testCase.expectedRendered); } INSTANTIATE_TEST_SUITE_P( @@ -122,71 +125,77 @@ INSTANTIATE_TEST_SUITE_P( S3ToHttpsConversionTestCase{ ParsedS3URL{ .bucket = "my-bucket", - .key = "my-key.txt", + .key = {"my-key.txt"}, }, ParsedURL{ .scheme = "https", .authority = ParsedURL::Authority{.host = "s3.us-east-1.amazonaws.com"}, - .path = "/my-bucket/my-key.txt", + .path = {"", "my-bucket", "my-key.txt"}, }, + "https://s3.us-east-1.amazonaws.com/my-bucket/my-key.txt", "basic_s3_default_region", }, S3ToHttpsConversionTestCase{ ParsedS3URL{ .bucket = "prod-cache", - .key = "nix/store/abc123.nar.xz", + .key = {"nix", "store", "abc123.nar.xz"}, .region = "eu-west-1", }, ParsedURL{ .scheme = "https", .authority = ParsedURL::Authority{.host = "s3.eu-west-1.amazonaws.com"}, - .path = "/prod-cache/nix/store/abc123.nar.xz", + .path = {"", "prod-cache", "nix", "store", "abc123.nar.xz"}, }, + "https://s3.eu-west-1.amazonaws.com/prod-cache/nix/store/abc123.nar.xz", "with_eu_west_1_region", }, S3ToHttpsConversionTestCase{ ParsedS3URL{ .bucket = "bucket", - .key = "key", + .key = {"key"}, .scheme = "http", .endpoint = ParsedURL::Authority{.host = "custom.s3.com"}, }, ParsedURL{ .scheme = "http", .authority = ParsedURL::Authority{.host = "custom.s3.com"}, - .path = "/bucket/key", + .path = {"", "bucket", "key"}, }, + "http://custom.s3.com/bucket/key", "custom_endpoint_authority", }, S3ToHttpsConversionTestCase{ ParsedS3URL{ .bucket = "bucket", - .key = "key", + .key = {"key"}, .endpoint = ParsedURL{ .scheme = "http", .authority = ParsedURL::Authority{.host = "server", .port = 9000}, + .path = {""}, }, }, ParsedURL{ .scheme = "http", .authority = ParsedURL::Authority{.host = "server", .port = 9000}, - .path = "/bucket/key", + .path = {"", "bucket", "key"}, }, + "http://server:9000/bucket/key", "custom_endpoint_with_port", }, S3ToHttpsConversionTestCase{ ParsedS3URL{ .bucket = "bucket", - .key = "path/to/file.txt", + .key = {"path", "to", "file.txt"}, .region = "ap-southeast-2", .scheme = "https", }, ParsedURL{ .scheme = "https", .authority = ParsedURL::Authority{.host = "s3.ap-southeast-2.amazonaws.com"}, - .path = "/bucket/path/to/file.txt", + .path = {"", "bucket", "path", "to", "file.txt"}, }, + "https://s3.ap-southeast-2.amazonaws.com/bucket/path/to/file.txt", "complex_path_and_region", }), [](const ::testing::TestParamInfo & info) { return info.param.description; }); diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 7145a3d06..0007b9ad8 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -815,7 +815,7 @@ struct curlFileTransfer : public FileTransfer S3Helper s3Helper(profile, region, scheme, endpoint); // FIXME: implement ETag - auto s3Res = s3Helper.getObject(parsed.bucket, parsed.key); + auto s3Res = s3Helper.getObject(parsed.bucket, encodeUrlPath(parsed.key)); FileTransferResult res; if (!s3Res.data) throw FileTransferError(NotFound, {}, "S3 object '%s' does not exist", 
request.uri); diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index ab799617e..7737389a3 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -27,7 +27,7 @@ HttpBinaryCacheStoreConfig::HttpBinaryCacheStoreConfig( + (!_cacheUri.empty() ? _cacheUri : throw UsageError("`%s` Store requires a non-empty authority in Store URL", scheme)))) { - while (!cacheUri.path.empty() && cacheUri.path.back() == '/') + while (!cacheUri.path.empty() && cacheUri.path.back() == "") cacheUri.path.pop_back(); } @@ -37,7 +37,7 @@ StoreReference HttpBinaryCacheStoreConfig::getReference() const .variant = StoreReference::Specified{ .scheme = cacheUri.scheme, - .authority = (cacheUri.authority ? cacheUri.authority->to_string() : "") + cacheUri.path, + .authority = cacheUri.renderAuthorityAndPath(), }, .params = cacheUri.query, }; @@ -157,7 +157,7 @@ protected: /* Otherwise the last path fragment will get discarded. */ auto cacheUriWithTrailingSlash = config->cacheUri; if (!cacheUriWithTrailingSlash.path.empty()) - cacheUriWithTrailingSlash.path += "/"; + cacheUriWithTrailingSlash.path.push_back(""); /* path is not a path, but a full relative or absolute URL, e.g. we've seen in the wild NARINFO files have a URL diff --git a/src/libstore/include/nix/store/s3.hh b/src/libstore/include/nix/store/s3.hh index ec0cddf68..0270eeda6 100644 --- a/src/libstore/include/nix/store/s3.hh +++ b/src/libstore/include/nix/store/s3.hh @@ -54,7 +54,12 @@ struct S3Helper struct ParsedS3URL { std::string bucket; - std::string key; + /** + * @see ParsedURL::path. This is a vector for the same reason. + * Unlike ParsedURL::path this doesn't include the leading empty segment, + * since the bucket name is necessary. + */ + std::vector key; std::optional profile; std::optional region; std::optional scheme; diff --git a/src/libstore/include/nix/store/store-reference.hh b/src/libstore/include/nix/store/store-reference.hh index 5cf1e9a11..1df333947 100644 --- a/src/libstore/include/nix/store/store-reference.hh +++ b/src/libstore/include/nix/store/store-reference.hh @@ -77,12 +77,22 @@ struct StoreReference */ std::string render(bool withParams = true) const; + std::string to_string() const + { + return render(); + } + /** * Parse a URI into a store reference. */ static StoreReference parse(const std::string & uri, const Params & extraParams = Params{}); }; +static inline std::ostream & operator<<(std::ostream & os, const StoreReference & ref) +{ + return os << ref.render(); +} + /** * Split URI into protocol+hierarchy part and its parameter set. */ diff --git a/src/libstore/s3.cc b/src/libstore/s3.cc index 739de2532..5396f43b9 100644 --- a/src/libstore/s3.cc +++ b/src/libstore/s3.cc @@ -3,6 +3,9 @@ #include "nix/util/url.hh" #include "nix/util/util.hh" #include "nix/util/canon-path.hh" +#include "nix/util/strings-inline.hh" + +#include namespace nix { @@ -24,10 +27,6 @@ try { || parsed.authority->hostType != ParsedURL::Authority::HostType::Name) throw BadURL("URI has a missing or invalid bucket name"); - std::string_view key = parsed.path; - /* Make the key a relative path. 
*/ - splitPrefix(key, "/"); - /* TODO: Validate the key against: * https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html#object-key-guidelines */ @@ -41,10 +40,14 @@ try { }; auto endpoint = getOptionalParam("endpoint"); + if (parsed.path.size() <= 1 || !parsed.path.front().empty()) + throw BadURL("URI has a missing or invalid key"); + + auto path = std::views::drop(parsed.path, 1) | std::ranges::to>(); return ParsedS3URL{ .bucket = parsed.authority->host, - .key = std::string{key}, + .key = std::move(path), .profile = getOptionalParam("profile"), .region = getOptionalParam("region"), .scheme = getOptionalParam("scheme"), @@ -78,26 +81,35 @@ ParsedURL ParsedS3URL::toHttpsUrl() const overloaded{ [&](const std::monostate &) { // No custom endpoint, use standard AWS S3 endpoint + std::vector path{""}; + path.push_back(bucket); + path.insert(path.end(), key.begin(), key.end()); return ParsedURL{ .scheme = std::string{schemeStr}, .authority = ParsedURL::Authority{.host = "s3." + regionStr + ".amazonaws.com"}, - .path = (CanonPath::root / bucket / CanonPath(key)).abs(), + .path = std::move(path), }; }, [&](const ParsedURL::Authority & auth) { // Endpoint is just an authority (hostname/port) + std::vector path{""}; + path.push_back(bucket); + path.insert(path.end(), key.begin(), key.end()); return ParsedURL{ .scheme = std::string{schemeStr}, .authority = auth, - .path = (CanonPath::root / bucket / CanonPath(key)).abs(), + .path = std::move(path), }; }, [&](const ParsedURL & endpointUrl) { // Endpoint is already a ParsedURL (e.g., http://server:9000) + auto path = endpointUrl.path; + path.push_back(bucket); + path.insert(path.end(), key.begin(), key.end()); return ParsedURL{ .scheme = endpointUrl.scheme, .authority = endpointUrl.authority, - .path = (CanonPath(endpointUrl.path) / bucket / CanonPath(key)).abs(), + .path = std::move(path), }; }, }, diff --git a/src/libstore/store-reference.cc b/src/libstore/store-reference.cc index adc60b391..8b4c19600 100644 --- a/src/libstore/store-reference.cc +++ b/src/libstore/store-reference.cc @@ -48,13 +48,11 @@ StoreReference StoreReference::parse(const std::string & uri, const StoreReferen auto parsedUri = parseURL(uri, /*lenient=*/true); params.insert(parsedUri.query.begin(), parsedUri.query.end()); - auto baseURI = parsedUri.authority.value_or(ParsedURL::Authority{}).to_string() + parsedUri.path; - return { .variant = Specified{ .scheme = std::move(parsedUri.scheme), - .authority = std::move(baseURI), + .authority = parsedUri.renderAuthorityAndPath(), }, .params = std::move(params), }; diff --git a/src/libutil-tests/url.cc b/src/libutil-tests/url.cc index 71c416a3b..9c698a943 100644 --- a/src/libutil-tests/url.cc +++ b/src/libutil-tests/url.cc @@ -18,7 +18,7 @@ TEST(parseURL, parsesSimpleHttpUrl) ParsedURL expected{ .scheme = "http", .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, - .path = "/file.tar.gz", + .path = {"", "file.tar.gz"}, .query = (StringMap) {}, .fragment = "", }; @@ -35,7 +35,7 @@ TEST(parseURL, parsesSimpleHttpsUrl) ParsedURL expected{ .scheme = "https", .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, - .path = "/file.tar.gz", + .path = {"", "file.tar.gz"}, .query = (StringMap) {}, .fragment = "", }; @@ -52,7 +52,7 @@ TEST(parseURL, parsesSimpleHttpUrlWithQueryAndFragment) ParsedURL expected{ .scheme = "https", .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, - .path = "/file.tar.gz", + .path = {"", "file.tar.gz"}, .query = 
(StringMap) {{"download", "fast"}, {"when", "now"}}, .fragment = "hello", }; @@ -69,7 +69,7 @@ TEST(parseURL, parsesSimpleHttpUrlWithComplexFragment) ParsedURL expected{ .scheme = "http", .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, - .path = "/file.tar.gz", + .path = {"", "file.tar.gz"}, .query = (StringMap) {{"field", "value"}}, .fragment = "?foo=bar#", }; @@ -85,7 +85,7 @@ TEST(parseURL, parsesFilePlusHttpsUrl) ParsedURL expected{ .scheme = "file+https", .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, - .path = "/video.mp4", + .path = {"", "video.mp4"}, .query = (StringMap) {}, .fragment = "", }; @@ -108,7 +108,7 @@ TEST(parseURL, parseIPv4Address) ParsedURL expected{ .scheme = "http", .authority = Authority{.hostType = HostType::IPv4, .host = "127.0.0.1", .port = 8080}, - .path = "/file.tar.gz", + .path = {"", "file.tar.gz"}, .query = (StringMap) {{"download", "fast"}, {"when", "now"}}, .fragment = "hello", }; @@ -125,7 +125,7 @@ TEST(parseURL, parseScopedRFC6874IPv6Address) ParsedURL expected{ .scheme = "http", .authority = Authority{.hostType = HostType::IPv6, .host = "fe80::818c:da4d:8975:415c\%enp0s25", .port = 8080}, - .path = "", + .path = {""}, .query = (StringMap) {}, .fragment = "", }; @@ -147,7 +147,7 @@ TEST(parseURL, parseIPv6Address) .host = "2a02:8071:8192:c100:311d:192d:81ac:11ea", .port = 8080, }, - .path = "", + .path = {""}, .query = (StringMap) {}, .fragment = "", }; @@ -178,7 +178,7 @@ TEST(parseURL, parseUserPassword) .password = "pass", .port = 8080, }, - .path = "/file.tar.gz", + .path = {"", "file.tar.gz"}, .query = (StringMap) {}, .fragment = "", }; @@ -195,11 +195,12 @@ TEST(parseURL, parseFileURLWithQueryAndFragment) ParsedURL expected{ .scheme = "file", .authority = Authority{}, - .path = "/none/of//your/business", + .path = {"", "none", "of", "", "your", "business"}, .query = (StringMap) {}, .fragment = "", }; + ASSERT_EQ(parsed.renderPath(), "/none/of//your/business"); ASSERT_EQ(parsed, expected); ASSERT_EQ(s, parsed.to_string()); } @@ -212,9 +213,10 @@ TEST(parseURL, parseFileURL) ParsedURL expected{ .scheme = "file", .authority = std::nullopt, - .path = "/none/of/your/business/", + .path = {"", "none", "of", "your", "business", ""}, }; + ASSERT_EQ(parsed.renderPath(), "/none/of/your/business/"); ASSERT_EQ(parsed, expected); ASSERT_EQ(s, parsed.to_string()); } @@ -227,10 +229,11 @@ TEST(parseURL, parseFileURLWithAuthority) ParsedURL expected{ .scheme = "file", .authority = Authority{.host = ""}, - .path = "///of/your/business//", + .path = {"", "", "", "of", "your", "business", "", ""}, }; - ASSERT_EQ(parsed.authority, expected.authority); + ASSERT_EQ(parsed.path, expected.path); + ASSERT_EQ(parsed.renderPath(), "///of/your/business//"); ASSERT_EQ(parsed, expected); ASSERT_EQ(s, parsed.to_string()); } @@ -243,9 +246,10 @@ TEST(parseURL, parseFileURLNoLeadingSlash) ParsedURL expected{ .scheme = "file", .authority = std::nullopt, - .path = "none/of/your/business/", + .path = {"none", "of", "your", "business", ""}, }; + ASSERT_EQ(parsed.renderPath(), "none/of/your/business/"); ASSERT_EQ(parsed, expected); ASSERT_EQ("file:none/of/your/business/", parsed.to_string()); } @@ -258,9 +262,10 @@ TEST(parseURL, parseHttpTrailingSlash) ParsedURL expected{ .scheme = "http", .authority = Authority{.host = "example.com"}, - .path = "/", + .path = {"", ""}, }; + ASSERT_EQ(parsed.renderPath(), "/"); ASSERT_EQ(parsed, expected); ASSERT_EQ(s, parsed.to_string()); } @@ -306,7 +311,7 @@ TEST(parseURL, 
parseFTPUrl) ParsedURL expected{ .scheme = "ftp", .authority = Authority{.hostType = HostType::Name, .host = "ftp.nixos.org"}, - .path = "/downloads/nixos.iso", + .path = {"", "downloads", "nixos.iso"}, .query = (StringMap) {}, .fragment = "", }; @@ -342,7 +347,7 @@ TEST(parseURL, parsesHttpUrlWithEmptyPort) ParsedURL expected{ .scheme = "http", .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, - .path = "/file.tar.gz", + .path = {"", "file.tar.gz"}, .query = (StringMap) {{"foo", "bar"}}, .fragment = "", }; @@ -362,7 +367,7 @@ TEST(parseURLRelative, resolvesRelativePath) ParsedURL expected{ .scheme = "http", .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "example.org"}, - .path = "/dir/subdir/file.txt", + .path = {"", "dir", "subdir", "file.txt"}, .query = {}, .fragment = "", }; @@ -376,7 +381,7 @@ TEST(parseURLRelative, baseUrlIpv6AddressWithoutZoneId) ParsedURL expected{ .scheme = "http", .authority = ParsedURL::Authority{.hostType = HostType::IPv6, .host = "fe80::818c:da4d:8975:415c"}, - .path = "/dir/subdir/file.txt", + .path = {"", "dir", "subdir", "file.txt"}, .query = {}, .fragment = "", }; @@ -390,7 +395,7 @@ TEST(parseURLRelative, resolvesRelativePathIpv6AddressWithZoneId) ParsedURL expected{ .scheme = "http", .authority = Authority{.hostType = HostType::IPv6, .host = "fe80::818c:da4d:8975:415c\%enp0s25", .port = 8080}, - .path = "/dir/subdir/file2.txt", + .path = {"", "dir", "subdir", "file2.txt"}, .query = {}, .fragment = "", }; @@ -405,7 +410,7 @@ TEST(parseURLRelative, resolvesRelativePathWithDot) ParsedURL expected{ .scheme = "http", .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "example.org"}, - .path = "/dir/subdir/file.txt", + .path = {"", "dir", "subdir", "file.txt"}, .query = {}, .fragment = "", }; @@ -419,7 +424,21 @@ TEST(parseURLRelative, resolvesParentDirectory) ParsedURL expected{ .scheme = "http", .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "example.org", .port = 234}, - .path = "/up.txt", + .path = {"", "up.txt"}, + .query = {}, + .fragment = "", + }; + ASSERT_EQ(parsed, expected); +} + +TEST(parseURLRelative, resolvesParentDirectoryNotTrickedByEscapedSlash) +{ + ParsedURL base = parseURL("http://example.org:234/dir\%2Ffirst-trick/another-dir\%2Fsecond-trick/page.html"); + auto parsed = parseURLRelative("../up.txt", base); + ParsedURL expected{ + .scheme = "http", + .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "example.org", .port = 234}, + .path = {"", "dir/first-trick", "up.txt"}, .query = {}, .fragment = "", }; @@ -433,7 +452,7 @@ TEST(parseURLRelative, replacesPathWithAbsoluteRelative) ParsedURL expected{ .scheme = "http", .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "example.org"}, - .path = "/rooted.txt", + .path = {"", "rooted.txt"}, .query = {}, .fragment = "", }; @@ -448,7 +467,7 @@ TEST(parseURLRelative, keepsQueryAndFragmentFromRelative) ParsedURL expected{ .scheme = "https", .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "www.example.org"}, - .path = "/path/other.html", + .path = {"", "path", "other.html"}, .query = {{"x", "1"}, {"y", "2"}}, .fragment = "frag", }; @@ -489,7 +508,7 @@ TEST(parseURLRelative, emptyRelative) ParsedURL expected{ .scheme = "https", .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "www.example.org"}, - .path = "/path/index.html", + .path = {"", "path", "index.html"}, .query = {{"a b", "5 6"}, {"x y", "34"}}, .fragment = "", }; @@ 
-504,7 +523,7 @@ TEST(parseURLRelative, fragmentRelative) ParsedURL expected{ .scheme = "https", .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "www.example.org"}, - .path = "/path/index.html", + .path = {"", "path", "index.html"}, .query = {{"a b", "5 6"}, {"x y", "34"}}, .fragment = "frag2", }; @@ -518,7 +537,7 @@ TEST(parseURLRelative, queryRelative) ParsedURL expected{ .scheme = "https", .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "www.example.org"}, - .path = "/path/index.html", + .path = {"", "path", "index.html"}, .query = {{"asdf qwer", "1 2 3"}}, .fragment = "", }; @@ -532,7 +551,7 @@ TEST(parseURLRelative, queryFragmentRelative) ParsedURL expected{ .scheme = "https", .authority = ParsedURL::Authority{.hostType = HostType::Name, .host = "www.example.org"}, - .path = "/path/index.html", + .path = {"", "path", "index.html"}, .query = {{"asdf qwer", "1 2 3"}}, .fragment = "frag2", }; @@ -648,6 +667,25 @@ TEST(percentEncode, yen) ASSERT_EQ(percentDecode(e), s); } +TEST(parseURL, gitlabNamespacedProjectUrls) +{ + // Test GitLab URL patterns with namespaced projects + // These should preserve %2F encoding in the path + auto s = "https://gitlab.example.com/api/v4/projects/group%2Fsubgroup%2Fproject/repository/archive.tar.gz"; + auto parsed = parseURL(s); + + ParsedURL expected{ + .scheme = "https", + .authority = Authority{.hostType = HostType::Name, .host = "gitlab.example.com"}, + .path = {"", "api", "v4", "projects", "group/subgroup/project", "repository", "archive.tar.gz"}, + .query = {}, + .fragment = "", + }; + + ASSERT_EQ(parsed, expected); + ASSERT_EQ(s, parsed.to_string()); +} + TEST(nix, isValidSchemeName) { ASSERT_TRUE(isValidSchemeName("http")); diff --git a/src/libutil/include/nix/util/url.hh b/src/libutil/include/nix/util/url.hh index 54bd1e533..1d9797551 100644 --- a/src/libutil/include/nix/util/url.hh +++ b/src/libutil/include/nix/util/url.hh @@ -1,7 +1,10 @@ #pragma once ///@file +#include + #include "nix/util/error.hh" +#include "nix/util/canon-path.hh" namespace nix { @@ -65,6 +68,7 @@ struct ParsedURL }; std::string scheme; + /** * Optional parsed authority component of the URL. * @@ -75,16 +79,155 @@ struct ParsedURL * part of the URL. */ std::optional authority; - std::string path; + + /** + * @note Unlike Unix paths, URLs provide a way to escape path + * separators, in the form of the `%2F` encoding of `/`. That means + * that if one percent-decodes the path into a single string, that + * decoding will be *lossy*, because `/` and `%2F` both become `/`. + * The right thing to do is instead split up the path on `/`, and + * then percent decode each part. + * + * For an example, the path + * ``` + * foo/bar%2Fbaz/quux + * ``` + * is parsed as + * ``` + * {"foo, "bar/baz", "quux"} + * ``` + * + * We're doing splitting and joining that assumes the separator (`/` in this case) only goes *between* elements. + * + * That means the parsed representation will begin with an empty + * element to make an initial `/`, and will end with an ementy + * element to make a trailing `/`. That means that elements of this + * vector mostly, but *not always*, correspond to segments of the + * path. 
+ * + * Examples: + * + * - ``` + * https://foo.com/bar + * ``` + * has path + * ``` + * {"", "bar"} + * ``` + * + * - ``` + * https://foo.com/bar/ + * ``` + * has path + * ``` + * {"", "bar", ""} + * ``` + * + * - ``` + * https://foo.com//bar/// + * ``` + * has path + * ``` + * {"", "", "bar", "", "", ""} + * ``` + * + * - ``` + * https://foo.com + * ``` + * has path + * ``` + * {""} + * ``` + * + * - ``` + * https://foo.com/ + * ``` + * has path + * ``` + * {"", ""} + * ``` + * + * - ``` + * tel:01234 + * ``` + * has path `{"01234"}` (and no authority) + * + * - ``` + * foo:/01234 + * ``` + * has path `{"", "01234"}` (and no authority) + * + * Note that both trailing and leading slashes are, in general, + * semantically significant. + * + * For trailing slashes, the main example affecting many schemes is + * that `../baz` resolves against a base URL different depending on + * the presence/absence of a trailing slash: + * + * - `https://foo.com/bar` is `https://foo.com/baz` + * + * - `https://foo.com/bar/` is `https://foo.com/bar/baz` + * + * See `parseURLRelative` for more details. + * + * For leading slashes, there are some requirements to be aware of. + * + * - When there is an authority, the path *must* start with a leading + * slash. Otherwise the path will not be separated from the + * authority, and will not round trip though the parser: + * + * ``` + * {.scheme="https", .authority.host = "foo", .path={"bad"}} + * ``` + * will render to `https://foobar`. but that would parse back as as + * ``` + * {.scheme="https", .authority.host = "foobar", .path={}} + * ``` + * + * - When there is no authority, the path must *not* begin with two + * slashes. Otherwise, there will be another parser round trip + * issue: + * + * ``` + * {.scheme="https", .path={"", "", "bad"}} + * ``` + * will render to `https://bad`. but that would parse back as as + * ``` + * {.scheme="https", .authority.host = "bad", .path={}} + * ``` + * + * These invariants will be checked in `to_string` and + * `renderAuthorityAndPath`. + */ + std::vector path; + StringMap query; + std::string fragment; + /** + * Render just the middle part of a URL, without the `//` which + * indicates whether the authority is present. + * + * @note This is kind of an ad-hoc + * operation, but it ends up coming up with some frequency, probably + * due to the current design of `StoreReference` in `nix-store`. + */ + std::string renderAuthorityAndPath() const; + std::string to_string() const; + /** + * Render the path to a string. + * + * @param encode Whether to percent encode path segments. + */ + std::string renderPath(bool encode = false) const; + auto operator<=>(const ParsedURL & other) const noexcept = default; /** - * Remove `.` and `..` path elements. + * Remove `.` and `..` path segments. */ ParsedURL canonicalise(); }; @@ -96,6 +239,22 @@ MakeError(BadURL, Error); std::string percentDecode(std::string_view in); std::string percentEncode(std::string_view s, std::string_view keep = ""); +/** + * Get the path part of the URL as an absolute or relative Path. + * + * @throws if any path component contains an slash (which would have + * been escaped `%2F` in the rendered URL). This is because OS file + * paths have no escape sequences --- file names cannot contain a + * `/`. + */ +Path renderUrlPathEnsureLegal(const std::vector & urlPath); + +/** + * Percent encode path. `%2F` for "interior slashes" is the most + * important. 
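+ *
+ * For instance (matching the definition in `url.cc` later in this
+ * patch), encoding the path
+ * ```
+ * {"", "api", "group/sub"}
+ * ```
+ * yields `/api/group%2Fsub`.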
+ */ +std::string encodeUrlPath(std::span urlPath); + /** * @param lenient @see parseURL */ @@ -114,6 +273,12 @@ std::string encodeQuery(const StringMap & query); * @note IPv6 ZoneId literals (RFC4007) are represented in URIs according to RFC6874. * * @throws BadURL + * + * The WHATWG specification of the URL constructor in Java Script is + * also a useful reference: + * https://url.spec.whatwg.org/#concept-basic-url-parser. Note, however, + * that it includes various scheme-specific normalizations / extra steps + * that we do not implement. */ ParsedURL parseURL(std::string_view url, bool lenient = false); @@ -123,7 +288,11 @@ ParsedURL parseURL(std::string_view url, bool lenient = false); * * This is specified in [IETF RFC 3986, section 5](https://datatracker.ietf.org/doc/html/rfc3986#section-5) * - * Behavior should also match the `new URL(url, base)` JavaScript constructor. + * @throws BadURL + * + * Behavior should also match the `new URL(url, base)` JavaScript + * constructor, except for extra steps specific to the HTTP scheme. See + * `parseURL` for link to the relevant WHATWG standard. */ ParsedURL parseURLRelative(std::string_view url, const ParsedURL & base); diff --git a/src/libutil/url.cc b/src/libutil/url.cc index ff0b7a71b..b9bf0b4f4 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -3,6 +3,7 @@ #include "nix/util/util.hh" #include "nix/util/split.hh" #include "nix/util/canon-path.hh" +#include "nix/util/strings-inline.hh" #include @@ -179,11 +180,14 @@ static ParsedURL fromBoostUrlView(boost::urls::url_view urlView, bool lenient) if (authority && authority->host.size() && transportIsFile) throw BadURL("file:// URL '%s' has unexpected authority '%s'", urlView.buffer(), *authority); - auto path = urlView.path(); /* Does pct-decoding */ auto fragment = urlView.fragment(); /* Does pct-decoding */ - if (transportIsFile && path.empty()) - path = "/"; + boost::core::string_view encodedPath = urlView.encoded_path(); + if (transportIsFile && encodedPath.empty()) + encodedPath = "/"; + + auto path = std::views::transform(splitString>(encodedPath, "/"), percentDecode) + | std::ranges::to>(); /* Get the raw query. Store URI supports smuggling doubly nested queries, where the inner &/? are pct-encoded. */ @@ -192,7 +196,7 @@ static ParsedURL fromBoostUrlView(boost::urls::url_view urlView, bool lenient) return ParsedURL{ .scheme = scheme, .authority = authority, - .path = path, + .path = std::move(path), .query = decodeQuery(query, lenient), .fragment = fragment, }; @@ -215,7 +219,7 @@ try { if (authority.port) resolved.set_port_number(*authority.port); } - resolved.set_path(base.path); + resolved.set_encoded_path(encodeUrlPath(base.path)); resolved.set_encoded_query(encodeQuery(base.query)); resolved.set_fragment(base.fragment); } catch (boost::system::system_error & e) { @@ -291,7 +295,15 @@ try { } const static std::string allowedInQuery = ":@/?"; -const static std::string allowedInPath = ":@/"; +const static std::string allowedInPath = ":@"; + +std::string encodeUrlPath(std::span urlPath) +{ + std::vector encodedPath; + for (auto & p : urlPath) + encodedPath.push_back(percentEncode(p, allowedInPath)); + return concatStringsSep("/", encodedPath); +} std::string encodeQuery(const StringMap & ss) { @@ -308,10 +320,62 @@ std::string encodeQuery(const StringMap & ss) return res; } +Path renderUrlPathEnsureLegal(const std::vector & urlPath) +{ + for (const auto & comp : urlPath) { + /* This is only really valid for UNIX. Windows has more restrictions. 
*/ + if (comp.contains('/')) + throw BadURL("URL path component '%s' contains '/', which is not allowed in file names", comp); + if (comp.contains(char(0))) + throw BadURL("URL path component '%s' contains NUL byte which is not allowed", comp); + } + + return concatStringsSep("/", urlPath); +} + +std::string ParsedURL::renderPath(bool encode) const +{ + if (encode) + return encodeUrlPath(path); + return concatStringsSep("/", path); +} + +std::string ParsedURL::renderAuthorityAndPath() const +{ + std::string res; + /* The following assertions correspond to 3.3. Path [rfc3986]. URL parser + will never violate these properties, but hand-constructed ParsedURLs might. */ + if (authority.has_value()) { + /* If a URI contains an authority component, then the path component + must either be empty or begin with a slash ("/") character. */ + assert(path.empty() || path.front().empty()); + res += authority->to_string(); + } else if (std::ranges::equal(std::views::take(path, 2), std::views::repeat("", 2))) { + /* If a URI does not contain an authority component, then the path cannot begin + with two slash characters ("//") */ + unreachable(); + } + res += encodeUrlPath(path); + return res; +} + std::string ParsedURL::to_string() const { - return scheme + ":" + (authority ? "//" + authority->to_string() : "") + percentEncode(path, allowedInPath) - + (query.empty() ? "" : "?" + encodeQuery(query)) + (fragment.empty() ? "" : "#" + percentEncode(fragment)); + std::string res; + res += scheme; + res += ":"; + if (authority.has_value()) + res += "//"; + res += renderAuthorityAndPath(); + if (!query.empty()) { + res += "?"; + res += encodeQuery(query); + } + if (!fragment.empty()) { + res += "#"; + res += percentEncode(fragment); + } + return res; } std::ostream & operator<<(std::ostream & os, const ParsedURL & url) @@ -323,7 +387,7 @@ std::ostream & operator<<(std::ostream & os, const ParsedURL & url) ParsedURL ParsedURL::canonicalise() { ParsedURL res(*this); - res.path = CanonPath(res.path).abs(); + res.path = splitString>(CanonPath(renderPath()).abs(), "/"); return res; } @@ -352,7 +416,11 @@ ParsedURL fixGitURL(const std::string & url) if (hasPrefix(url, "file:")) return parseURL(url); if (url.find("://") == std::string::npos) { - return (ParsedURL{.scheme = "file", .authority = ParsedURL::Authority{}, .path = url}); + return ParsedURL{ + .scheme = "file", + .authority = ParsedURL::Authority{}, + .path = splitString>(url, "/"), + }; } return parseURL(url); } From 53c31c8b2956c1510026bb90132b817ae5b86217 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 27 Aug 2025 15:52:51 -0400 Subject: [PATCH 283/382] Factor out a new `DesugaredEnv` from `DerivationBuildingGoal` Now we have better separation of the core logic --- an integral part of the store layer spec even --- from the goal mechanism and other minutiae. 
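To illustrate the consumer-side shape this gives us, a sketch only (names such as `applyDesugaredEnv`, `buildDirInSandbox` and `writeFile` are placeholders, not part of this patch; it assumes `<functional>` plus the new `derivation-env-desugar.hh`):

```
// A builder only has to walk the two maps of a DesugaredEnv; structured
// attrs, passAsFile and exportReferencesGraph were already desugared
// away by DesugaredEnv::create().
void applyDesugaredEnv(
    const DesugaredEnv & desugared,
    const std::string & buildDirInSandbox,
    StringMap & env,
    std::function<void(const std::string &, const std::string &)> writeFile)
{
    for (auto & [name, entry] : desugared.variables)
        // prependBuildDirectory means `value` names a file in the build
        // directory, so the variable has to point into that directory.
        env[name] = entry.prependBuildDirectory
            ? buildDirInSandbox + "/" + entry.value
            : entry.value;

    for (auto & [fileName, contents] : desugared.extraFiles)
        writeFile(fileName, contents);
}
```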
Co-authored-by: Jeremy Kolb --- .../build/derivation-building-goal.cc | 56 +------------ src/libstore/build/derivation-check.hh | 3 + src/libstore/build/derivation-env-desugar.cc | 59 +++++++++++++ .../nix/store/build/derivation-builder.hh | 30 +------ .../nix/store/build/derivation-env-desugar.hh | 83 +++++++++++++++++++ src/libstore/include/nix/store/meson.build | 1 + src/libstore/meson.build | 1 + src/libstore/unix/build/derivation-builder.cc | 15 ++-- 8 files changed, 158 insertions(+), 90 deletions(-) create mode 100644 src/libstore/build/derivation-env-desugar.cc create mode 100644 src/libstore/include/nix/store/build/derivation-env-desugar.hh diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index c290852fc..3d6595012 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -1,4 +1,5 @@ #include "nix/store/build/derivation-building-goal.hh" +#include "nix/store/build/derivation-env-desugar.hh" #include "nix/store/build/derivation-trampoline-goal.hh" #ifndef _WIN32 // TODO enable build hook on Windows # include "nix/store/build/hook-instance.hh" @@ -681,8 +682,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() assert(localStoreP); decltype(DerivationBuilderParams::defaultPathsInChroot) defaultPathsInChroot = settings.sandboxPaths.get(); - decltype(DerivationBuilderParams::finalEnv) finalEnv; - decltype(DerivationBuilderParams::extraFiles) extraFiles; + DesugaredEnv desugaredEnv; /* Add the closure of store paths to the chroot. */ StorePathSet closure; @@ -701,54 +701,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() } try { - if (drv->structuredAttrs) { - auto json = drv->structuredAttrs->prepareStructuredAttrs( - worker.store, *drvOptions, inputPaths, drv->outputs); - - finalEnv.insert_or_assign( - "NIX_ATTRS_SH_FILE", - DerivationBuilderParams::EnvEntry{ - .nameOfPassAsFile = ".attrs.sh", - .value = StructuredAttrs::writeShell(json), - }); - finalEnv.insert_or_assign( - "NIX_ATTRS_JSON_FILE", - DerivationBuilderParams::EnvEntry{ - .nameOfPassAsFile = ".attrs.json", - .value = json.dump(), - }); - } else { - /* In non-structured mode, set all bindings either directory in the - environment or via a file, as specified by - `DerivationOptions::passAsFile`. */ - for (auto & [envName, envValue] : drv->env) { - if (drvOptions->passAsFile.find(envName) == drvOptions->passAsFile.end()) { - finalEnv.insert_or_assign( - envName, - DerivationBuilderParams::EnvEntry{ - .nameOfPassAsFile = std::nullopt, - .value = envValue, - }); - } else { - auto hash = hashString(HashAlgorithm::SHA256, envName); - finalEnv.insert_or_assign( - envName + "Path", - DerivationBuilderParams::EnvEntry{ - .nameOfPassAsFile = ".attr-" + hash.to_string(HashFormat::Nix32, false), - .value = envValue, - }); - } - } - - /* Handle exportReferencesGraph(), if set. */ - for (auto & [fileName, storePaths] : drvOptions->getParsedExportReferencesGraph(worker.store)) { - /* Write closure info to . 
*/ - extraFiles.insert_or_assign( - fileName, - worker.store.makeValidityRegistration( - worker.store.exportReferences(storePaths, inputPaths), false, false)); - } - } + desugaredEnv = DesugaredEnv::create(worker.store, *drv, *drvOptions, inputPaths); } catch (BuildError & e) { outputLocks.unlock(); worker.permanentFailure = true; @@ -770,8 +723,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() .buildMode = buildMode, .defaultPathsInChroot = std::move(defaultPathsInChroot), .systemFeatures = worker.store.config.systemFeatures.get(), - .finalEnv = std::move(finalEnv), - .extraFiles = std::move(extraFiles), + .desugaredEnv = std::move(desugaredEnv), }); } diff --git a/src/libstore/build/derivation-check.hh b/src/libstore/build/derivation-check.hh index 249e176c5..25310bd83 100644 --- a/src/libstore/build/derivation-check.hh +++ b/src/libstore/build/derivation-check.hh @@ -1,3 +1,6 @@ +#pragma once +///@file + #include "nix/store/derivations.hh" #include "nix/store/derivation-options.hh" #include "nix/store/path-info.hh" diff --git a/src/libstore/build/derivation-env-desugar.cc b/src/libstore/build/derivation-env-desugar.cc new file mode 100644 index 000000000..d6e002d91 --- /dev/null +++ b/src/libstore/build/derivation-env-desugar.cc @@ -0,0 +1,59 @@ +#include "nix/store/build/derivation-env-desugar.hh" +#include "nix/store/store-api.hh" +#include "nix/store/derivations.hh" +#include "nix/store/derivation-options.hh" + +namespace nix { + +std::string & DesugaredEnv::atFileEnvPair(std::string_view name, std::string fileName) +{ + auto & ret = extraFiles[fileName]; + variables.insert_or_assign( + std::string{name}, + EnvEntry{ + .prependBuildDirectory = true, + .value = std::move(fileName), + }); + return ret; +} + +DesugaredEnv DesugaredEnv::create( + Store & store, const Derivation & drv, const DerivationOptions & drvOptions, const StorePathSet & inputPaths) +{ + DesugaredEnv res; + + if (drv.structuredAttrs) { + auto json = drv.structuredAttrs->prepareStructuredAttrs(store, drvOptions, inputPaths, drv.outputs); + res.atFileEnvPair("NIX_ATTRS_SH_FILE", ".attrs.sh") = StructuredAttrs::writeShell(json); + res.atFileEnvPair("NIX_ATTRS_JSON_FILE", ".attrs.json") = json.dump(); + } else { + /* In non-structured mode, set all bindings either directory in the + environment or via a file, as specified by + `DerivationOptions::passAsFile`. */ + for (auto & [envName, envValue] : drv.env) { + if (!drvOptions.passAsFile.contains(envName)) { + res.variables.insert_or_assign( + envName, + EnvEntry{ + .value = envValue, + }); + } else { + res.atFileEnvPair( + envName + "Path", + ".attr-" + hashString(HashAlgorithm::SHA256, envName).to_string(HashFormat::Nix32, false)) = + envValue; + } + } + + /* Handle exportReferencesGraph(), if set. */ + for (auto & [fileName, storePaths] : drvOptions.getParsedExportReferencesGraph(store)) { + /* Write closure info to . 
*/ + res.extraFiles.insert_or_assign( + fileName, store.makeValidityRegistration(store.exportReferences(storePaths, inputPaths), false, false)); + } + } + + return res; +} + +} // namespace nix diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index f00d4db25..94a3ffae8 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -8,6 +8,7 @@ #include "nix/store/parsed-derivations.hh" #include "nix/util/processes.hh" #include "nix/store/restricted-store.hh" +#include "nix/store/build/derivation-env-desugar.hh" namespace nix { @@ -73,34 +74,7 @@ struct DerivationBuilderParams */ StringSet systemFeatures; - struct EnvEntry - { - /** - * Actually, this should be passed as a file, but with a custom - * name (rather than hash-derived name for usual "pass as file"). - */ - std::optional nameOfPassAsFile; - - /** - * String value of env var, or contents of the file - */ - std::string value; - }; - - /** - * The final environment variables to additionally set, possibly - * indirectly via a file. - * - * This is used by the caller to desugar the "structured attrs" - * mechanism, so `DerivationBuilder` doesn't need to know about it. - */ - std::map> finalEnv; - - /** - * Inserted in the temp dir, but no file names placed in env, unlike - * `EnvEntry::nameOfPassAsFile` above. - */ - StringMap extraFiles; + DesugaredEnv desugaredEnv; }; /** diff --git a/src/libstore/include/nix/store/build/derivation-env-desugar.hh b/src/libstore/include/nix/store/build/derivation-env-desugar.hh new file mode 100644 index 000000000..6e2efa6bb --- /dev/null +++ b/src/libstore/include/nix/store/build/derivation-env-desugar.hh @@ -0,0 +1,83 @@ +#pragma once +///@file + +#include "nix/util/types.hh" +#include "nix/store/path.hh" + +namespace nix { + +class Store; +struct Derivation; +struct DerivationOptions; + +/** + * Derivations claim to "just" specify their environment variables, but + * actually support a number of different features, such as "structured + * attrs", "pass as file", and "export references graph", so things are + * more complicated than they appear. + * + * The good news is that we can simplify all that to the following view, + * where environment variables and extra files are specified exactly, + * with no special cases. + * + * Because we have `DesugaredEnv`, `DerivationBuilder` doesn't need to + * know about any of those above features, and their special cases. + */ +struct DesugaredEnv +{ + struct EnvEntry + { + /** + * Whether to prepend the in-sandbox path of the build + * directory to `value`. This is useful when the env var + * should point to a file visible to the builder. + */ + bool prependBuildDirectory = false; + + /** + * String value of env var, or contents of the file. + */ + std::string value; + }; + + /** + * The final environment variables to set. + */ + std::map> variables; + + /** + * Extra files to be placed in the build directory. + * + * @note `EnvEntry::prependBuildDirectory` can be used to refer to + * those files without knowing what the build directory is. + */ + StringMap extraFiles; + + /** + * A common case is to define an environment variable that points to + * a file, which contains some contents. + * + * In bash: + * ``` + * export VAR=FILE_NAME + * echo CONTENTS >FILE_NAME + * ``` + * + * This function assists in doing both parts, so the file name is + * kept in sync.
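+ *
+ * For example, `DesugaredEnv::create` uses it as:
+ * ```
+ * res.atFileEnvPair("NIX_ATTRS_JSON_FILE", ".attrs.json") = json.dump();
+ * ```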
+ */ + std::string & atFileEnvPair(std::string_view name, std::string fileName); + + /** + * Given a (resolved) derivation, its options, and the closure of + * its inputs (which we can get since the derivation is resolved), + * desugar the environment to create a `DesguaredEnv`. + * + * @todo drvOptions will go away as a separate argument when it is + * just part of `Derivation`. + */ + static DesugaredEnv create( + Store & store, const Derivation & drv, const DerivationOptions & drvOptions, const StorePathSet & inputPaths); +}; + +} // namespace nix diff --git a/src/libstore/include/nix/store/meson.build b/src/libstore/include/nix/store/meson.build index cba5d9ca5..776c7521d 100644 --- a/src/libstore/include/nix/store/meson.build +++ b/src/libstore/include/nix/store/meson.build @@ -15,6 +15,7 @@ headers = [ config_pub_h ] + files( 'build/derivation-builder.hh', 'build/derivation-building-goal.hh', 'build/derivation-building-misc.hh', + 'build/derivation-env-desugar.hh', 'build/derivation-goal.hh', 'build/derivation-trampoline-goal.hh', 'build/drv-output-substitution-goal.hh', diff --git a/src/libstore/meson.build b/src/libstore/meson.build index ca8eac12b..2b0106ff3 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -266,6 +266,7 @@ sources = files( 'build-result.cc', 'build/derivation-building-goal.cc', 'build/derivation-check.cc', + 'build/derivation-env-desugar.cc', 'build/derivation-goal.cc', 'build/derivation-trampoline-goal.cc', 'build/drv-output-substitution-goal.cc', diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index bd5f975fb..3140c716d 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -17,6 +17,7 @@ #include "nix/store/restricted-store.hh" #include "nix/store/user-lock.hh" #include "nix/store/globals.hh" +#include "nix/store/build/derivation-env-desugar.hh" #include @@ -992,19 +993,13 @@ void DerivationBuilderImpl::initEnv() /* Write the final environment. Note that this is intentionally *not* `drv.env`, because we've desugared things like like "passAFile", "expandReferencesGraph", structured attrs, etc. */ - for (const auto & [name, info] : finalEnv) { - if (info.nameOfPassAsFile) { - auto & fileName = *info.nameOfPassAsFile; - writeBuilderFile(fileName, rewriteStrings(info.value, inputRewrites)); - env[name] = tmpDirInSandbox() + "/" + fileName; - } else { - env[name] = info.value; - } + for (const auto & [name, info] : desugaredEnv.variables) { + env[name] = info.prependBuildDirectory ? tmpDirInSandbox() + "/" + info.value : info.value; } /* Add extra files, similar to `finalEnv` */ - for (const auto & [fileName, value] : extraFiles) { - writeBuilderFile(fileName, value); + for (const auto & [fileName, value] : desugaredEnv.extraFiles) { + writeBuilderFile(fileName, rewriteStrings(value, inputRewrites)); } /* For convenience, set an environment pointing to the top build From 1f607b5def1b06003578239484c3fe250c267faf Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 29 Aug 2025 00:02:11 +0300 Subject: [PATCH 284/382] libutil: Try to call std::terminate for panic We now have a terminate handler that prints a stack trace, which is useful to have when encountering an unreachable. 
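Roughly, std::terminate() routes through the handler installed with std::set_terminate, whereas abort() raises SIGABRT immediately. A minimal stand-alone sketch (the handler body here is hypothetical and only stands in for the real one in libutil):

```
#include <cstdlib>
#include <exception>

[[noreturn]] void traceAndDie() noexcept
{
    // The real handler would print a stack trace here before giving up.
    std::abort();
}

int main()
{
    std::set_terminate(traceAndDie);
    std::terminate(); // runs traceAndDie; a bare abort() would bypass it
}
```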
--- src/libutil/error.cc | 2 +- src/libutil/include/nix/util/error.hh | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libutil/error.cc b/src/libutil/error.cc index b50b1f3be..c36026f6c 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -436,7 +436,7 @@ void panic(std::string_view msg) writeErr("\n\n" ANSI_RED "terminating due to unexpected unrecoverable internal error: " ANSI_NORMAL); writeErr(msg); writeErr("\n"); - abort(); + std::terminate(); } void panic(const char * file, int line, const char * func) diff --git a/src/libutil/include/nix/util/error.hh b/src/libutil/include/nix/util/error.hh index bd21e02d3..549116c4d 100644 --- a/src/libutil/include/nix/util/error.hh +++ b/src/libutil/include/nix/util/error.hh @@ -299,20 +299,20 @@ using NativeSysError = void throwExceptionSelfCheck(); /** - * Print a message and abort(). + * Print a message and std::terminate(). */ [[noreturn]] void panic(std::string_view msg); /** - * Print a basic error message with source position and abort(). + * Print a basic error message with source position and std::terminate(). * Use the unreachable() macro to call this. */ [[noreturn]] void panic(const char * file, int line, const char * func); /** - * Print a basic error message with source position and abort(). + * Print a basic error message with source position and std::terminate(). * * @note: This assumes that the logger is operational */ From d59b959c8724510532e0beb9d8337a8bb864fb9a Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 29 Aug 2025 00:21:04 +0300 Subject: [PATCH 285/382] libutil: Use std::source_location for unreachable Make unreachable a function instead of a macro, since C++20 provides a convenience class as a replacement for older __FILE__, __LINE__ macros. --- src/libutil/error.cc | 11 +++++++++-- src/libutil/include/nix/util/error.hh | 10 ++-------- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/src/libutil/error.cc b/src/libutil/error.cc index c36026f6c..35e42823c 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -6,6 +6,7 @@ #include "nix/util/terminal.hh" #include "nix/util/position.hh" +#include #include #include #include "nix/util/serialise.hh" @@ -439,10 +440,16 @@ void panic(std::string_view msg) std::terminate(); } -void panic(const char * file, int line, const char * func) +void unreachable(std::source_location loc) { char buf[512]; - int n = snprintf(buf, sizeof(buf), "Unexpected condition in %s at %s:%d", func, file, line); + int n = snprintf( + buf, + sizeof(buf), + "Unexpected condition in %s at %s:%" PRIuLEAST32, + loc.function_name(), + loc.file_name(), + loc.line()); if (n < 0) panic("Unexpected condition and could not format error message"); panic(std::string_view(buf, std::min(static_cast(sizeof(buf)), n))); diff --git a/src/libutil/include/nix/util/error.hh b/src/libutil/include/nix/util/error.hh index 549116c4d..e564ca5b9 100644 --- a/src/libutil/include/nix/util/error.hh +++ b/src/libutil/include/nix/util/error.hh @@ -22,6 +22,7 @@ #include #include #include +#include #include #include @@ -304,18 +305,11 @@ void throwExceptionSelfCheck(); [[noreturn]] void panic(std::string_view msg); -/** - * Print a basic error message with source position and std::terminate(). - * Use the unreachable() macro to call this. - */ -[[noreturn]] -void panic(const char * file, int line, const char * func); - /** * Print a basic error message with source position and std::terminate(). 
* * @note: This assumes that the logger is operational */ -#define unreachable() (::nix::panic(__FILE__, __LINE__, __func__)) +[[gnu::noinline, gnu::cold, noreturn]] void unreachable(std::source_location loc = std::source_location::current()); } // namespace nix From 8825bfa7fe9acdf549faafda6242c3cee6f281de Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 28 Aug 2025 17:34:09 -0400 Subject: [PATCH 286/382] Properly separate builder failure content and presentation Before, we had a very ugly `appendLogTailErrorMsg` callback. Now, we instead have a `fixupBuilderFailureErrorMessage` that is just used by `DerivationBuildingGoal`, and `DerivationBuilder` just returns the raw data needed by it. --- .../build/derivation-building-goal.cc | 34 ++++++++++--------- .../nix/store/build/derivation-builder.hh | 25 ++++++++++++-- .../store/build/derivation-building-goal.hh | 3 +- src/libstore/unix/build/derivation-builder.cc | 20 +++-------- 4 files changed, 48 insertions(+), 34 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index b1920cadb..6aab48a80 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -632,11 +632,6 @@ Goal::Co DerivationBuildingGoal::tryToBuild() { goal.closeLogFile(); } - - void appendLogTailErrorMsg(std::string & msg) override - { - goal.appendLogTailErrorMsg(msg); - } }; auto * localStoreP = dynamic_cast(&worker.store); @@ -773,6 +768,9 @@ Goal::Co DerivationBuildingGoal::tryToBuild() SingleDrvOutputs builtOutputs; try { builtOutputs = builder->unprepareBuild(); + } catch (BuilderFailureError & e) { + outputLocks.unlock(); + co_return doneFailure(fixupBuilderFailureErrorMessage(std::move(e))); } catch (BuildError & e) { outputLocks.unlock(); // Allow selecting a subset of enum values @@ -883,8 +881,16 @@ static void runPostBuildHook( }); } -void DerivationBuildingGoal::appendLogTailErrorMsg(std::string & msg) +BuildError DerivationBuildingGoal::fixupBuilderFailureErrorMessage(BuilderFailureError e) { + auto msg = + fmt("Cannot build '%s'.\n" + "Reason: " ANSI_RED "builder %s" ANSI_NORMAL ".", + Magenta(worker.store.printStorePath(drvPath)), + statusToString(e.builderStatus)); + + msg += showKnownOutputs(worker.store, *drv); + if (!logger->isVerbose() && !logTail.empty()) { msg += fmt("\nLast %d log lines:\n", logTail.size()); for (auto & line : logTail) { @@ -901,6 +907,10 @@ void DerivationBuildingGoal::appendLogTailErrorMsg(std::string & msg) nixLogCommand, worker.store.printStorePath(drvPath)); } + + msg += e.extraMsgAfter; + + return BuildError{e.status, msg}; } Goal::Co DerivationBuildingGoal::hookDone() @@ -941,21 +951,13 @@ Goal::Co DerivationBuildingGoal::hookDone() /* Check the exit status. */ if (!statusOk(status)) { - auto msg = - fmt("Cannot build '%s'.\n" - "Reason: " ANSI_RED "builder %s" ANSI_NORMAL ".", - Magenta(worker.store.printStorePath(drvPath)), - statusToString(status)); - - msg += showKnownOutputs(worker.store, *drv); - - appendLogTailErrorMsg(msg); + auto e = fixupBuilderFailureErrorMessage({BuildResult::MiscFailure, status, ""}); outputLocks.unlock(); /* TODO (once again) support fine-grained error codes, see issue #12641.
*/ - co_return doneFailure(BuildError{BuildResult::MiscFailure, msg}); + co_return doneFailure(std::move(e)); } /* Compute the FS closure of the outputs and register them as diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index a373c4729..4a3993b83 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -11,6 +11,29 @@ namespace nix { +/** + * Denotes a build failure that stemmed from the builder exiting with a + * failing exit status. + */ +struct BuilderFailureError : BuildError +{ + int builderStatus; + + std::string extraMsgAfter; + + BuilderFailureError(BuildResult::Status status, int builderStatus, std::string extraMsgAfter) + : BuildError{ + status, + /* No message for now, because the caller will make one for + us, with extra context */ + "", + } + , builderStatus{std::move(builderStatus)} + , extraMsgAfter{std::move(extraMsgAfter)} + { + } +}; + /** * Stuff we need to pass to initChild(). */ @@ -120,8 +143,6 @@ struct DerivationBuilderCallbacks */ virtual void closeLogFile() = 0; - virtual void appendLogTailErrorMsg(std::string & msg) = 0; - /** * Hook up `builderOut` to some mechanism to ingest the log * diff --git a/src/libstore/include/nix/store/build/derivation-building-goal.hh b/src/libstore/include/nix/store/build/derivation-building-goal.hh index 38f0fc7bf..162cf14ad 100644 --- a/src/libstore/include/nix/store/build/derivation-building-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-building-goal.hh @@ -14,6 +14,7 @@ namespace nix { using std::map; +struct BuilderFailureError; #ifndef _WIN32 // TODO enable build hook on Windows struct HookInstance; struct DerivationBuilder; @@ -174,7 +175,7 @@ struct DerivationBuildingGoal : public Goal Done doneFailure(BuildError ex); - void appendLogTailErrorMsg(std::string & msg); + BuildError fixupBuilderFailureErrorMessage(BuilderFailureError msg); JobCategory jobCategory() const override { diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index bf99c4c1a..60509560d 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -534,26 +534,16 @@ SingleDrvOutputs DerivationBuilderImpl::unprepareBuild() /* Check the exit status. */ if (!statusOk(status)) { + /* Check *before* cleaning up. */ bool diskFull = decideWhetherDiskFull(); cleanupBuild(false); - auto msg = - fmt("Cannot build '%s'.\n" - "Reason: " ANSI_RED "builder %s" ANSI_NORMAL ".", - Magenta(store.printStorePath(drvPath)), - statusToString(status)); - - msg += showKnownOutputs(store, drv); - - miscMethods->appendLogTailErrorMsg(msg); - - if (diskFull) - msg += "\nnote: build failure may have been caused by lack of free disk space"; - - throw BuildError( + throw BuilderFailureError{ !derivationType.isSandboxed() || diskFull ? BuildResult::TransientFailure : BuildResult::PermanentFailure, - msg); + status, + diskFull ?
"\nnote: build failure may have been caused by lack of free disk space" : "", + }; } /* Compute the FS closure of the outputs and register them as From 76125f8eb1705ac3230acf134961d6e87da144f3 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 29 Aug 2025 13:15:35 -0400 Subject: [PATCH 287/382] Get rid of `Finally` in `DerivationBuilderImpl::unprepareBuild` Calling `reset` on this `std::optional` field of `DerivationBuilderImpl` is also what the (automatically created) destructor of `DerivationBuilderImpl` will do. We should be making sure that the derivation builder is cleaned up by the goal anyways, and if we do that, then this `Finally` is no longer needed. --- src/libstore/build/derivation-building-goal.cc | 3 +++ src/libstore/unix/build/derivation-builder.cc | 8 -------- 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 6aab48a80..4497a6070 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -769,9 +769,11 @@ Goal::Co DerivationBuildingGoal::tryToBuild() try { builtOutputs = builder->unprepareBuild(); } catch (BuilderFailureError & e) { + builder.reset(); outputLocks.unlock(); co_return doneFailure(fixupBuilderFailureErrorMessage(std::move(e))); } catch (BuildError & e) { + builder.reset(); outputLocks.unlock(); // Allow selecting a subset of enum values # pragma GCC diagnostic push @@ -796,6 +798,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() co_return doneFailure(std::move(e)); } { + builder.reset(); StorePathSet outputPaths; for (auto & [_, output] : builtOutputs) { // for sake of `bmRepair` diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 60509560d..f837efe5a 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -484,14 +484,6 @@ bool DerivationBuilderImpl::prepareBuild() SingleDrvOutputs DerivationBuilderImpl::unprepareBuild() { - // FIXME: get rid of this, rely on RAII. - Finally releaseBuildUser([&]() { - /* Release the build user at the end of this function. We don't do - it right away because we don't want another build grabbing this - uid and then messing around with our output. */ - buildUser.reset(); - }); - /* Since we got an EOF on the logger pipe, the builder is presumed to have terminated. In fact, the builder could also have simply have closed its end of the pipe, so just to be sure, From d7ed86ceb1af865592435c3672a39677be438d47 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 29 Aug 2025 16:10:25 -0400 Subject: [PATCH 288/382] Move deleting redirected outputs in to `cleanupBuild` It is only done in the `force = true` case, and the only `cleanupBuild(true)` call is right after where it used to be, so this has the exact same behavior as before. --- src/libstore/unix/build/derivation-builder.cc | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index f837efe5a..b81deaddc 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -542,10 +542,6 @@ SingleDrvOutputs DerivationBuilderImpl::unprepareBuild() being valid. */ auto builtOutputs = registerOutputs(); - /* Delete unused redirected outputs (when doing hash rewriting). 
*/ - for (auto & i : redirectedOutputs) - deletePath(store.Store::toRealPath(i.second)); - cleanupBuild(true); return builtOutputs; @@ -1855,6 +1851,12 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() void DerivationBuilderImpl::cleanupBuild(bool force) { + if (force) { + /* Delete unused redirected outputs (when doing hash rewriting). */ + for (auto & i : redirectedOutputs) + deletePath(store.Store::toRealPath(i.second)); + } + if (topTmpDir != "") { /* As an extra precaution, even in the event of `deletePath` failing to * clean up, the `tmpDir` will be chowned as if we were to move From b6f98b52a4b22c5d349266175dd8395c5c6b8f6f Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sat, 30 Aug 2025 00:03:54 +0300 Subject: [PATCH 289/382] nix/develop: Fix misleading ignored error when run with --arg/--argstr This would print erroneous and misleading diagnostics like: > error (ignored): error: '--arg' and '--argstr' are incompatible with flakes When run with --expr/--file. Since this installable is used to get the bash package it doesn't make sense to check this. --- src/nix/develop.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix/develop.cc b/src/nix/develop.cc index f59dc5bee..ed25e655d 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -647,7 +647,7 @@ struct CmdDevelop : Common, MixEnvironment nixpkgs = i->nixpkgsFlakeRef(); auto bashInstallable = make_ref( - this, + nullptr, //< Don't barf when the command is run with --arg/--argstr state, std::move(nixpkgs), "bashInteractive", From a8c4cfae26da270a8554807993b29009cc9f805f Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 29 Aug 2025 17:49:11 -0400 Subject: [PATCH 290/382] `DerivationBuildingGoal::done*` restore `outputLocks.unlock()` This was accidentally removed in 169033001d8f9ca44d7324446cfc93932c380295. --- src/libstore/build/derivation-building-goal.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index e8df06564..a15f6f35f 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -1304,6 +1304,7 @@ SingleDrvOutputs DerivationBuildingGoal::assertPathValidity() Goal::Done DerivationBuildingGoal::doneSuccess(BuildResult::Status status, SingleDrvOutputs builtOutputs) { + outputLocks.unlock(); buildResult.status = status; assert(buildResult.success()); @@ -1321,6 +1322,7 @@ Goal::Done DerivationBuildingGoal::doneSuccess(BuildResult::Status status, Singl Goal::Done DerivationBuildingGoal::doneFailure(BuildError ex) { + outputLocks.unlock(); buildResult.status = ex.status; buildResult.errorMsg = fmt("%s", Uncolored(ex.info().msg)); if (buildResult.status == BuildResult::TimedOut) From 3ef3f525c35c84b90c5a0f4c07ffe53b2291973c Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sat, 30 Aug 2025 01:26:51 +0300 Subject: [PATCH 291/382] libflake: Fix flake id flake refs with revisions Starting from c436b7a32afaf01d62f828697ddf5c49d4f8678c this used to lead to assertion failures like: > std::string nix::ParsedURL::renderAuthorityAndPath() const: Assertion `path.empty() || path.front().empty()' failed. This has the bugfix for the issue and regressions tests so that this gets properly tested in the future. 
--- src/libflake-tests/flakeref.cc | 85 ++++++++++++++++++++++++++++++++++ src/libflake/flakeref.cc | 2 +- 2 files changed, 86 insertions(+), 1 deletion(-) diff --git a/src/libflake-tests/flakeref.cc b/src/libflake-tests/flakeref.cc index 404d7590a..3636d3e98 100644 --- a/src/libflake-tests/flakeref.cc +++ b/src/libflake-tests/flakeref.cc @@ -2,6 +2,7 @@ #include "nix/fetchers/fetch-settings.hh" #include "nix/flake/flakeref.hh" +#include "nix/fetchers/attrs.hh" namespace nix { @@ -90,6 +91,90 @@ TEST(parseFlakeRef, GitArchiveInput) } } +struct InputFromURLTestCase +{ + std::string url; + fetchers::Attrs attrs; + std::string description; + std::string expectedUrl = url; +}; + +class InputFromURLTest : public ::testing::WithParamInterface, public ::testing::Test +{}; + +TEST_P(InputFromURLTest, attrsAreCorrectAndRoundTrips) +{ + experimentalFeatureSettings.experimentalFeatures.get().insert(Xp::Flakes); + fetchers::Settings fetchSettings; + + const auto & testCase = GetParam(); + + auto flakeref = parseFlakeRef(fetchSettings, testCase.url); + + EXPECT_EQ(flakeref.toAttrs(), testCase.attrs); + EXPECT_EQ(flakeref.to_string(), testCase.expectedUrl); + + auto input = fetchers::Input::fromURL(fetchSettings, flakeref.to_string()); + + EXPECT_EQ(input.toURLString(), testCase.expectedUrl); + EXPECT_EQ(input.toAttrs(), testCase.attrs); + + // Round-trip check. + auto input2 = fetchers::Input::fromURL(fetchSettings, input.toURLString()); + EXPECT_EQ(input, input2); + EXPECT_EQ(input.toURLString(), input2.toURLString()); +} + +using fetchers::Attr; + +INSTANTIATE_TEST_SUITE_P( + InputFromURL, + InputFromURLTest, + ::testing::Values( + InputFromURLTestCase{ + .url = "flake:nixpkgs", + .attrs = + { + {"id", Attr("nixpkgs")}, + {"type", Attr("indirect")}, + }, + .description = "basic_indirect", + }, + InputFromURLTestCase{ + .url = "flake:nixpkgs/branch", + .attrs = + { + {"id", Attr("nixpkgs")}, + {"type", Attr("indirect")}, + {"ref", Attr("branch")}, + }, + .description = "basic_indirect_branch", + }, + InputFromURLTestCase{ + .url = "nixpkgs/branch", + .attrs = + { + {"id", Attr("nixpkgs")}, + {"type", Attr("indirect")}, + {"ref", Attr("branch")}, + }, + .description = "flake_id_ref_branch", + .expectedUrl = "flake:nixpkgs/branch", + }, + InputFromURLTestCase{ + .url = "nixpkgs/branch/2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", + .attrs = + { + {"id", Attr("nixpkgs")}, + {"type", Attr("indirect")}, + {"ref", Attr("branch")}, + {"rev", Attr("2aae6c35c94fcfb415dbe95f408b9ce91ee846ed")}, + }, + .description = "flake_id_ref_branch_trailing_slash", + .expectedUrl = "flake:nixpkgs/branch/2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", + }), + [](const ::testing::TestParamInfo & info) { return info.param.description; }); + TEST(to_string, doesntReencodeUrl) { fetchers::Settings fetchSettings; diff --git a/src/libflake/flakeref.cc b/src/libflake/flakeref.cc index cd176f14b..38979783d 100644 --- a/src/libflake/flakeref.cc +++ b/src/libflake/flakeref.cc @@ -198,7 +198,7 @@ parseFlakeIdRef(const fetchers::Settings & fetchSettings, const std::string & ur if (std::regex_match(url, match, flakeRegex)) { auto parsedURL = ParsedURL{ .scheme = "flake", - .authority = ParsedURL::Authority{}, + .authority = std::nullopt, .path = splitString>(match[1].str(), "/"), }; From b88a22504f29127e0b530f923d159dac053c743e Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sat, 30 Aug 2025 02:36:16 +0300 Subject: [PATCH 292/382] libfetchers: Fix mingw build --- src/libfetchers/tarball.cc | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index c1b28f674..52038317e 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -120,7 +120,7 @@ static DownloadTarballResult downloadTarball_( throw Error("tarball '%s' does not exist.", localPath); } if (is_directory(localPath)) { - if (std::filesystem::exists(localPath + "/.git")) { + if (exists(localPath / ".git")) { throw Error( "tarball '%s' is a git repository, not a tarball. Please use `git+file` as the scheme.", localPath); } From a38ebdd5119a3348fec39371a2af1743f3876405 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sat, 30 Aug 2025 14:40:56 +0300 Subject: [PATCH 293/382] libfetchers: Restore path separator ignoring behavior for indirect and git-archive flakerefs Old versions of nix happily accepted a lot of weird flake references, which we didn't have tests for, so this was accidentally broken in c436b7a32afaf01d62f828697ddf5c49d4f8678c. This patch restores previous behavior and adds a plethora of tests to ensure we don't break this in the future. These test cases are aligned with how 2.18/2.28 parsed flake references. --- src/libfetchers/github.cc | 3 +- src/libfetchers/indirect.cc | 3 +- src/libflake-tests/flakeref.cc | 68 ++++++++++++++++++++ src/libutil-tests/url.cc | 97 +++++++++++++++++++++++++++++ src/libutil/include/nix/util/url.hh | 15 +++++ 5 files changed, 184 insertions(+), 2 deletions(-) diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index e40757dec..723c075f2 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -38,7 +38,8 @@ struct GitArchiveInputScheme : InputScheme if (url.scheme != schemeName()) return {}; - const auto & path = url.path; + /* This ignores empty path segments for back-compat. Older versions used a tokenizeString here. */ + auto path = url.pathSegments(/*skipEmpty=*/true) | std::ranges::to>(); std::optional rev; std::optional ref; diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc index c5cbf156b..e05d27adc 100644 --- a/src/libfetchers/indirect.cc +++ b/src/libfetchers/indirect.cc @@ -14,7 +14,8 @@ struct IndirectInputScheme : InputScheme if (url.scheme != "flake") return {}; - const auto & path = url.path; + /* This ignores empty path segments for back-compat. Older versions used a tokenizeString here. */ + auto path = url.pathSegments(/*skipEmpty=*/true) | std::ranges::to>(); std::optional rev; std::optional ref; diff --git a/src/libflake-tests/flakeref.cc b/src/libflake-tests/flakeref.cc index 3636d3e98..e2cb91bb8 100644 --- a/src/libflake-tests/flakeref.cc +++ b/src/libflake-tests/flakeref.cc @@ -172,6 +172,74 @@ INSTANTIATE_TEST_SUITE_P( }, .description = "flake_id_ref_branch_trailing_slash", .expectedUrl = "flake:nixpkgs/branch/2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", + }, + // The following tests are for back-compat with lax parsers in older versions + // that used `tokenizeString` for splitting path segments, which ignores empty + // strings. 
+ InputFromURLTestCase{ + .url = "nixpkgs/branch////", + .attrs = + { + {"id", Attr("nixpkgs")}, + {"type", Attr("indirect")}, + {"ref", Attr("branch")}, + }, + .description = "flake_id_ref_branch_ignore_empty_trailing_segments", + .expectedUrl = "flake:nixpkgs/branch", + }, + InputFromURLTestCase{ + .url = "nixpkgs/branch///2aae6c35c94fcfb415dbe95f408b9ce91ee846ed///", + .attrs = + { + {"id", Attr("nixpkgs")}, + {"type", Attr("indirect")}, + {"ref", Attr("branch")}, + {"rev", Attr("2aae6c35c94fcfb415dbe95f408b9ce91ee846ed")}, + }, + .description = "flake_id_ref_branch_ignore_empty_segments_ref_rev", + .expectedUrl = "flake:nixpkgs/branch/2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", + }, + InputFromURLTestCase{ + // Note that this is different from above because the "flake id" shorthand + // doesn't allow this. + .url = "flake:/nixpkgs///branch////", + .attrs = + { + {"id", Attr("nixpkgs")}, + {"type", Attr("indirect")}, + {"ref", Attr("branch")}, + }, + .description = "indirect_branch_empty_segments_everywhere", + .expectedUrl = "flake:nixpkgs/branch", + }, + InputFromURLTestCase{ + // TODO: Technically this has an empty authority, but it's ignored + // for now. Yes, this is what all versions going back to at least + // 2.18 did and yes, this should not be allowed. + .url = "github://////owner%42/////repo%41///branch%43////", + .attrs = + { + {"type", Attr("github")}, + {"owner", Attr("ownerB")}, + {"repo", Attr("repoA")}, + {"ref", Attr("branchC")}, + }, + .description = "github_ref_slashes_in_path_everywhere", + .expectedUrl = "github:ownerB/repoA/branchC", + }, + InputFromURLTestCase{ + // FIXME: Subgroups in gitlab URLs are busted. This double-encoding + // behavior exists since 2.18. See issue #9161 and PR #8845. + .url = "gitlab:/owner%252Fsubgroup/////repo%41///branch%43////", + .attrs = + { + {"type", Attr("gitlab")}, + {"owner", Attr("owner%2Fsubgroup")}, + {"repo", Attr("repoA")}, + {"ref", Attr("branchC")}, + }, + .description = "gitlab_ref_slashes_in_path_everywhere_with_pct_encoding", + .expectedUrl = "gitlab:owner%252Fsubgroup/repoA/branchC", }), [](const ::testing::TestParamInfo & info) { return info.param.description; }); diff --git a/src/libutil-tests/url.cc b/src/libutil-tests/url.cc index 9c698a943..56b879846 100644 --- a/src/libutil-tests/url.cc +++ b/src/libutil-tests/url.cc @@ -3,6 +3,8 @@ #include #include +#include + namespace nix { /* ----------- tests for url.hh --------------------------------------------------*/ @@ -686,7 +688,102 @@ TEST(parseURL, gitlabNamespacedProjectUrls) ASSERT_EQ(s, parsed.to_string()); } +/* ---------------------------------------------------------------------------- + * pathSegments + * --------------------------------------------------------------------------*/ + +struct ParsedURLPathSegmentsTestCase +{ + std::string url; + std::vector segments; + std::string path; + bool skipEmpty; + std::string description; +}; + +class ParsedURLPathSegmentsTest : public ::testing::TestWithParam +{}; + +TEST_P(ParsedURLPathSegmentsTest, segmentsAreCorrect) +{ + const auto & testCase = GetParam(); + auto segments = parseURL(testCase.url).pathSegments(/*skipEmpty=*/testCase.skipEmpty) + | std::ranges::to(); + EXPECT_EQ(segments, testCase.segments); + EXPECT_EQ(encodeUrlPath(segments), testCase.path); +} + +INSTANTIATE_TEST_SUITE_P( + ParsedURL, + ParsedURLPathSegmentsTest, + ::testing::Values( + ParsedURLPathSegmentsTestCase{ + .url = "scheme:", + .segments = {""}, + .path = "", + .skipEmpty = false, + .description = "no_authority_empty_path", + }, + 
ParsedURLPathSegmentsTestCase{ + .url = "scheme://", + .segments = {""}, + .path = "", + .skipEmpty = false, + .description = "empty_authority_empty_path", + }, + ParsedURLPathSegmentsTestCase{ + .url = "scheme:///", + .segments = {"", ""}, + .path = "/", + .skipEmpty = false, + .description = "empty_authority_empty_path_trailing", + }, + ParsedURLPathSegmentsTestCase{ + .url = "scheme://example.com/", + .segments = {"", ""}, + .path = "/", + .skipEmpty = false, + .description = "non_empty_authority_empty_path", + }, + ParsedURLPathSegmentsTestCase{ + .url = "scheme://example.com//", + .segments = {"", "", ""}, + .path = "//", + .skipEmpty = false, + .description = "non_empty_authority_non_empty_path", + }, + ParsedURLPathSegmentsTestCase{ + .url = "scheme://example.com///path///with//strange/empty///segments////", + .segments = {"path", "with", "strange", "empty", "segments"}, + .path = "path/with/strange/empty/segments", + .skipEmpty = true, + .description = "skip_all_empty_segments_with_authority", + }, + ParsedURLPathSegmentsTestCase{ + .url = "scheme://example.com///lots///empty///", + .segments = {"", "", "", "lots", "", "", "empty", "", "", ""}, + .path = "///lots///empty///", + .skipEmpty = false, + .description = "empty_segments_with_authority", + }, + ParsedURLPathSegmentsTestCase{ + .url = "scheme:/path///with//strange/empty///segments////", + .segments = {"path", "with", "strange", "empty", "segments"}, + .path = "path/with/strange/empty/segments", + .skipEmpty = true, + .description = "skip_all_empty_segments_no_authority_starts_with_slash", + }, + ParsedURLPathSegmentsTestCase{ + .url = "scheme:path///with//strange/empty///segments////", + .segments = {"path", "with", "strange", "empty", "segments"}, + .path = "path/with/strange/empty/segments", + .skipEmpty = true, + .description = "skip_all_empty_segments_no_authority_doesnt_start_with_slash", + }), + [](const auto & info) { return info.param.description; }); + TEST(nix, isValidSchemeName) + { ASSERT_TRUE(isValidSchemeName("http")); ASSERT_TRUE(isValidSchemeName("https")); diff --git a/src/libutil/include/nix/util/url.hh b/src/libutil/include/nix/util/url.hh index 1d9797551..5aa85230a 100644 --- a/src/libutil/include/nix/util/url.hh +++ b/src/libutil/include/nix/util/url.hh @@ -1,6 +1,7 @@ #pragma once ///@file +#include #include #include "nix/util/error.hh" @@ -230,6 +231,20 @@ struct ParsedURL * Remove `.` and `..` path segments. */ ParsedURL canonicalise(); + + /** + * Get a range of path segments (the substrings separated by '/' characters). 
+ * + * @param skipEmpty Skip all empty path segments + */ + auto pathSegments(bool skipEmpty) const & + { + return std::views::filter(path, [skipEmpty](std::string_view segment) { + if (skipEmpty) + return !segment.empty(); + return true; + }); + } }; std::ostream & operator<<(std::ostream & os, const ParsedURL & url); From e1c9bc0ef61628e2cfa2438a38638fbfdea7ffb8 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 31 Aug 2025 00:48:37 +0300 Subject: [PATCH 294/382] libstore: Get rid of allocations in printString, allocate 2K bytes on the stack MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Looking at perf: 0.21 │ push %rbp 0.99 │ mov %rsp,%rbp │ push %r15 0.25 │ push %r14 │ push %r13 0.49 │ push %r12 0.66 │ push %rbx 1.23 │ lea -0x10000(%rsp),%r11 0.23 │ 15: sub $0x1000,%rsp 1.01 │ orq $0x0,(%rsp) 59.12 │ cmp %r11,%rsp 0.27 │ ↑ jne 15 Seems like 64K is too much to have on the stack for each invocation, considering that only a minuscule number of allocations are actually larger than 4K. There's actually no good reason this function should use so much stack space. Or use small_string at all. Everything can be done in small chunks that don't require any memory allocations and use up 2K bytes on the stack. This patch also adds a microbenchmark for tracking the unparsing performance. Here are the results for this change: (Before) BM_UnparseRealDerivationFile/hello 7275 ns 7247 ns 96093 bytes_per_second=232.136Mi/s BM_UnparseRealDerivationFile/firefox 40538 ns 40376 ns 17327 bytes_per_second=378.534Mi/s (After) BM_UnparseRealDerivationFile/hello 3228 ns 3218 ns 215671 bytes_per_second=522.775Mi/s BM_UnparseRealDerivationFile/firefox 39724 ns 39584 ns 17617 bytes_per_second=386.101Mi/s This translates into nice evaluation performance improvements (compared to 18c3d2348f59032f1c630e6a232fe3637efb8200): Benchmark 1: GC_INITIAL_HEAP_SIZE=8G old-nix/bin/nix-instantiate ../nixpkgs -A nixosTests.gnome --readonly-mode Time (mean ± σ): 3.111 s ± 0.021 s [User: 2.513 s, System: 0.580 s] Range (min … max): 3.083 s … 3.143 s 10 runs Benchmark 2: GC_INITIAL_HEAP_SIZE=8G result/bin/nix-instantiate ../nixpkgs -A nixosTests.gnome --readonly-mode Time (mean ± σ): 3.037 s ± 0.038 s [User: 2.461 s, System: 0.558 s] Range (min … max): 2.960 s … 3.086 s 10 runs --- src/libstore-tests/derivation-parser-bench.cc | 29 +++++++++++ src/libstore/derivations.cc | 49 ++++++++++--------- 2 files changed, 56 insertions(+), 22 deletions(-) diff --git a/src/libstore-tests/derivation-parser-bench.cc b/src/libstore-tests/derivation-parser-bench.cc index ef698b205..61c9807a6 100644 --- a/src/libstore-tests/derivation-parser-bench.cc +++ b/src/libstore-tests/derivation-parser-bench.cc @@ -28,6 +28,27 @@ static void BM_ParseRealDerivationFile(benchmark::State & state, const std::stri state.SetBytesProcessed(state.iterations() * content.size()); } +// Benchmark unparsing real derivation files +static void BM_UnparseRealDerivationFile(benchmark::State & state, const std::string & filename) +{ + // Read the file once + std::ifstream file(filename); + std::stringstream buffer; + buffer << file.rdbuf(); + std::string content = buffer.str(); + + auto store = openStore("dummy://"); + ExperimentalFeatureSettings xpSettings; + auto drv = parseDerivation(*store, std::string(content), "test", xpSettings); + + for (auto _ : state) { + auto unparsed = drv.unparse(*store, /*maskOutputs=*/false); + benchmark::DoNotOptimize(unparsed); + assert(unparsed.size() == content.size()); + } + 
state.SetBytesProcessed(state.iterations() * content.size()); +} + // Register benchmarks for actual test derivation files if they exist BENCHMARK_CAPTURE( BM_ParseRealDerivationFile, @@ -37,3 +58,11 @@ BENCHMARK_CAPTURE( BM_ParseRealDerivationFile, firefox, getEnvNonEmpty("_NIX_TEST_UNIT_DATA").value_or(NIX_UNIT_TEST_DATA) + "/derivation/firefox.drv"); +BENCHMARK_CAPTURE( + BM_UnparseRealDerivationFile, + hello, + getEnvNonEmpty("_NIX_TEST_UNIT_DATA").value_or(NIX_UNIT_TEST_DATA) + "/derivation/hello.drv"); +BENCHMARK_CAPTURE( + BM_UnparseRealDerivationFile, + firefox, + getEnvNonEmpty("_NIX_TEST_UNIT_DATA").value_or(NIX_UNIT_TEST_DATA) + "/derivation/firefox.drv"); diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 1afc343d7..a1831efc6 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -498,28 +498,33 @@ Derivation parseDerivation( */ static void printString(std::string & res, std::string_view s) { - boost::container::small_vector buffer; - buffer.reserve(s.size() * 2 + 2); - char * buf = buffer.data(); - char * p = buf; - *p++ = '"'; - for (auto c : s) - if (c == '\"' || c == '\\') { - *p++ = '\\'; - *p++ = c; - } else if (c == '\n') { - *p++ = '\\'; - *p++ = 'n'; - } else if (c == '\r') { - *p++ = '\\'; - *p++ = 'r'; - } else if (c == '\t') { - *p++ = '\\'; - *p++ = 't'; - } else - *p++ = c; - *p++ = '"'; - res.append(buf, p - buf); + res.reserve(res.size() + s.size() * 2 + 2); + res += '"'; + static constexpr auto chunkSize = 1024; + std::array buffer; + while (!s.empty()) { + auto chunk = s.substr(0, /*n=*/chunkSize); + s.remove_prefix(chunk.size()); + char * buf = buffer.data(); + char * p = buf; + for (auto c : chunk) + if (c == '\"' || c == '\\') { + *p++ = '\\'; + *p++ = c; + } else if (c == '\n') { + *p++ = '\\'; + *p++ = 'n'; + } else if (c == '\r') { + *p++ = '\\'; + *p++ = 'r'; + } else if (c == '\t') { + *p++ = '\\'; + *p++ = 't'; + } else + *p++ = c; + res.append(buf, p - buf); + } + res += '"'; } static void printUnquotedString(std::string & res, std::string_view s) From 112f311c50ca579e45b247863eac0e3f4e73c4a6 Mon Sep 17 00:00:00 2001 From: Matej Urbas Date: Sun, 31 Aug 2025 09:53:14 +0100 Subject: [PATCH 295/382] hacking.md: set installation outputs as well --- doc/manual/source/development/building.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/source/development/building.md b/doc/manual/source/development/building.md index 33b7b2d5c..a07232a5f 100644 --- a/doc/manual/source/development/building.md +++ b/doc/manual/source/development/building.md @@ -34,7 +34,7 @@ $ nix-shell --attr devShells.x86_64-linux.native-clangStdenvPackages To build Nix itself in this shell: ```console -[nix-shell]$ mesonFlags+=" --prefix=$(pwd)/outputs/out" +[nix-shell]$ out="$(pwd)/outputs/out" dev=$out debug=$out mesonFlags+=" --prefix=${out}" [nix-shell]$ dontAddPrefix=1 configurePhase [nix-shell]$ buildPhase ``` From 363620dd2449c29dadd4ed8232bf1988c408c601 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 31 Aug 2025 12:56:02 +0300 Subject: [PATCH 296/382] libexpr: Statically allocate commonly used symbols The motivation for this change is two-fold: 1. Commonly used Symbol values can be referred to quite often and they can be assigned at compile-time rather than runtime. 2. This also unclutters EvalState constructor, which was getting very long and unreadable. Spiritually similar to https://gerrit.lix.systems/c/lix/+/2218, though that patch doesn't allocate the Symbol at compile time. 
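As a rough standalone sketch of the idea (illustrative only; the names
Interner, sOutPath and friends below are made up, while the real pieces are
StaticEvalSymbols, SymbolTable and EvalState::s introduced in this patch):
the well-known ids become compile-time constants, and the runtime intern
table only needs to be seeded in the same order and checked against them.

    #include <cassert>
    #include <string_view>
    #include <vector>

    // Ids of well-known names are fixed at compile time; id 0 stays reserved.
    enum : unsigned { sOutPath = 1, sDrvPath = 2, sType = 3 };

    // Minimal stand-in for the runtime interner: it hands out ids in
    // insertion order, starting at 1.
    struct Interner
    {
        std::vector<std::string_view> strings{""}; // index 0 = reserved
        unsigned intern(std::string_view s)
        {
            strings.push_back(s);
            return static_cast<unsigned>(strings.size() - 1);
        }
    };

    int main()
    {
        Interner t;
        unsigned outPath = t.intern("outPath");
        unsigned drvPath = t.intern("drvPath");
        unsigned type = t.intern("type");
        // Seeding in the same order as the constants is what makes the two
        // agree; copyIntoSymbolTable() below performs the equivalent check.
        assert(outPath == sOutPath && drvPath == sDrvPath && type == sType);
        return 0;
    }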
Co-authored-by: eldritch horrors --- src/libcmd/installable-flake.cc | 8 +- src/libexpr-tests/value/print.cc | 6 +- src/libexpr/eval-cache.cc | 4 +- src/libexpr/eval-profiler.cc | 4 +- src/libexpr/eval.cc | 186 +++++++------------ src/libexpr/get-drvs.cc | 22 +-- src/libexpr/include/nix/expr/eval.hh | 95 +++++++++- src/libexpr/include/nix/expr/parser-state.hh | 2 +- src/libexpr/include/nix/expr/symbol-table.hh | 66 +++++-- src/libexpr/parser.y | 7 +- src/libexpr/primops.cc | 89 ++++----- src/libexpr/primops/context.cc | 4 +- src/libexpr/primops/fetchMercurial.cc | 2 +- src/libexpr/primops/fetchTree.cc | 6 +- src/libexpr/print.cc | 2 +- src/libexpr/value-to-json.cc | 2 +- src/libexpr/value-to-xml.cc | 4 +- src/libflake/flake.cc | 7 +- src/nix/app.cc | 8 +- src/nix/bundle.cc | 6 +- src/nix/flake.cc | 10 +- src/nix/nix-env/user-env.cc | 20 +- src/nix/search.cc | 8 +- 23 files changed, 313 insertions(+), 255 deletions(-) diff --git a/src/libcmd/installable-flake.cc b/src/libcmd/installable-flake.cc index 97f7eb645..5431100d3 100644 --- a/src/libcmd/installable-flake.cc +++ b/src/libcmd/installable-flake.cc @@ -105,8 +105,8 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths() std::optional priority; - if (attr->maybeGetAttr(state->sOutputSpecified)) { - } else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) { + if (attr->maybeGetAttr(state->s.outputSpecified)) { + } else if (auto aMeta = attr->maybeGetAttr(state->s.meta)) { if (auto aPriority = aMeta->maybeGetAttr("priority")) priority = aPriority->getInt().value; } @@ -119,12 +119,12 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths() overloaded{ [&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec { StringSet outputsToInstall; - if (auto aOutputSpecified = attr->maybeGetAttr(state->sOutputSpecified)) { + if (auto aOutputSpecified = attr->maybeGetAttr(state->s.outputSpecified)) { if (aOutputSpecified->getBool()) { if (auto aOutputName = attr->maybeGetAttr("outputName")) outputsToInstall = {aOutputName->getString()}; } - } else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) { + } else if (auto aMeta = attr->maybeGetAttr(state->s.meta)) { if (auto aOutputsToInstall = aMeta->maybeGetAttr("outputsToInstall")) for (auto & s : aOutputsToInstall->getListOfStrings()) outputsToInstall.insert(s); diff --git a/src/libexpr-tests/value/print.cc b/src/libexpr-tests/value/print.cc index 7647cd334..b32cba667 100644 --- a/src/libexpr-tests/value/print.cc +++ b/src/libexpr-tests/value/print.cc @@ -393,7 +393,7 @@ TEST_F(ValuePrintingTests, ansiColorsDerivation) vDerivation.mkString("derivation"); BindingsBuilder builder(state, state.allocBindings(10)); - builder.insert(state.sType, &vDerivation); + builder.insert(state.s.type, &vDerivation); Value vAttrs; vAttrs.mkAttrs(builder.finish()); @@ -438,8 +438,8 @@ TEST_F(ValuePrintingTests, ansiColorsDerivationError) vDerivation.mkString("derivation"); BindingsBuilder builder(state, state.allocBindings(10)); - builder.insert(state.sType, &vDerivation); - builder.insert(state.sDrvPath, &vError); + builder.insert(state.s.type, &vDerivation); + builder.insert(state.s.drvPath, &vError); Value vAttrs; vAttrs.mkAttrs(builder.finish()); diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index 292d76e02..480ca72c7 100644 --- a/src/libexpr/eval-cache.cc +++ b/src/libexpr/eval-cache.cc @@ -330,7 +330,7 @@ AttrCursor::AttrCursor( AttrKey AttrCursor::getKey() { if (!parent) - return {0, root->state.sEpsilon}; + return {0, root->state.s.epsilon}; if (!parent->first->cachedValue) { 
parent->first->cachedValue = root->db->getAttr(parent->first->getKey()); assert(parent->first->cachedValue); @@ -702,7 +702,7 @@ bool AttrCursor::isDerivation() StorePath AttrCursor::forceDerivation() { - auto aDrvPath = getAttr(root->state.sDrvPath); + auto aDrvPath = getAttr(root->state.s.drvPath); auto drvPath = root->state.store->parseStorePath(aDrvPath->getString()); drvPath.requireDerivation(); if (!root->state.store->isValidPath(drvPath) && !settings.readOnlyMode) { diff --git a/src/libexpr/eval-profiler.cc b/src/libexpr/eval-profiler.cc index 7769d47d5..ba92faf18 100644 --- a/src/libexpr/eval-profiler.cc +++ b/src/libexpr/eval-profiler.cc @@ -185,7 +185,7 @@ FrameInfo SampleStack::getPrimOpFrameInfo(const PrimOp & primOp, std::spanattrs(); - auto nameAttr = state.getAttr(state.sName, attrs, ""); + auto nameAttr = state.getAttr(state.s.name, attrs, ""); auto drvName = std::string(state.forceStringNoCtx(*nameAttr->value, pos, "")); return DerivationStrictFrameInfo{.callPos = pos, .drvName = std::move(drvName)}; } catch (...) { @@ -211,7 +211,7 @@ FrameInfo SampleStack::getFrameInfoFromValueAndPos(const Value & v, std::spanget(state.sFunctor); + const auto functor = v.attrs()->get(state.s.functor); if (auto pos_ = posCache.lookup(pos); std::holds_alternative(pos_.origin)) /* HACK: In case callsite position is unresolved. */ return FunctorFrameInfo{.pos = functor->pos}; diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index f0b199946..8c5646403 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -203,124 +203,65 @@ EvalState::EvalState( std::shared_ptr buildStore) : fetchSettings{fetchSettings} , settings{settings} - , sWith(symbols.create("")) - , sOutPath(symbols.create("outPath")) - , sDrvPath(symbols.create("drvPath")) - , sType(symbols.create("type")) - , sMeta(symbols.create("meta")) - , sName(symbols.create("name")) - , sValue(symbols.create("value")) - , sSystem(symbols.create("system")) - , sOverrides(symbols.create("__overrides")) - , sOutputs(symbols.create("outputs")) - , sOutputName(symbols.create("outputName")) - , sIgnoreNulls(symbols.create("__ignoreNulls")) - , sFile(symbols.create("file")) - , sLine(symbols.create("line")) - , sColumn(symbols.create("column")) - , sFunctor(symbols.create("__functor")) - , sToString(symbols.create("__toString")) - , sRight(symbols.create("right")) - , sWrong(symbols.create("wrong")) - , sStructuredAttrs(symbols.create("__structuredAttrs")) - , sJson(symbols.create("__json")) - , sAllowedReferences(symbols.create("allowedReferences")) - , sAllowedRequisites(symbols.create("allowedRequisites")) - , sDisallowedReferences(symbols.create("disallowedReferences")) - , sDisallowedRequisites(symbols.create("disallowedRequisites")) - , sMaxSize(symbols.create("maxSize")) - , sMaxClosureSize(symbols.create("maxClosureSize")) - , sBuilder(symbols.create("builder")) - , sArgs(symbols.create("args")) - , sContentAddressed(symbols.create("__contentAddressed")) - , sImpure(symbols.create("__impure")) - , sOutputHash(symbols.create("outputHash")) - , sOutputHashAlgo(symbols.create("outputHashAlgo")) - , sOutputHashMode(symbols.create("outputHashMode")) - , sRecurseForDerivations(symbols.create("recurseForDerivations")) - , sDescription(symbols.create("description")) - , sSelf(symbols.create("self")) - , sEpsilon(symbols.create("")) - , sStartSet(symbols.create("startSet")) - , sOperator(symbols.create("operator")) - , sKey(symbols.create("key")) - , sPath(symbols.create("path")) - , sPrefix(symbols.create("prefix")) - , 
sOutputSpecified(symbols.create("outputSpecified")) - , exprSymbols{ - .sub = symbols.create("__sub"), - .lessThan = symbols.create("__lessThan"), - .mul = symbols.create("__mul"), - .div = symbols.create("__div"), - .or_ = symbols.create("or"), - .findFile = symbols.create("__findFile"), - .nixPath = symbols.create("__nixPath"), - .body = symbols.create("body"), - } + , symbols(StaticEvalSymbols::staticSymbolTable()) , repair(NoRepair) , emptyBindings(0) - , storeFS( - makeMountedSourceAccessor( - { - {CanonPath::root, makeEmptySourceAccessor()}, - /* In the pure eval case, we can simply require - valid paths. However, in the *impure* eval - case this gets in the way of the union - mechanism, because an invalid access in the - upper layer will *not* be caught by the union - source accessor, but instead abort the entire - lookup. + , storeFS(makeMountedSourceAccessor({ + {CanonPath::root, makeEmptySourceAccessor()}, + /* In the pure eval case, we can simply require + valid paths. However, in the *impure* eval + case this gets in the way of the union + mechanism, because an invalid access in the + upper layer will *not* be caught by the union + source accessor, but instead abort the entire + lookup. - This happens when the store dir in the - ambient file system has a path (e.g. because - another Nix store there), but the relocated - store does not. + This happens when the store dir in the + ambient file system has a path (e.g. because + another Nix store there), but the relocated + store does not. - TODO make the various source accessors doing - access control all throw the same type of - exception, and make union source accessor - catch it, so we don't need to do this hack. - */ - {CanonPath(store->storeDir), store->getFSAccessor(settings.pureEval)}, - })) - , rootFS( - ({ - /* In pure eval mode, we provide a filesystem that only - contains the Nix store. + TODO make the various source accessors doing + access control all throw the same type of + exception, and make union source accessor + catch it, so we don't need to do this hack. + */ + {CanonPath(store->storeDir), store->getFSAccessor(settings.pureEval)}, + })) + , rootFS(({ + /* In pure eval mode, we provide a filesystem that only + contains the Nix store. - If we have a chroot store and pure eval is not enabled, - use a union accessor to make the chroot store available - at its logical location while still having the - underlying directory available. This is necessary for - instance if we're evaluating a file from the physical - /nix/store while using a chroot store. */ - auto accessor = getFSSourceAccessor(); + If we have a chroot store and pure eval is not enabled, + use a union accessor to make the chroot store available + at its logical location while still having the + underlying directory available. This is necessary for + instance if we're evaluating a file from the physical + /nix/store while using a chroot store. */ + auto accessor = getFSSourceAccessor(); - auto realStoreDir = dirOf(store->toRealPath(StorePath::dummy)); - if (settings.pureEval || store->storeDir != realStoreDir) { - accessor = settings.pureEval - ? storeFS - : makeUnionSourceAccessor({accessor, storeFS}); - } + auto realStoreDir = dirOf(store->toRealPath(StorePath::dummy)); + if (settings.pureEval || store->storeDir != realStoreDir) { + accessor = settings.pureEval ? storeFS : makeUnionSourceAccessor({accessor, storeFS}); + } - /* Apply access control if needed. 
*/ - if (settings.restrictEval || settings.pureEval) - accessor = AllowListSourceAccessor::create(accessor, {}, {}, - [&settings](const CanonPath & path) -> RestrictedPathError { - auto modeInformation = settings.pureEval - ? "in pure evaluation mode (use '--impure' to override)" - : "in restricted mode"; - throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation); - }); + /* Apply access control if needed. */ + if (settings.restrictEval || settings.pureEval) + accessor = AllowListSourceAccessor::create( + accessor, {}, {}, [&settings](const CanonPath & path) -> RestrictedPathError { + auto modeInformation = settings.pureEval ? "in pure evaluation mode (use '--impure' to override)" + : "in restricted mode"; + throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation); + }); - accessor; - })) + accessor; + })) , corepkgsFS(make_ref()) , internalFS(make_ref()) , derivationInternal{corepkgsFS->addFile( - CanonPath("derivation-internal.nix"), + CanonPath("derivation-internal.nix"), #include "primops/derivation.nix.gen.hh" - )} + )} , store(store) , buildStore(buildStore ? buildStore : store) , inputCache(fetchers::InputCache::create()) @@ -654,7 +595,7 @@ std::optional EvalState::getDoc(Value & v) } if (isFunctor(v)) { try { - Value & functor = *v.attrs()->find(sFunctor)->value; + Value & functor = *v.attrs()->find(s.functor)->value; Value * vp[] = {&v}; Value partiallyApplied; // The first parameter is not user-provided, and may be @@ -978,8 +919,8 @@ void EvalState::mkPos(Value & v, PosIdx p) auto origin = positions.originOf(p); if (auto path = std::get_if(&origin)) { auto attrs = buildBindings(3); - attrs.alloc(sFile).mkString(path->path.abs()); - makePositionThunks(*this, p, attrs.alloc(sLine), attrs.alloc(sColumn)); + attrs.alloc(s.file).mkString(path->path.abs()); + makePositionThunks(*this, p, attrs.alloc(s.line), attrs.alloc(s.column)); v.mkAttrs(attrs); } else v.mkNull(); @@ -1245,7 +1186,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) dynamicEnv = &env2; Env * inheritEnv = inheritFromExprs ? buildInheritFromEnv(state, env2) : nullptr; - AttrDefs::iterator overrides = attrs.find(state.sOverrides); + AttrDefs::iterator overrides = attrs.find(state.s.overrides); bool hasOverrides = overrides != attrs.end(); /* The recursive attributes are evaluated in the new @@ -1717,7 +1658,7 @@ void EvalState::callFunction(Value & fun, std::span args, Value & vRes, } } - else if (vCur.type() == nAttrs && (functor = vCur.attrs()->get(sFunctor))) { + else if (vCur.type() == nAttrs && (functor = vCur.attrs()->get(s.functor))) { /* 'vCur' may be allocated on the stack of the calling function, but for functors we may keep a reference, so heap-allocate a copy and use that instead. 
*/ @@ -1779,7 +1720,7 @@ void EvalState::autoCallFunction(const Bindings & args, Value & fun, Value & res forceValue(fun, pos); if (fun.type() == nAttrs) { - auto found = fun.attrs()->find(sFunctor); + auto found = fun.attrs()->find(s.functor); if (found != fun.attrs()->end()) { Value * v = allocValue(); callFunction(*found->value, fun, *v, pos); @@ -2241,7 +2182,7 @@ Bindings::const_iterator EvalState::getAttr(Symbol attrSym, const Bindings * att bool EvalState::isFunctor(const Value & fun) const { - return fun.type() == nAttrs && fun.attrs()->find(sFunctor) != fun.attrs()->end(); + return fun.type() == nAttrs && fun.attrs()->find(s.functor) != fun.attrs()->end(); } void EvalState::forceFunction(Value & v, const PosIdx pos, std::string_view errorCtx) @@ -2310,7 +2251,7 @@ bool EvalState::isDerivation(Value & v) { if (v.type() != nAttrs) return false; - auto i = v.attrs()->get(sType); + auto i = v.attrs()->get(s.type); if (!i) return false; forceValue(*i->value, i->pos); @@ -2322,7 +2263,7 @@ bool EvalState::isDerivation(Value & v) std::optional EvalState::tryAttrsToString(const PosIdx pos, Value & v, NixStringContext & context, bool coerceMore, bool copyToStore) { - auto i = v.attrs()->find(sToString); + auto i = v.attrs()->find(s.toString); if (i != v.attrs()->end()) { Value v1; callFunction(*i->value, v, v1, pos); @@ -2368,7 +2309,7 @@ BackedStringView EvalState::coerceToString( auto maybeString = tryAttrsToString(pos, v, context, coerceMore, copyToStore); if (maybeString) return std::move(*maybeString); - auto i = v.attrs()->find(sOutPath); + auto i = v.attrs()->find(s.outPath); if (i == v.attrs()->end()) { error( "cannot coerce %1% to a string: %2%", showType(v), ValuePrinter(*this, v, errorPrintOptions)) @@ -2475,7 +2416,7 @@ SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, NixStringContext /* Similarly, handle __toString where the result may be a path value. */ if (v.type() == nAttrs) { - auto i = v.attrs()->find(sToString); + auto i = v.attrs()->find(s.toString); if (i != v.attrs()->end()) { Value v1; callFunction(*i->value, v, v1, pos); @@ -2665,8 +2606,8 @@ void EvalState::assertEqValues(Value & v1, Value & v2, const PosIdx pos, std::st case nAttrs: { if (isDerivation(v1) && isDerivation(v2)) { - auto i = v1.attrs()->get(sOutPath); - auto j = v2.attrs()->get(sOutPath); + auto i = v1.attrs()->get(s.outPath); + auto j = v2.attrs()->get(s.outPath); if (i && j) { try { assertEqValues(*i->value, *j->value, pos, errorCtx); @@ -2819,8 +2760,8 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v /* If both sets denote a derivation (type = "derivation"), then compare their outPaths. 
*/ if (isDerivation(v1) && isDerivation(v2)) { - auto i = v1.attrs()->get(sOutPath); - auto j = v2.attrs()->get(sOutPath); + auto i = v1.attrs()->get(s.outPath); + auto j = v2.attrs()->get(s.outPath); if (i && j) return eqValues(*i->value, *j->value, pos, errorCtx); } @@ -3196,8 +3137,7 @@ Expr * EvalState::parse( docComments = &it->second; } - auto result = parseExprFromBuf( - text, length, origin, basePath, symbols, settings, positions, *docComments, rootFS, exprSymbols); + auto result = parseExprFromBuf(text, length, origin, basePath, symbols, settings, positions, *docComments, rootFS); result->bindVars(*this, staticEnv); diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index a1c3e5611..00b673365 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -45,7 +45,7 @@ PackageInfo::PackageInfo(EvalState & state, ref store, const std::string std::string PackageInfo::queryName() const { if (name == "" && attrs) { - auto i = attrs->find(state->sName); + auto i = attrs->find(state->s.name); if (i == attrs->end()) state->error("derivation name missing").debugThrow(); name = state->forceStringNoCtx(*i->value, noPos, "while evaluating the 'name' attribute of a derivation"); @@ -56,7 +56,7 @@ std::string PackageInfo::queryName() const std::string PackageInfo::querySystem() const { if (system == "" && attrs) { - auto i = attrs->find(state->sSystem); + auto i = attrs->find(state->s.system); system = i == attrs->end() ? "unknown" @@ -68,7 +68,7 @@ std::string PackageInfo::querySystem() const std::optional PackageInfo::queryDrvPath() const { if (!drvPath && attrs) { - if (auto i = attrs->get(state->sDrvPath)) { + if (auto i = attrs->get(state->s.drvPath)) { NixStringContext context; auto found = state->coerceToStorePath( i->pos, *i->value, context, "while evaluating the 'drvPath' attribute of a derivation"); @@ -95,7 +95,7 @@ StorePath PackageInfo::requireDrvPath() const StorePath PackageInfo::queryOutPath() const { if (!outPath && attrs) { - auto i = attrs->find(state->sOutPath); + auto i = attrs->find(state->s.outPath); NixStringContext context; if (i != attrs->end()) outPath = state->coerceToStorePath( @@ -111,7 +111,7 @@ PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsT if (outputs.empty()) { /* Get the ‘outputs’ list. */ const Attr * i; - if (attrs && (i = attrs->get(state->sOutputs))) { + if (attrs && (i = attrs->get(state->s.outputs))) { state->forceList(*i->value, i->pos, "while evaluating the 'outputs' attribute of a derivation"); /* For each output... */ @@ -127,7 +127,7 @@ PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsT state->forceAttrs(*out->value, i->pos, "while evaluating an output of a derivation"); /* And evaluate its ‘outPath’ attribute. */ - auto outPath = out->value->attrs()->get(state->sOutPath); + auto outPath = out->value->attrs()->get(state->s.outPath); if (!outPath) continue; // FIXME: throw error? 
NixStringContext context; @@ -146,7 +146,7 @@ PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsT return outputs; const Attr * i; - if (attrs && (i = attrs->get(state->sOutputSpecified)) + if (attrs && (i = attrs->get(state->s.outputSpecified)) && state->forceBool(*i->value, i->pos, "while evaluating the 'outputSpecified' attribute of a derivation")) { Outputs result; auto out = outputs.find(queryOutputName()); @@ -181,7 +181,7 @@ PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsT std::string PackageInfo::queryOutputName() const { if (outputName == "" && attrs) { - auto i = attrs->get(state->sOutputName); + auto i = attrs->get(state->s.outputName); outputName = i ? state->forceStringNoCtx(*i->value, noPos, "while evaluating the output name of a derivation") : ""; } @@ -194,7 +194,7 @@ const Bindings * PackageInfo::getMeta() return meta; if (!attrs) return 0; - auto a = attrs->get(state->sMeta); + auto a = attrs->get(state->s.meta); if (!a) return 0; state->forceAttrs(*a->value, a->pos, "while evaluating the 'meta' attribute of a derivation"); @@ -221,7 +221,7 @@ bool PackageInfo::checkMeta(Value & v) return false; return true; } else if (v.type() == nAttrs) { - if (v.attrs()->get(state->sOutPath)) + if (v.attrs()->get(state->s.outPath)) return false; for (auto & i : *v.attrs()) if (!checkMeta(*i.value)) @@ -411,7 +411,7 @@ static void getDerivations( should we recurse into it? => Only if it has a `recurseForDerivations = true' attribute. */ if (i->value->type() == nAttrs) { - auto j = i->value->attrs()->get(state.sRecurseForDerivations); + auto j = i->value->attrs()->get(state.s.recurseForDerivations); if (j && state.forceBool( *j->value, j->pos, "while evaluating the attribute `recurseForDerivations`")) diff --git a/src/libexpr/include/nix/expr/eval.hh b/src/libexpr/include/nix/expr/eval.hh index d52ccb545..04729b100 100644 --- a/src/libexpr/include/nix/expr/eval.hh +++ b/src/libexpr/include/nix/expr/eval.hh @@ -213,23 +213,100 @@ struct DebugTrace } }; +struct StaticEvalSymbols +{ + Symbol with, outPath, drvPath, type, meta, name, value, system, overrides, outputs, outputName, ignoreNulls, file, + line, column, functor, toString, right, wrong, structuredAttrs, json, allowedReferences, allowedRequisites, + disallowedReferences, disallowedRequisites, maxSize, maxClosureSize, builder, args, contentAddressed, impure, + outputHash, outputHashAlgo, outputHashMode, recurseForDerivations, description, self, epsilon, startSet, + operator_, key, path, prefix, outputSpecified; + + Expr::AstSymbols exprSymbols; + + static constexpr auto preallocate() + { + StaticSymbolTable alloc; + + StaticEvalSymbols staticSymbols = { + .with = alloc.create(""), + .outPath = alloc.create("outPath"), + .drvPath = alloc.create("drvPath"), + .type = alloc.create("type"), + .meta = alloc.create("meta"), + .name = alloc.create("name"), + .value = alloc.create("value"), + .system = alloc.create("system"), + .overrides = alloc.create("__overrides"), + .outputs = alloc.create("outputs"), + .outputName = alloc.create("outputName"), + .ignoreNulls = alloc.create("__ignoreNulls"), + .file = alloc.create("file"), + .line = alloc.create("line"), + .column = alloc.create("column"), + .functor = alloc.create("__functor"), + .toString = alloc.create("__toString"), + .right = alloc.create("right"), + .wrong = alloc.create("wrong"), + .structuredAttrs = alloc.create("__structuredAttrs"), + .json = alloc.create("__json"), + .allowedReferences = 
alloc.create("allowedReferences"), + .allowedRequisites = alloc.create("allowedRequisites"), + .disallowedReferences = alloc.create("disallowedReferences"), + .disallowedRequisites = alloc.create("disallowedRequisites"), + .maxSize = alloc.create("maxSize"), + .maxClosureSize = alloc.create("maxClosureSize"), + .builder = alloc.create("builder"), + .args = alloc.create("args"), + .contentAddressed = alloc.create("__contentAddressed"), + .impure = alloc.create("__impure"), + .outputHash = alloc.create("outputHash"), + .outputHashAlgo = alloc.create("outputHashAlgo"), + .outputHashMode = alloc.create("outputHashMode"), + .recurseForDerivations = alloc.create("recurseForDerivations"), + .description = alloc.create("description"), + .self = alloc.create("self"), + .epsilon = alloc.create(""), + .startSet = alloc.create("startSet"), + .operator_ = alloc.create("operator"), + .key = alloc.create("key"), + .path = alloc.create("path"), + .prefix = alloc.create("prefix"), + .outputSpecified = alloc.create("outputSpecified"), + .exprSymbols = { + .sub = alloc.create("__sub"), + .lessThan = alloc.create("__lessThan"), + .mul = alloc.create("__mul"), + .div = alloc.create("__div"), + .or_ = alloc.create("or"), + .findFile = alloc.create("__findFile"), + .nixPath = alloc.create("__nixPath"), + .body = alloc.create("body"), + }}; + + return std::pair{staticSymbols, alloc}; + } + + static consteval StaticEvalSymbols create() + { + return preallocate().first; + } + + static constexpr StaticSymbolTable staticSymbolTable() + { + return preallocate().second; + } +}; + class EvalState : public std::enable_shared_from_this { public: + static constexpr StaticEvalSymbols s = StaticEvalSymbols::create(); + const fetchers::Settings & fetchSettings; const EvalSettings & settings; SymbolTable symbols; PosTable positions; - const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue, sSystem, sOverrides, sOutputs, sOutputName, - sIgnoreNulls, sFile, sLine, sColumn, sFunctor, sToString, sRight, sWrong, sStructuredAttrs, sJson, - sAllowedReferences, sAllowedRequisites, sDisallowedReferences, sDisallowedRequisites, sMaxSize, sMaxClosureSize, - sBuilder, sArgs, sContentAddressed, sImpure, sOutputHash, sOutputHashAlgo, sOutputHashMode, - sRecurseForDerivations, sDescription, sSelf, sEpsilon, sStartSet, sOperator, sKey, sPath, sPrefix, - sOutputSpecified; - - const Expr::AstSymbols exprSymbols; - /** * If set, force copying files to the Nix store even if they * already exist there. 
diff --git a/src/libexpr/include/nix/expr/parser-state.hh b/src/libexpr/include/nix/expr/parser-state.hh index dd99192c0..836cc9861 100644 --- a/src/libexpr/include/nix/expr/parser-state.hh +++ b/src/libexpr/include/nix/expr/parser-state.hh @@ -88,7 +88,7 @@ struct ParserState SourcePath basePath; PosTable::Origin origin; const ref rootFS; - const Expr::AstSymbols & s; + static constexpr Expr::AstSymbols s = StaticEvalSymbols::create().exprSymbols; const EvalSettings & settings; void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos); diff --git a/src/libexpr/include/nix/expr/symbol-table.hh b/src/libexpr/include/nix/expr/symbol-table.hh index ec1456e2d..ff98077ca 100644 --- a/src/libexpr/include/nix/expr/symbol-table.hh +++ b/src/libexpr/include/nix/expr/symbol-table.hh @@ -28,6 +28,8 @@ public: } }; +class StaticSymbolTable; + /** * Symbols have the property that they can be compared efficiently * (using an equality test), because the symbol table stores only one @@ -37,36 +39,29 @@ class Symbol { friend class SymbolStr; friend class SymbolTable; + friend class StaticSymbolTable; private: uint32_t id; - explicit Symbol(uint32_t id) noexcept + explicit constexpr Symbol(uint32_t id) noexcept : id(id) { } public: - Symbol() noexcept + constexpr Symbol() noexcept : id(0) { } [[gnu::always_inline]] - explicit operator bool() const noexcept + constexpr explicit operator bool() const noexcept { return id > 0; } - auto operator<=>(const Symbol other) const noexcept - { - return id <=> other.id; - } - - bool operator==(const Symbol other) const noexcept - { - return id == other.id; - } + constexpr auto operator<=>(const Symbol & other) const noexcept = default; friend class std::hash; }; @@ -210,6 +205,39 @@ public: }; }; +class SymbolTable; + +/** + * Convenience class to statically assign symbol identifiers at compile-time. + */ +class StaticSymbolTable +{ + static constexpr std::size_t maxSize = 1024; + + struct StaticSymbolInfo + { + std::string_view str; + Symbol sym; + }; + + std::array symbols; + std::size_t size = 0; + +public: + constexpr StaticSymbolTable() = default; + + constexpr Symbol create(std::string_view str) + { + /* No need to check bounds because out of bounds access is + a compilation error. */ + auto sym = Symbol(size + 1); //< +1 because Symbol with id = 0 is reserved + symbols[size++] = {str, sym}; + return sym; + } + + void copyIntoSymbolTable(SymbolTable & symtab) const; +}; + /** * Symbol table used by the parser and evaluator to represent and look * up identifiers and attributes efficiently. @@ -232,6 +260,10 @@ private: boost::unordered_flat_set symbols{SymbolStr::chunkSize}; public: + SymbolTable(const StaticSymbolTable & staticSymtab) + { + staticSymtab.copyIntoSymbolTable(*this); + } /** * Converts a string into a symbol. 
@@ -276,6 +308,16 @@ public: } }; +inline void StaticSymbolTable::copyIntoSymbolTable(SymbolTable & symtab) const +{ + for (std::size_t i = 0; i < size; ++i) { + auto [str, staticSym] = symbols[i]; + auto sym = symtab.create(str); + if (sym != staticSym) [[unlikely]] + unreachable(); + } +} + } // namespace nix template<> diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 2b2566208..35fe929d9 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -68,8 +68,7 @@ Expr * parseExprFromBuf( const EvalSettings & settings, PosTable & positions, DocCommentMap & docComments, - const ref rootFS, - const Expr::AstSymbols & astSymbols); + const ref rootFS); } @@ -542,8 +541,7 @@ Expr * parseExprFromBuf( const EvalSettings & settings, PosTable & positions, DocCommentMap & docComments, - const ref rootFS, - const Expr::AstSymbols & astSymbols) + const ref rootFS) { yyscan_t scanner; LexerState lexerState { @@ -558,7 +556,6 @@ Expr * parseExprFromBuf( .basePath = basePath, .origin = lexerState.origin, .rootFS = rootFS, - .s = astSymbols, .settings = settings, }; diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index ca84f3038..264f3d155 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -214,20 +214,20 @@ void derivationToValue( auto path2 = path.path.abs(); Derivation drv = state.store->readDerivation(storePath); auto attrs = state.buildBindings(3 + drv.outputs.size()); - attrs.alloc(state.sDrvPath) + attrs.alloc(state.s.drvPath) .mkString( path2, { NixStringContextElem::DrvDeep{.drvPath = storePath}, }); - attrs.alloc(state.sName).mkString(drv.env["name"]); + attrs.alloc(state.s.name).mkString(drv.env["name"]); auto list = state.buildList(drv.outputs.size()); for (const auto & [i, o] : enumerate(drv.outputs)) { mkOutputString(state, attrs, storePath, o); (list[i] = state.allocValue())->mkString(o.first); } - attrs.alloc(state.sOutputs).mkList(list); + attrs.alloc(state.s.outputs).mkList(list); auto w = state.allocValue(); w->mkAttrs(attrs); @@ -731,7 +731,7 @@ static void prim_genericClosure(EvalState & state, const PosIdx pos, Value ** ar /* Get the start set. */ auto startSet = state.getAttr( - state.sStartSet, args[0]->attrs(), "in the attrset passed as argument to builtins.genericClosure"); + state.s.startSet, args[0]->attrs(), "in the attrset passed as argument to builtins.genericClosure"); state.forceList( *startSet->value, @@ -749,7 +749,7 @@ static void prim_genericClosure(EvalState & state, const PosIdx pos, Value ** ar /* Get the operator. 
*/ auto op = state.getAttr( - state.sOperator, args[0]->attrs(), "in the attrset passed as argument to builtins.genericClosure"); + state.s.operator_, args[0]->attrs(), "in the attrset passed as argument to builtins.genericClosure"); state.forceFunction( *op->value, noPos, "while evaluating the 'operator' attribute passed as argument to builtins.genericClosure"); @@ -771,7 +771,7 @@ static void prim_genericClosure(EvalState & state, const PosIdx pos, Value ** ar "while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure"); auto key = state.getAttr( - state.sKey, + state.s.key, e->attrs(), "in one of the attrsets generated by (or initially passed to) builtins.genericClosure"); state.forceValue(*key->value, noPos); @@ -1076,11 +1076,11 @@ static void prim_tryEval(EvalState & state, const PosIdx pos, Value ** args, Val try { state.forceValue(*args[0], pos); - attrs.insert(state.sValue, args[0]); + attrs.insert(state.s.value, args[0]); attrs.insert(state.symbols.create("success"), &state.vTrue); } catch (AssertionError & e) { // `value = false;` is unfortunate but removing it is a breaking change. - attrs.insert(state.sValue, &state.vFalse); + attrs.insert(state.s.value, &state.vFalse); attrs.insert(state.symbols.create("success"), &state.vFalse); } @@ -1292,7 +1292,8 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value ** auto attrs = args[0]->attrs(); /* Figure out the name first (for stack backtraces). */ - auto nameAttr = state.getAttr(state.sName, attrs, "in the attrset passed as argument to builtins.derivationStrict"); + auto nameAttr = + state.getAttr(state.s.name, attrs, "in the attrset passed as argument to builtins.derivationStrict"); std::string_view drvName; try { @@ -1366,7 +1367,7 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName using nlohmann::json; std::optional jsonObject; auto pos = v.determinePos(noPos); - auto attr = attrs->find(state.sStructuredAttrs); + auto attr = attrs->find(state.s.structuredAttrs); if (attr != attrs->end() && state.forceBool( *attr->value, @@ -1377,7 +1378,7 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName /* Check whether null attributes should be ignored. */ bool ignoreNulls = false; - attr = attrs->find(state.sIgnoreNulls); + attr = attrs->find(state.s.ignoreNulls); if (attr != attrs->end()) ignoreNulls = state.forceBool( *attr->value, @@ -1401,7 +1402,7 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName outputs.insert("out"); for (auto & i : attrs->lexicographicOrder(state.symbols)) { - if (i->name == state.sIgnoreNulls) + if (i->name == state.s.ignoreNulls) continue; auto key = state.symbols[i->name]; vomit("processing attribute '%1%'", key); @@ -1453,19 +1454,19 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName continue; } - if (i->name == state.sContentAddressed && state.forceBool(*i->value, pos, context_below)) { + if (i->name == state.s.contentAddressed && state.forceBool(*i->value, pos, context_below)) { contentAddressed = true; experimentalFeatureSettings.require(Xp::CaDerivations); } - else if (i->name == state.sImpure && state.forceBool(*i->value, pos, context_below)) { + else if (i->name == state.s.impure && state.forceBool(*i->value, pos, context_below)) { isImpure = true; experimentalFeatureSettings.require(Xp::ImpureDerivations); } /* The `args' attribute is special: it supplies the command-line arguments to the builder. 
*/ - else if (i->name == state.sArgs) { + else if (i->name == state.s.args) { state.forceList(*i->value, pos, context_below); for (auto elem : i->value->listView()) { auto s = state @@ -1482,22 +1483,22 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName if (jsonObject) { - if (i->name == state.sStructuredAttrs) + if (i->name == state.s.structuredAttrs) continue; jsonObject->structuredAttrs.emplace(key, printValueAsJSON(state, true, *i->value, pos, context)); - if (i->name == state.sBuilder) + if (i->name == state.s.builder) drv.builder = state.forceString(*i->value, context, pos, context_below); - else if (i->name == state.sSystem) + else if (i->name == state.s.system) drv.platform = state.forceStringNoCtx(*i->value, pos, context_below); - else if (i->name == state.sOutputHash) + else if (i->name == state.s.outputHash) outputHash = state.forceStringNoCtx(*i->value, pos, context_below); - else if (i->name == state.sOutputHashAlgo) + else if (i->name == state.s.outputHashAlgo) outputHashAlgo = parseHashAlgoOpt(state.forceStringNoCtx(*i->value, pos, context_below)); - else if (i->name == state.sOutputHashMode) + else if (i->name == state.s.outputHashMode) handleHashMode(state.forceStringNoCtx(*i->value, pos, context_below)); - else if (i->name == state.sOutputs) { + else if (i->name == state.s.outputs) { /* Require ‘outputs’ to be a list of strings. */ state.forceList(*i->value, pos, context_below); Strings ss; @@ -1506,51 +1507,51 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName handleOutputs(ss); } - if (i->name == state.sAllowedReferences) + if (i->name == state.s.allowedReferences) warn( "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'allowedReferences'; use 'outputChecks..allowedReferences' instead", drvName); - if (i->name == state.sAllowedRequisites) + if (i->name == state.s.allowedRequisites) warn( "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'allowedRequisites'; use 'outputChecks..allowedRequisites' instead", drvName); - if (i->name == state.sDisallowedReferences) + if (i->name == state.s.disallowedReferences) warn( "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'disallowedReferences'; use 'outputChecks..disallowedReferences' instead", drvName); - if (i->name == state.sDisallowedRequisites) + if (i->name == state.s.disallowedRequisites) warn( "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'disallowedRequisites'; use 'outputChecks..disallowedRequisites' instead", drvName); - if (i->name == state.sMaxSize) + if (i->name == state.s.maxSize) warn( "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'maxSize'; use 'outputChecks..maxSize' instead", drvName); - if (i->name == state.sMaxClosureSize) + if (i->name == state.s.maxClosureSize) warn( "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'maxClosureSize'; use 'outputChecks..maxClosureSize' instead", drvName); } else { auto s = state.coerceToString(pos, *i->value, context, context_below, true).toOwned(); - if (i->name == state.sJson) { + if (i->name == state.s.json) { warn( "In derivation '%s': setting structured attributes via '__json' is deprecated, and may be disallowed in future versions of Nix. 
Set '__structuredAttrs = true' instead.", drvName); drv.structuredAttrs = StructuredAttrs::parse(s); } else { drv.env.emplace(key, s); - if (i->name == state.sBuilder) + if (i->name == state.s.builder) drv.builder = std::move(s); - else if (i->name == state.sSystem) + else if (i->name == state.s.system) drv.platform = std::move(s); - else if (i->name == state.sOutputHash) + else if (i->name == state.s.outputHash) outputHash = std::move(s); - else if (i->name == state.sOutputHashAlgo) + else if (i->name == state.s.outputHashAlgo) outputHashAlgo = parseHashAlgoOpt(s); - else if (i->name == state.sOutputHashMode) + else if (i->name == state.s.outputHashMode) handleHashMode(s); - else if (i->name == state.sOutputs) + else if (i->name == state.s.outputs) handleOutputs(tokenizeString(s)); } } @@ -1722,7 +1723,7 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName } auto result = state.buildBindings(1 + drv.outputs.size()); - result.alloc(state.sDrvPath) + result.alloc(state.s.drvPath) .mkString( drvPathS, { @@ -2006,14 +2007,14 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value ** args, Va state.forceAttrs(*v2, pos, "while evaluating an element of the list passed to builtins.findFile"); std::string prefix; - auto i = v2->attrs()->find(state.sPrefix); + auto i = v2->attrs()->find(state.s.prefix); if (i != v2->attrs()->end()) prefix = state.forceStringNoCtx( *i->value, pos, "while evaluating the `prefix` attribute of an element of the list passed to builtins.findFile"); - i = state.getAttr(state.sPath, v2->attrs(), "in an element of the __nixPath"); + i = state.getAttr(state.s.path, v2->attrs(), "in an element of the __nixPath"); NixStringContext context; auto path = @@ -2786,7 +2787,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value ** args, Value if (n == "path") path.emplace(state.coerceToPath( attr.pos, *attr.value, context, "while evaluating the 'path' attribute passed to 'builtins.path'")); - else if (attr.name == state.sName) + else if (attr.name == state.s.name) name = state.forceStringNoCtx( *attr.value, attr.pos, "while evaluating the `name` attribute passed to builtins.path"); else if (n == "filter") @@ -3105,7 +3106,7 @@ static void prim_listToAttrs(EvalState & state, const PosIdx pos, Value ** args, for (const auto & [n, v2] : enumerate(listView)) { state.forceAttrs(*v2, pos, "while evaluating an element of the list passed to builtins.listToAttrs"); - auto j = state.getAttr(state.sName, v2->attrs(), "in a {name=...; value=...;} pair"); + auto j = state.getAttr(state.s.name, v2->attrs(), "in a {name=...; value=...;} pair"); auto name = state.forceStringNoCtx( *j->value, @@ -3132,7 +3133,7 @@ static void prim_listToAttrs(EvalState & state, const PosIdx pos, Value ** args, // Note that .value is actually a Value * *; see earlier comments Value * v2 = *std::bit_cast(attr.value); - auto j = state.getAttr(state.sValue, v2->attrs(), "in a {name=...; value=...;} pair"); + auto j = state.getAttr(state.s.value, v2->attrs(), "in a {name=...; value=...;} pair"); prev = attr.name; bindings.push_back({prev, j->value, j->pos}); } @@ -3948,13 +3949,13 @@ static void prim_partition(EvalState & state, const PosIdx pos, Value ** args, V auto rlist = state.buildList(rsize); if (rsize) memcpy(rlist.elems, right.data(), sizeof(Value *) * rsize); - attrs.alloc(state.sRight).mkList(rlist); + attrs.alloc(state.s.right).mkList(rlist); auto wsize = wrong.size(); auto wlist = state.buildList(wsize); if (wsize) memcpy(wlist.elems, 
wrong.data(), sizeof(Value *) * wsize); - attrs.alloc(state.sWrong).mkList(wlist); + attrs.alloc(state.s.wrong).mkList(wlist); v.mkAttrs(attrs); } @@ -4873,7 +4874,7 @@ static void prim_parseDrvName(EvalState & state, const PosIdx pos, Value ** args state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.parseDrvName"); DrvName parsed(name); auto attrs = state.buildBindings(2); - attrs.alloc(state.sName).mkString(parsed.name); + attrs.alloc(state.s.name).mkString(parsed.name); attrs.alloc("version").mkString(parsed.version); v.mkAttrs(attrs); } diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc index f037fdb80..12b8ffdf9 100644 --- a/src/libexpr/primops/context.cc +++ b/src/libexpr/primops/context.cc @@ -219,7 +219,7 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value ** args, auto list = state.buildList(info.second.outputs.size()); for (const auto & [i, output] : enumerate(info.second.outputs)) (list[i] = state.allocValue())->mkString(output); - infoAttrs.alloc(state.sOutputs).mkList(list); + infoAttrs.alloc(state.s.outputs).mkList(list); } attrs.alloc(state.store->printStorePath(info.first)).mkAttrs(infoAttrs); } @@ -300,7 +300,7 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value ** arg } } - if (auto attr = i.value->attrs()->get(state.sOutputs)) { + if (auto attr = i.value->attrs()->get(state.s.outputs)) { state.forceList(*attr->value, attr->pos, "while evaluating the `outputs` attribute of a string context"); if (attr->value->listSize() && !isDerivation(name)) { state diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index 9fc8e6c83..c856deede 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -84,7 +84,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value ** ar auto [storePath, input2] = input.fetchToStore(state.store); auto attrs2 = state.buildBindings(8); - state.mkStorePathString(storePath, attrs2.alloc(state.sOutPath)); + state.mkStorePathString(storePath, attrs2.alloc(state.s.outPath)); if (input2.getRef()) attrs2.alloc("branch").mkString(*input2.getRef()); // Backward compatibility: set 'rev' to diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index d58d76d75..e673e55a0 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -29,7 +29,7 @@ void emitTreeAttrs( { auto attrs = state.buildBindings(100); - state.mkStorePathString(storePath, attrs.alloc(state.sOutPath)); + state.mkStorePathString(storePath, attrs.alloc(state.s.outPath)); // FIXME: support arbitrary input attributes. 
@@ -95,7 +95,7 @@ static void fetchTree( fetchers::Attrs attrs; - if (auto aType = args[0]->attrs()->get(state.sType)) { + if (auto aType = args[0]->attrs()->get(state.s.type)) { if (type) state.error("unexpected argument 'type'").atPos(pos).debugThrow(); type = state.forceStringNoCtx( @@ -106,7 +106,7 @@ static void fetchTree( attrs.emplace("type", type.value()); for (auto & attr : *args[0]->attrs()) { - if (attr.name == state.sType) + if (attr.name == state.s.type) continue; state.forceValue(*attr.value, attr.pos); if (attr.value->type() == nPath || attr.value->type() == nString) { diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 502f32ea1..5338e365e 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -272,7 +272,7 @@ private: void printDerivation(Value & v) { std::optional storePath; - if (auto i = v.attrs()->get(state.sDrvPath)) { + if (auto i = v.attrs()->get(state.s.drvPath)) { NixStringContext context; storePath = state.coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation"); diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc index 2578620f3..2cd853f60 100644 --- a/src/libexpr/value-to-json.cc +++ b/src/libexpr/value-to-json.cc @@ -53,7 +53,7 @@ json printValueAsJSON( out = *maybeString; break; } - if (auto i = v.attrs()->get(state.sOutPath)) + if (auto i = v.attrs()->get(state.s.outPath)) return printValueAsJSON(state, strict, *i->value, i->pos, context, copyToStore); else { out = json::object(); diff --git a/src/libexpr/value-to-xml.cc b/src/libexpr/value-to-xml.cc index b3b986dae..31400e439 100644 --- a/src/libexpr/value-to-xml.cc +++ b/src/libexpr/value-to-xml.cc @@ -98,14 +98,14 @@ static void printValueAsXML( XMLAttrs xmlAttrs; Path drvPath; - if (auto a = v.attrs()->get(state.sDrvPath)) { + if (auto a = v.attrs()->get(state.s.drvPath)) { if (strict) state.forceValue(*a->value, a->pos); if (a->value->type() == nString) xmlAttrs["drvPath"] = drvPath = a->value->c_str(); } - if (auto a = v.attrs()->get(state.sOutPath)) { + if (auto a = v.attrs()->get(state.s.outPath)) { if (strict) state.forceValue(*a->value, a->pos); if (a->value->type() == nString) diff --git a/src/libflake/flake.cc b/src/libflake/flake.cc index b31bef211..56e455cb6 100644 --- a/src/libflake/flake.cc +++ b/src/libflake/flake.cc @@ -232,7 +232,7 @@ static Flake readFlake( .path = flakePath, }; - if (auto description = vInfo.attrs()->get(state.sDescription)) { + if (auto description = vInfo.attrs()->get(state.s.description)) { expectType(state, nString, *description->value, description->pos); flake.description = description->value->c_str(); } @@ -253,7 +253,7 @@ static Flake readFlake( if (outputs->value->isLambda() && outputs->value->lambda().fun->hasFormals()) { for (auto & formal : outputs->value->lambda().fun->formals->formals) { - if (formal.name != state.sSelf) + if (formal.name != state.s.self) flake.inputs.emplace( state.symbols[formal.name], FlakeInput{.ref = parseFlakeRef(state.fetchSettings, std::string(state.symbols[formal.name]))}); @@ -305,7 +305,8 @@ static Flake readFlake( } for (auto & attr : *vInfo.attrs()) { - if (attr.name != state.sDescription && attr.name != sInputs && attr.name != sOutputs && attr.name != sNixConfig) + if (attr.name != state.s.description && attr.name != sInputs && attr.name != sOutputs + && attr.name != sNixConfig) throw Error( "flake '%s' has an unsupported attribute '%s', at %s", resolvedRef, diff --git a/src/nix/app.cc b/src/nix/app.cc index 412b53817..f1937bc23 100644 --- 
a/src/nix/app.cc +++ b/src/nix/app.cc @@ -103,11 +103,11 @@ UnresolvedApp InstallableValue::toApp(EvalState & state) else if (type == "derivation") { auto drvPath = cursor->forceDerivation(); - auto outPath = cursor->getAttr(state.sOutPath)->getString(); - auto outputName = cursor->getAttr(state.sOutputName)->getString(); - auto name = cursor->getAttr(state.sName)->getString(); + auto outPath = cursor->getAttr(state.s.outPath)->getString(); + auto outputName = cursor->getAttr(state.s.outputName)->getString(); + auto name = cursor->getAttr(state.s.name)->getString(); auto aPname = cursor->maybeGetAttr("pname"); - auto aMeta = cursor->maybeGetAttr(state.sMeta); + auto aMeta = cursor->maybeGetAttr(state.s.meta); auto aMainProgram = aMeta ? aMeta->maybeGetAttr("mainProgram") : nullptr; auto mainProgram = aMainProgram ? aMainProgram->getString() : aPname ? aPname->getString() : DrvName(name).name; auto program = outPath + "/bin/" + mainProgram; diff --git a/src/nix/bundle.cc b/src/nix/bundle.cc index 29960c281..e11f37b84 100644 --- a/src/nix/bundle.cc +++ b/src/nix/bundle.cc @@ -100,7 +100,7 @@ struct CmdBundle : InstallableValueCommand if (!evalState->isDerivation(*vRes)) throw Error("the bundler '%s' does not produce a derivation", bundler.what()); - auto attr1 = vRes->attrs()->get(evalState->sDrvPath); + auto attr1 = vRes->attrs()->get(evalState->s.drvPath); if (!attr1) throw Error("the bundler '%s' does not produce a derivation", bundler.what()); @@ -109,7 +109,7 @@ struct CmdBundle : InstallableValueCommand drvPath.requireDerivation(); - auto attr2 = vRes->attrs()->get(evalState->sOutPath); + auto attr2 = vRes->attrs()->get(evalState->s.outPath); if (!attr2) throw Error("the bundler '%s' does not produce a derivation", bundler.what()); @@ -123,7 +123,7 @@ struct CmdBundle : InstallableValueCommand }); if (!outLink) { - auto * attr = vRes->attrs()->get(evalState->sName); + auto * attr = vRes->attrs()->get(evalState->s.name); if (!attr) throw Error("attribute 'name' missing"); outLink = evalState->forceStringNoCtx(*attr->value, attr->pos, ""); diff --git a/src/nix/flake.cc b/src/nix/flake.cc index c04eab291..8d6387c9d 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -1232,12 +1232,12 @@ struct CmdFlakeShow : FlakeCommand, MixJSON }; auto showDerivation = [&]() { - auto name = visitor.getAttr(state->sName)->getString(); + auto name = visitor.getAttr(state->s.name)->getString(); if (json) { std::optional description; - if (auto aMeta = visitor.maybeGetAttr(state->sMeta)) { - if (auto aDescription = aMeta->maybeGetAttr(state->sDescription)) + if (auto aMeta = visitor.maybeGetAttr(state->s.meta)) { + if (auto aDescription = aMeta->maybeGetAttr(state->s.description)) description = aDescription->getString(); } j.emplace("type", "derivation"); @@ -1365,8 +1365,8 @@ struct CmdFlakeShow : FlakeCommand, MixJSON || (attrPath.size() == 3 && attrPathS[0] == "apps")) { auto aType = visitor.maybeGetAttr("type"); std::optional description; - if (auto aMeta = visitor.maybeGetAttr(state->sMeta)) { - if (auto aDescription = aMeta->maybeGetAttr(state->sDescription)) + if (auto aMeta = visitor.maybeGetAttr(state->s.meta)) { + if (auto aDescription = aMeta->maybeGetAttr(state->s.description)) description = aDescription->getString(); } if (!aType || aType->getString() != "app") diff --git a/src/nix/nix-env/user-env.cc b/src/nix/nix-env/user-env.cc index 1b6e552f7..766c6d42a 100644 --- a/src/nix/nix-env/user-env.cc +++ b/src/nix/nix-env/user-env.cc @@ -56,21 +56,21 @@ bool createUserEnv( auto attrs = 
state.buildBindings(7 + outputs.size()); - attrs.alloc(state.sType).mkString("derivation"); - attrs.alloc(state.sName).mkString(i.queryName()); + attrs.alloc(state.s.type).mkString("derivation"); + attrs.alloc(state.s.name).mkString(i.queryName()); auto system = i.querySystem(); if (!system.empty()) - attrs.alloc(state.sSystem).mkString(system); - attrs.alloc(state.sOutPath).mkString(state.store->printStorePath(i.queryOutPath())); + attrs.alloc(state.s.system).mkString(system); + attrs.alloc(state.s.outPath).mkString(state.store->printStorePath(i.queryOutPath())); if (drvPath) - attrs.alloc(state.sDrvPath).mkString(state.store->printStorePath(*drvPath)); + attrs.alloc(state.s.drvPath).mkString(state.store->printStorePath(*drvPath)); // Copy each output meant for installation. auto outputsList = state.buildList(outputs.size()); for (const auto & [m, j] : enumerate(outputs)) { (outputsList[m] = state.allocValue())->mkString(j.first); auto outputAttrs = state.buildBindings(2); - outputAttrs.alloc(state.sOutPath).mkString(state.store->printStorePath(*j.second)); + outputAttrs.alloc(state.s.outPath).mkString(state.store->printStorePath(*j.second)); attrs.alloc(j.first).mkAttrs(outputAttrs); /* This is only necessary when installing store paths, e.g., @@ -80,7 +80,7 @@ bool createUserEnv( references.insert(*j.second); } - attrs.alloc(state.sOutputs).mkList(outputsList); + attrs.alloc(state.s.outputs).mkList(outputsList); // Copy the meta attributes. auto meta = state.buildBindings(metaNames.size()); @@ -91,7 +91,7 @@ bool createUserEnv( meta.insert(state.symbols.create(j), v); } - attrs.alloc(state.sMeta).mkAttrs(meta); + attrs.alloc(state.s.meta).mkAttrs(meta); (list[n] = state.allocValue())->mkAttrs(attrs); @@ -141,10 +141,10 @@ bool createUserEnv( debug("evaluating user environment builder"); state.forceValue(topLevel, topLevel.determinePos(noPos)); NixStringContext context; - auto & aDrvPath(*topLevel.attrs()->find(state.sDrvPath)); + auto & aDrvPath(*topLevel.attrs()->find(state.s.drvPath)); auto topLevelDrv = state.coerceToStorePath(aDrvPath.pos, *aDrvPath.value, context, ""); topLevelDrv.requireDerivation(); - auto & aOutPath(*topLevel.attrs()->find(state.sOutPath)); + auto & aOutPath(*topLevel.attrs()->find(state.s.outPath)); auto topLevelOut = state.coerceToStorePath(aOutPath.pos, *aOutPath.value, context, ""); /* Realise the resulting store expression. */ diff --git a/src/nix/search.cc b/src/nix/search.cc index 562af3151..910450e95 100644 --- a/src/nix/search.cc +++ b/src/nix/search.cc @@ -108,10 +108,10 @@ struct CmdSearch : InstallableValueCommand, MixJSON }; if (cursor.isDerivation()) { - DrvName name(cursor.getAttr(state->sName)->getString()); + DrvName name(cursor.getAttr(state->s.name)->getString()); - auto aMeta = cursor.maybeGetAttr(state->sMeta); - auto aDescription = aMeta ? aMeta->maybeGetAttr(state->sDescription) : nullptr; + auto aMeta = cursor.maybeGetAttr(state->s.meta); + auto aDescription = aMeta ? aMeta->maybeGetAttr(state->s.description) : nullptr; auto description = aDescription ? 
aDescription->getString() : ""; std::replace(description.begin(), description.end(), '\n', ' '); auto attrPath2 = concatStringsSep(".", attrPathS); @@ -176,7 +176,7 @@ struct CmdSearch : InstallableValueCommand, MixJSON recurse(); else if (attrPathS[0] == "legacyPackages" && attrPath.size() > 2) { - auto attr = cursor.maybeGetAttr(state->sRecurseForDerivations); + auto attr = cursor.maybeGetAttr(state->s.recurseForDerivations); if (attr && attr->getBool()) recurse(); } From 8251305affdf8b9ce55313f612b9f4795b6a5b13 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 1 Sep 2025 01:26:14 +0300 Subject: [PATCH 297/382] Reapply "Merge pull request #13741 from xokdvium/toml-timestamps" This reverts commit 75740fbd757567adfeb0917fe47995cc01df1879. --- packaging/dependencies.nix | 10 ++ src/libexpr/meson.build | 6 + src/libexpr/primops/fromTOML.cc | 119 ++++++++++++++---- .../lang/eval-okay-fromTOML-timestamps.exp | 2 +- .../lang/eval-okay-fromTOML-timestamps.nix | 46 ++++++- 5 files changed, 158 insertions(+), 25 deletions(-) diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix index 3d7da9acb..16dd34d0e 100644 --- a/packaging/dependencies.nix +++ b/packaging/dependencies.nix @@ -76,6 +76,16 @@ scope: { prevAttrs.postInstall; }); + toml11 = pkgs.toml11.overrideAttrs rec { + version = "4.4.0"; + src = pkgs.fetchFromGitHub { + owner = "ToruNiina"; + repo = "toml11"; + tag = "v${version}"; + hash = "sha256-sgWKYxNT22nw376ttGsTdg0AMzOwp8QH3E8mx0BZJTQ="; + }; + }; + # TODO Hack until https://github.com/NixOS/nixpkgs/issues/45462 is fixed. boost = (pkgs.boost.override { diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index 15bca88f0..0331d3c61 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -71,6 +71,12 @@ toml11 = dependency( method : 'cmake', include_type : 'system', ) + +configdata_priv.set( + 'HAVE_TOML11_4', + toml11.version().version_compare('>= 4.0.0').to_int(), +) + deps_other += toml11 config_priv_h = configure_file( diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc index 533739592..7d98a5de9 100644 --- a/src/libexpr/primops/fromTOML.cc +++ b/src/libexpr/primops/fromTOML.cc @@ -1,73 +1,140 @@ #include "nix/expr/primops.hh" #include "nix/expr/eval-inline.hh" +#include "expr-config-private.hh" + #include #include namespace nix { +#if HAVE_TOML11_4 + +/** + * This is what toml11 < 4.0 did when choosing the subsecond precision. + * TOML 1.0.0 spec doesn't define how sub-millisecond ranges should be handled and calls it + * implementation defined behavior. For a lack of a better choice we stick with what older versions + * of toml11 did [1]. + * + * [1]: https://github.com/ToruNiina/toml11/blob/dcfe39a783a94e8d52c885e5883a6fbb21529019/toml/datetime.hpp#L282 + */ +static size_t normalizeSubsecondPrecision(toml::local_time lt) +{ + auto millis = lt.millisecond; + auto micros = lt.microsecond; + auto nanos = lt.nanosecond; + if (millis != 0 || micros != 0 || nanos != 0) { + if (micros != 0 || nanos != 0) { + if (nanos != 0) + return 9; + return 6; + } + return 3; + } + return 0; +} + +/** + * Normalize date/time formats to serialize to the same strings as versions prior to toml11 4.0. + * + * Several things to consider: + * + * 1. Sub-millisecond range is represented the same way as in toml11 versions prior to 4.0. Precisioun is rounded + * towards the next multiple of 3 or capped at 9 digits. + * 2. Seconds must be specified. 
This may become optional in (yet unreleased) TOML 1.1.0, but 1.0.0 defined local time + * in terms of RFC3339 [1]. + * 3. date-time separator (`t`, `T` or space ` `) is canonicalized to an upper T. This is compliant with RFC3339 + * [1] 5.6: + * > Applications that generate this format SHOULD use upper case letters. + * + * [1]: https://datatracker.ietf.org/doc/html/rfc3339#section-5.6 + */ +static void normalizeDatetimeFormat(toml::value & t) +{ + if (t.is_local_datetime()) { + auto & ldt = t.as_local_datetime(); + t.as_local_datetime_fmt() = { + .delimiter = toml::datetime_delimiter_kind::upper_T, + // https://datatracker.ietf.org/doc/html/rfc3339#section-5.6 + .has_seconds = true, // Mandated by TOML 1.0.0 + .subsecond_precision = normalizeSubsecondPrecision(ldt.time), + }; + return; + } + + if (t.is_offset_datetime()) { + auto & odt = t.as_offset_datetime(); + t.as_offset_datetime_fmt() = { + .delimiter = toml::datetime_delimiter_kind::upper_T, + // https://datatracker.ietf.org/doc/html/rfc3339#section-5.6 + .has_seconds = true, // Mandated by TOML 1.0.0 + .subsecond_precision = normalizeSubsecondPrecision(odt.time), + }; + return; + } + + if (t.is_local_time()) { + auto & lt = t.as_local_time(); + t.as_local_time_fmt() = { + .has_seconds = true, // Mandated by TOML 1.0.0 + .subsecond_precision = normalizeSubsecondPrecision(lt), + }; + return; + } +} + +#endif + static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Value & val) { auto toml = state.forceStringNoCtx(*args[0], pos, "while evaluating the argument passed to builtins.fromTOML"); std::istringstream tomlStream(std::string{toml}); - std::function visit; - - visit = [&](Value & v, toml::value t) { + auto visit = [&](auto & self, Value & v, toml::value t) -> void { switch (t.type()) { case toml::value_t::table: { auto table = toml::get(t); - - size_t size = 0; - for (auto & i : table) { - (void) i; - size++; - } - - auto attrs = state.buildBindings(size); + auto attrs = state.buildBindings(table.size()); for (auto & elem : table) { forceNoNullByte(elem.first); - visit(attrs.alloc(elem.first), elem.second); + self(self, attrs.alloc(elem.first), elem.second); } v.mkAttrs(attrs); } break; - ; case toml::value_t::array: { auto array = toml::get>(t); auto list = state.buildList(array.size()); for (const auto & [n, v] : enumerate(list)) - visit(*(v = state.allocValue()), array[n]); + self(self, *(v = state.allocValue()), array[n]); v.mkList(list); } break; - ; case toml::value_t::boolean: v.mkBool(toml::get(t)); break; - ; case toml::value_t::integer: v.mkInt(toml::get(t)); break; - ; case toml::value_t::floating: v.mkFloat(toml::get(t)); break; - ; case toml::value_t::string: { auto s = toml::get(t); forceNoNullByte(s); v.mkString(s); } break; - ; case toml::value_t::local_datetime: case toml::value_t::offset_datetime: case toml::value_t::local_date: case toml::value_t::local_time: { if (experimentalFeatureSettings.isEnabled(Xp::ParseTomlTimestamps)) { +#if HAVE_TOML11_4 + normalizeDatetimeFormat(t); +#endif auto attrs = state.buildBindings(2); attrs.alloc("_type").mkString("timestamp"); std::ostringstream s; @@ -80,16 +147,24 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va throw std::runtime_error("Dates and times are not supported"); } } break; - ; case toml::value_t::empty: v.mkNull(); break; - ; } }; try { - visit(val, toml::parse(tomlStream, "fromTOML" /* the "filename" */)); + visit( + visit, + val, + toml::parse( + tomlStream, + "fromTOML" /* the "filename" */ +#if 
HAVE_TOML11_4 + , + toml::spec::v(1, 0, 0) // Be explicit that we are parsing TOML 1.0.0 without extensions +#endif + )); } catch (std::exception & e) { // TODO: toml::syntax_error state.error("while parsing TOML: %s", e.what()).atPos(pos).debugThrow(); } diff --git a/tests/functional/lang/eval-okay-fromTOML-timestamps.exp b/tests/functional/lang/eval-okay-fromTOML-timestamps.exp index 08b3c69a6..56e610533 100644 --- a/tests/functional/lang/eval-okay-fromTOML-timestamps.exp +++ b/tests/functional/lang/eval-okay-fromTOML-timestamps.exp @@ -1 +1 @@ -{ "1234" = "value"; "127.0.0.1" = "value"; a = { b = { c = { }; }; }; arr1 = [ 1 2 3 ]; arr2 = [ "red" "yellow" "green" ]; arr3 = [ [ 1 2 ] [ 3 4 5 ] ]; arr4 = [ "all" "strings" "are the same" "type" ]; arr5 = [ [ 1 2 ] [ "a" "b" "c" ] ]; arr7 = [ 1 2 3 ]; arr8 = [ 1 2 ]; bare-key = "value"; bare_key = "value"; bin1 = 214; bool1 = true; bool2 = false; "character encoding" = "value"; d = { e = { f = { }; }; }; dog = { "tater.man" = { type = { name = "pug"; }; }; }; flt1 = 1; flt2 = 3.1415; flt3 = -0.01; flt4 = 5e+22; flt5 = 1e+06; flt6 = -0.02; flt7 = 6.626e-34; flt8 = 9.22462e+06; fruit = [ { name = "apple"; physical = { color = "red"; shape = "round"; }; variety = [ { name = "red delicious"; } { name = "granny smith"; } ]; } { name = "banana"; variety = [ { name = "plantain"; } ]; } ]; g = { h = { i = { }; }; }; hex1 = 3735928559; hex2 = 3735928559; hex3 = 3735928559; int1 = 99; int2 = 42; int3 = 0; int4 = -17; int5 = 1000; int6 = 5349221; int7 = 12345; j = { "ʞ" = { l = { }; }; }; key = "value"; key2 = "value"; ld1 = { _type = "timestamp"; value = "1979-05-27"; }; ldt1 = { _type = "timestamp"; value = "1979-05-27T07:32:00"; }; ldt2 = { _type = "timestamp"; value = "1979-05-27T00:32:00.999999"; }; lt1 = { _type = "timestamp"; value = "07:32:00"; }; lt2 = { _type = "timestamp"; value = "00:32:00.999999"; }; name = "Orange"; oct1 = 342391; oct2 = 493; odt1 = { _type = "timestamp"; value = "1979-05-27T07:32:00Z"; }; odt2 = { _type = "timestamp"; value = "1979-05-27T00:32:00-07:00"; }; odt3 = { _type = "timestamp"; value = "1979-05-27T00:32:00.999999-07:00"; }; odt4 = { _type = "timestamp"; value = "1979-05-27T07:32:00Z"; }; physical = { color = "orange"; shape = "round"; }; products = [ { name = "Hammer"; sku = 738594937; } { } { color = "gray"; name = "Nail"; sku = 284758393; } ]; "quoted \"value\"" = "value"; site = { "google.com" = true; }; str = "I'm a string. \"You can quote me\". 
Name\tJosé\nLocation\tSF."; table-1 = { key1 = "some string"; key2 = 123; }; table-2 = { key1 = "another string"; key2 = 456; }; x = { y = { z = { w = { animal = { type = { name = "pug"; }; }; name = { first = "Tom"; last = "Preston-Werner"; }; point = { x = 1; y = 2; }; }; }; }; }; "ʎǝʞ" = "value"; } +{ "1234" = "value"; "127.0.0.1" = "value"; a = { b = { c = { }; }; }; arr1 = [ 1 2 3 ]; arr2 = [ "red" "yellow" "green" ]; arr3 = [ [ 1 2 ] [ 3 4 5 ] ]; arr4 = [ "all" "strings" "are the same" "type" ]; arr5 = [ [ 1 2 ] [ "a" "b" "c" ] ]; arr7 = [ 1 2 3 ]; arr8 = [ 1 2 ]; bare-key = "value"; bare_key = "value"; bin1 = 214; bool1 = true; bool2 = false; "character encoding" = "value"; d = { e = { f = { }; }; }; dog = { "tater.man" = { type = { name = "pug"; }; }; }; flt1 = 1; flt2 = 3.1415; flt3 = -0.01; flt4 = 5e+22; flt5 = 1e+06; flt6 = -0.02; flt7 = 6.626e-34; flt8 = 9.22462e+06; fruit = [ { name = "apple"; physical = { color = "red"; shape = "round"; }; variety = [ { name = "red delicious"; } { name = "granny smith"; } ]; } { name = "banana"; variety = [ { name = "plantain"; } ]; } ]; g = { h = { i = { }; }; }; hex1 = 3735928559; hex2 = 3735928559; hex3 = 3735928559; int1 = 99; int2 = 42; int3 = 0; int4 = -17; int5 = 1000; int6 = 5349221; int7 = 12345; j = { "ʞ" = { l = { }; }; }; key = "value"; key2 = "value"; ld1 = { _type = "timestamp"; value = "1979-05-27"; }; ldt1 = { _type = "timestamp"; value = "1979-05-27T07:32:00"; }; ldt10 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123456789"; }; ldt11 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123456789"; }; ldt2 = { _type = "timestamp"; value = "1979-05-27T07:32:00.100"; }; ldt3 = { _type = "timestamp"; value = "1979-05-27T07:32:00.120"; }; ldt4 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123"; }; ldt5 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123400"; }; ldt6 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123450"; }; ldt7 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123456"; }; ldt8 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123456700"; }; ldt9 = { _type = "timestamp"; value = "1979-05-27T00:32:00.123456780"; }; lt1 = { _type = "timestamp"; value = "07:32:00"; }; lt10 = { _type = "timestamp"; value = "00:32:00.123456789"; }; lt11 = { _type = "timestamp"; value = "00:32:00.123456789"; }; lt2 = { _type = "timestamp"; value = "00:32:00.100"; }; lt3 = { _type = "timestamp"; value = "00:32:00.120"; }; lt4 = { _type = "timestamp"; value = "00:32:00.123"; }; lt5 = { _type = "timestamp"; value = "00:32:00.123400"; }; lt6 = { _type = "timestamp"; value = "00:32:00.123450"; }; lt7 = { _type = "timestamp"; value = "00:32:00.123456"; }; lt8 = { _type = "timestamp"; value = "00:32:00.123456700"; }; lt9 = { _type = "timestamp"; value = "00:32:00.123456780"; }; name = "Orange"; oct1 = 342391; oct2 = 493; odt1 = { _type = "timestamp"; value = "1979-05-27T07:32:00Z"; }; odt10 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123456Z"; }; odt11 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123456700Z"; }; odt12 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123456780Z"; }; odt13 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123456789Z"; }; odt14 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123456789Z"; }; odt2 = { _type = "timestamp"; value = "1979-05-27T00:32:00-07:00"; }; odt3 = { _type = "timestamp"; value = "1979-05-27T00:32:00.999999-07:00"; }; odt4 = { _type = "timestamp"; value = "1979-05-27T07:32:00Z"; }; odt5 = { _type = "timestamp"; value = 
"1979-05-27T07:32:00.100Z"; }; odt6 = { _type = "timestamp"; value = "1979-05-27T07:32:00.120Z"; }; odt7 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123Z"; }; odt8 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123400Z"; }; odt9 = { _type = "timestamp"; value = "1979-05-27T07:32:00.123450Z"; }; physical = { color = "orange"; shape = "round"; }; products = [ { name = "Hammer"; sku = 738594937; } { } { color = "gray"; name = "Nail"; sku = 284758393; } ]; "quoted \"value\"" = "value"; site = { "google.com" = true; }; str = "I'm a string. \"You can quote me\". Name\tJosé\nLocation\tSF."; table-1 = { key1 = "some string"; key2 = 123; }; table-2 = { key1 = "another string"; key2 = 456; }; x = { y = { z = { w = { animal = { type = { name = "pug"; }; }; name = { first = "Tom"; last = "Preston-Werner"; }; point = { x = 1; y = 2; }; }; }; }; }; "ʎǝʞ" = "value"; } diff --git a/tests/functional/lang/eval-okay-fromTOML-timestamps.nix b/tests/functional/lang/eval-okay-fromTOML-timestamps.nix index 74cff9470..d8f3a03e9 100644 --- a/tests/functional/lang/eval-okay-fromTOML-timestamps.nix +++ b/tests/functional/lang/eval-okay-fromTOML-timestamps.nix @@ -55,11 +55,53 @@ builtins.fromTOML '' odt2 = 1979-05-27T00:32:00-07:00 odt3 = 1979-05-27T00:32:00.999999-07:00 odt4 = 1979-05-27 07:32:00Z + # milliseconds + odt5 = 1979-05-27 07:32:00.1Z + odt6 = 1979-05-27 07:32:00.12Z + odt7 = 1979-05-27 07:32:00.123Z + # microseconds + odt8 = 1979-05-27t07:32:00.1234Z + odt9 = 1979-05-27t07:32:00.12345Z + odt10 = 1979-05-27t07:32:00.123456Z + # nanoseconds + odt11 = 1979-05-27 07:32:00.1234567Z + odt12 = 1979-05-27 07:32:00.12345678Z + odt13 = 1979-05-27 07:32:00.123456789Z + # no more precision after nanoseconds + odt14 = 1979-05-27t07:32:00.1234567891Z + ldt1 = 1979-05-27T07:32:00 - ldt2 = 1979-05-27T00:32:00.999999 + # milliseconds + ldt2 = 1979-05-27T07:32:00.1 + ldt3 = 1979-05-27T07:32:00.12 + ldt4 = 1979-05-27T07:32:00.123 + # microseconds + ldt5 = 1979-05-27t00:32:00.1234 + ldt6 = 1979-05-27t00:32:00.12345 + ldt7 = 1979-05-27t00:32:00.123456 + # nanoseconds + ldt8 = 1979-05-27 00:32:00.1234567 + ldt9 = 1979-05-27 00:32:00.12345678 + ldt10 = 1979-05-27 00:32:00.123456789 + # no more precision after nanoseconds + ldt11 = 1979-05-27t00:32:00.1234567891 + ld1 = 1979-05-27 lt1 = 07:32:00 - lt2 = 00:32:00.999999 + # milliseconds + lt2 = 00:32:00.1 + lt3 = 00:32:00.12 + lt4 = 00:32:00.123 + # microseconds + lt5 = 00:32:00.1234 + lt6 = 00:32:00.12345 + lt7 = 00:32:00.123456 + # nanoseconds + lt8 = 00:32:00.1234567 + lt9 = 00:32:00.12345678 + lt10 = 00:32:00.123456789 + # no more precision after nanoseconds + lt11 = 00:32:00.1234567891 arr1 = [ 1, 2, 3 ] arr2 = [ "red", "yellow", "green" ] From acd627fa46fd496443ca2196d2d7e44787d66bdf Mon Sep 17 00:00:00 2001 From: Emily Date: Thu, 14 Aug 2025 17:15:17 +0100 Subject: [PATCH 298/382] tests/functional/lang: Add tests for `builtins.fromTOML` overflow This adds regression tests for fromTOML overflow/underflow behavior. Previous versions of toml11 used to saturate, but this was never an intended behavior (and Snix/Nix 2.3/toml11 >= 4.0 validate this). 
(cherry picked from Lix [1,2]) [1]: https://git.lix.systems/lix-project/lix/commit/7ee442079dfbae4fe2d2fbb91a7226b87251cd65 [2]: https://git.lix.systems/lix-project/lix/commit/4de09b6b5493db4bd7f6348255a1fdcb38b9ed2f --- .../lang/eval-fail-fromTOML-overflow.err.exp | 13 +++++++++++++ .../functional/lang/eval-fail-fromTOML-overflow.nix | 1 + .../lang/eval-fail-fromTOML-underflow.err.exp | 13 +++++++++++++ .../lang/eval-fail-fromTOML-underflow.nix | 1 + 4 files changed, 28 insertions(+) create mode 100644 tests/functional/lang/eval-fail-fromTOML-overflow.err.exp create mode 100644 tests/functional/lang/eval-fail-fromTOML-overflow.nix create mode 100644 tests/functional/lang/eval-fail-fromTOML-underflow.err.exp create mode 100644 tests/functional/lang/eval-fail-fromTOML-underflow.nix diff --git a/tests/functional/lang/eval-fail-fromTOML-overflow.err.exp b/tests/functional/lang/eval-fail-fromTOML-overflow.err.exp new file mode 100644 index 000000000..14b0e31c1 --- /dev/null +++ b/tests/functional/lang/eval-fail-fromTOML-overflow.err.exp @@ -0,0 +1,13 @@ +error: + … while calling the 'fromTOML' builtin + at /pwd/lang/eval-fail-fromTOML-overflow.nix:1:1: + 1| builtins.fromTOML ''attr = 9223372036854775808'' + | ^ + 2| + + error: while parsing TOML: [error] toml::parse_dec_integer: too large integer: current max digits = 2^63 + --> fromTOML + | + 1 | attr = 9223372036854775808 + | ^-- must be < 2^63 + diff --git a/tests/functional/lang/eval-fail-fromTOML-overflow.nix b/tests/functional/lang/eval-fail-fromTOML-overflow.nix new file mode 100644 index 000000000..17f0448b3 --- /dev/null +++ b/tests/functional/lang/eval-fail-fromTOML-overflow.nix @@ -0,0 +1 @@ +builtins.fromTOML ''attr = 9223372036854775808'' diff --git a/tests/functional/lang/eval-fail-fromTOML-underflow.err.exp b/tests/functional/lang/eval-fail-fromTOML-underflow.err.exp new file mode 100644 index 000000000..28f1079dc --- /dev/null +++ b/tests/functional/lang/eval-fail-fromTOML-underflow.err.exp @@ -0,0 +1,13 @@ +error: + … while calling the 'fromTOML' builtin + at /pwd/lang/eval-fail-fromTOML-underflow.nix:1:1: + 1| builtins.fromTOML ''attr = -9223372036854775809'' + | ^ + 2| + + error: while parsing TOML: [error] toml::parse_dec_integer: too large integer: current max digits = 2^63 + --> fromTOML + | + 1 | attr = -9223372036854775809 + | ^-- must be < 2^63 + diff --git a/tests/functional/lang/eval-fail-fromTOML-underflow.nix b/tests/functional/lang/eval-fail-fromTOML-underflow.nix new file mode 100644 index 000000000..923fdf354 --- /dev/null +++ b/tests/functional/lang/eval-fail-fromTOML-underflow.nix @@ -0,0 +1 @@ +builtins.fromTOML ''attr = -9223372036854775809'' From e54870001037fd4b7b2b9f3d6ff9e8c751e6f8df Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 1 Sep 2025 00:35:31 +0300 Subject: [PATCH 299/382] lib{store,fetchers}: Pass URLs specified directly verbatim to FileTransferRequest The URL should not be normalized before handing it off to cURL, because builtin fetchers like fetchTarball/fetchurl are expected to work with arbitrary URLs, that might not be RFC3986 compliant. For those cases Nix should not normalize URLs, though validation is fine. ParseURL and cURL are supposed to match the set of acceptable URLs, since they implement the same RFC. 
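A minimal sketch of the validate-but-pass-verbatim idea (assumed include path, simplified shape; the actual `ValidURL` type added below also accepts an already-parsed URL and keeps `ParsedURL` as a private base):

    #include "nix/util/url.hh"  // assumed include path for ParsedURL/parseURL
    #include <string>
    #include <string_view>

    // Keep the user-supplied URL string verbatim for cURL, but still parse it
    // once so clearly invalid URLs are rejected up front.
    struct VerbatimUrl {
        std::string raw;        // exactly what the caller wrote; passed on as-is
        nix::ParsedURL parsed;  // used only for validation and inspection

        explicit VerbatimUrl(std::string s)
            : raw(std::move(s))
            , parsed(nix::parseURL(raw))  // throws BadURL on malformed input
        { }

        std::string_view scheme() const { return parsed.scheme; }
        const std::string & to_string() const { return raw; }  // no re-serialization
    };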
--- src/libfetchers/tarball.cc | 12 ++-- src/libstore/builtins/fetchurl.cc | 2 +- src/libstore/filetransfer.cc | 6 +- .../include/nix/store/filetransfer.hh | 6 +- src/libutil/include/nix/util/url.hh | 59 +++++++++++++++++++ src/libutil/url.cc | 6 ++ src/nix/prefetch.cc | 2 +- tests/functional/fetchurl.sh | 5 ++ 8 files changed, 84 insertions(+), 14 deletions(-) diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index 52038317e..8a8039b6b 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -43,7 +43,7 @@ DownloadFileResult downloadFile( if (cached && !cached->expired) return useCached(); - FileTransferRequest request(parseURL(url)); + FileTransferRequest request(ValidURL{url}); request.headers = headers; if (cached) request.expectedETag = getStrAttr(cached->value, "etag"); @@ -109,13 +109,13 @@ DownloadFileResult downloadFile( static DownloadTarballResult downloadTarball_( const Settings & settings, const std::string & urlS, const Headers & headers, const std::string & displayPrefix) { - auto url = parseURL(urlS); + ValidURL url = urlS; // Some friendly error messages for common mistakes. // Namely lets catch when the url is a local file path, but // it is not in fact a tarball. - if (url.scheme == "file") { - std::filesystem::path localPath = renderUrlPathEnsureLegal(url.path); + if (url.scheme() == "file") { + std::filesystem::path localPath = renderUrlPathEnsureLegal(url.path()); if (!exists(localPath)) { throw Error("tarball '%s' does not exist.", localPath); } @@ -166,7 +166,7 @@ static DownloadTarballResult downloadTarball_( /* Note: if the download is cached, `importTarball()` will receive no data, which causes it to import an empty tarball. */ - auto archive = !url.path.empty() && hasSuffix(toLower(url.path.back()), ".zip") ? ({ + auto archive = !url.path().empty() && hasSuffix(toLower(url.path().back()), ".zip") ? ({ /* In streaming mode, libarchive doesn't handle symlinks in zip files correctly (#10649). So write the entire file to disk so libarchive can access it @@ -180,7 +180,7 @@ static DownloadTarballResult downloadTarball_( } TarArchive{path}; }) - : TarArchive{*source}; + : TarArchive{*source}; auto tarballCache = getTarballCache(); auto parseSink = tarballCache->getFileSystemObjectSink(); auto lastModified = unpackTarfileToSink(archive, *parseSink); diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index c44d4d5ee..7abfa4495 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -37,7 +37,7 @@ static void builtinFetchurl(const BuiltinBuilderContext & ctx) auto fetch = [&](const std::string & url) { auto source = sinkToSource([&](Sink & sink) { - FileTransferRequest request(parseURL(url)); + FileTransferRequest request(ValidURL{url}); request.decompress = false; auto decompressor = makeDecompressionSink(unpack && hasSuffix(mainUrl, ".xz") ? 
"xz" : "none", sink); diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 0007b9ad8..a162df1ad 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -784,7 +784,7 @@ struct curlFileTransfer : public FileTransfer void enqueueItem(std::shared_ptr item) { - if (item->request.data && item->request.uri.scheme != "http" && item->request.uri.scheme != "https") + if (item->request.data && item->request.uri.scheme() != "http" && item->request.uri.scheme() != "https") throw nix::Error("uploading to '%s' is not supported", item->request.uri.to_string()); { @@ -801,11 +801,11 @@ struct curlFileTransfer : public FileTransfer void enqueueFileTransfer(const FileTransferRequest & request, Callback callback) override { /* Ugly hack to support s3:// URIs. */ - if (request.uri.scheme == "s3") { + if (request.uri.scheme() == "s3") { // FIXME: do this on a worker thread try { #if NIX_WITH_S3_SUPPORT - auto parsed = ParsedS3URL::parse(request.uri); + auto parsed = ParsedS3URL::parse(request.uri.parsed()); std::string profile = parsed.profile.value_or(""); std::string region = parsed.region.value_or(Aws::Region::US_EAST_1); diff --git a/src/libstore/include/nix/store/filetransfer.hh b/src/libstore/include/nix/store/filetransfer.hh index 8a04293bd..6f541d463 100644 --- a/src/libstore/include/nix/store/filetransfer.hh +++ b/src/libstore/include/nix/store/filetransfer.hh @@ -71,7 +71,7 @@ extern const unsigned int RETRY_TIME_MS_DEFAULT; struct FileTransferRequest { - ParsedURL uri; + ValidURL uri; Headers headers; std::string expectedETag; bool verifyTLS = true; @@ -85,8 +85,8 @@ struct FileTransferRequest std::string mimeType; std::function dataCallback; - FileTransferRequest(ParsedURL uri) - : uri(uri) + FileTransferRequest(ValidURL uri) + : uri(std::move(uri)) , parentAct(getCurActivity()) { } diff --git a/src/libutil/include/nix/util/url.hh b/src/libutil/include/nix/util/url.hh index 5aa85230a..f2bd79b08 100644 --- a/src/libutil/include/nix/util/url.hh +++ b/src/libutil/include/nix/util/url.hh @@ -341,4 +341,63 @@ ParsedURL fixGitURL(const std::string & url); */ bool isValidSchemeName(std::string_view scheme); +/** + * Either a ParsedURL or a verbatim string, but the string must be a valid + * ParsedURL. This is necessary because in certain cases URI must be passed + * verbatim (e.g. in builtin fetchers), since those are specified by the user. + * In those cases normalizations performed by the ParsedURL might be surprising + * and undesirable, since Nix must be a universal client that has to work with + * various broken services that might interpret URLs in quirky and non-standard ways. + * + * One of those examples is space-as-plus encoding that is very widespread, but it's + * not strictly RFC3986 compliant. We must preserve that information verbatim. + * + * Though we perform parsing and validation for internal needs. + */ +struct ValidURL : private ParsedURL +{ + std::optional encoded; + + ValidURL(std::string str) + : ParsedURL(parseURL(str, /*lenient=*/false)) + , encoded(std::move(str)) + { + } + + ValidURL(std::string_view str) + : ValidURL(std::string{str}) + { + } + + ValidURL(ParsedURL parsed) + : ParsedURL{std::move(parsed)} + { + } + + /** + * Get the encoded URL (if specified) verbatim or encode the parsed URL. 
+ */ + std::string to_string() const + { + return encoded.or_else([&]() -> std::optional { return ParsedURL::to_string(); }).value(); + } + + const ParsedURL & parsed() const & + { + return *this; + } + + std::string_view scheme() const & + { + return ParsedURL::scheme; + } + + const auto & path() const & + { + return ParsedURL::path; + } +}; + +std::ostream & operator<<(std::ostream & os, const ValidURL & url); + } // namespace nix diff --git a/src/libutil/url.cc b/src/libutil/url.cc index b9bf0b4f4..1c7fd3f0f 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -434,4 +434,10 @@ bool isValidSchemeName(std::string_view s) return std::regex_match(s.begin(), s.end(), regex, std::regex_constants::match_default); } +std::ostream & operator<<(std::ostream & os, const ValidURL & url) +{ + os << url.to_string(); + return os; +} + } // namespace nix diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index 88a4717a0..26905e34c 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -105,7 +105,7 @@ std::tuple prefetchFile( FdSink sink(fd.get()); - FileTransferRequest req(parseURL(url)); + FileTransferRequest req(ValidURL{url}); req.decompress = false; getFileTransfer()->download(std::move(req), sink); } diff --git a/tests/functional/fetchurl.sh b/tests/functional/fetchurl.sh index c25ac3216..5bc8ca625 100755 --- a/tests/functional/fetchurl.sh +++ b/tests/functional/fetchurl.sh @@ -88,3 +88,8 @@ requireDaemonNewerThan "2.20" expected=100 if [[ -v NIX_DAEMON_PACKAGE ]]; then expected=1; fi # work around the daemon not returning a 100 status correctly expectStderr $expected nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url "file://$narxz" 2>&1 | grep 'must be a fixed-output or impure derivation' + +requireDaemonNewerThan "2.32.0pre20250831" + +expect 1 nix-build --expr 'import ' --argstr name 'name' --argstr url "file://authority.not.allowed/fetchurl.sh?a=1&a=2" --no-out-link |& + grepQuiet "error: file:// URL 'file://authority.not.allowed/fetchurl.sh?a=1&a=2' has unexpected authority 'authority.not.allowed'" From 04ad66af5f0fbec60783d8913292125f43954dcd Mon Sep 17 00:00:00 2001 From: Farid Zakaria Date: Sun, 24 Aug 2025 20:19:53 -0700 Subject: [PATCH 300/382] Improve Git URI handling Git URI can also support scp style links similar to git itself. This change augments the function fixGitURL to better handle the scp style urls through a minimal parser rather than regex which has been found to be brittle. 
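For illustration, the intended behaviour, taken verbatim from the test cases added below (a usage sketch, assuming the usual include path; not part of the change itself):

    #include "nix/util/url.hh"  // assumed include path for fixGitURL
    #include <cassert>

    void fixGitURLExamples()
    {
        // scp-style remote is rewritten to an ssh:// URL
        assert(nix::fixGitURL("git@github.com:owner/repo.git").to_string()
               == "ssh://git@github.com/owner/repo.git");
        // an absolute path becomes a file:// URL
        assert(nix::fixGitURL("/home/me/repo").to_string()
               == "file:///home/me/repo");
    }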
* Support for IPV6 added * New test cases added for fixGitURL * Clearer documentation on purpose and goal of function * More `std::string_view` for performance * A few more URL tests Fixes #5958 --- src/libutil-tests/url.cc | 58 +++++++++++++++++++++ src/libutil/include/nix/util/url.hh | 21 ++++++-- src/libutil/url.cc | 78 +++++++++++++++++++++++++---- 3 files changed, 142 insertions(+), 15 deletions(-) diff --git a/src/libutil-tests/url.cc b/src/libutil-tests/url.cc index 56b879846..3f856b0aa 100644 --- a/src/libutil-tests/url.cc +++ b/src/libutil-tests/url.cc @@ -12,6 +12,64 @@ namespace nix { using Authority = ParsedURL::Authority; using HostType = Authority::HostType; +struct FixGitURLParam +{ + std::string_view input; + std::string_view expected; +}; + +std::ostream & operator<<(std::ostream & os, const FixGitURLParam & param) +{ + return os << "Input: \"" << param.input << "\", Expected: \"" << param.expected << "\""; +} + +class FixGitURLTestSuite : public ::testing::TestWithParam +{}; + +INSTANTIATE_TEST_SUITE_P( + FixGitURLs, + FixGitURLTestSuite, + ::testing::Values( + // https://github.com/NixOS/nix/issues/5958 + // Already proper URL with git+ssh + FixGitURLParam{"git+ssh://user@domain:1234/path", "git+ssh://user@domain:1234/path"}, + // SCP-like URL (rewritten to ssh://) + FixGitURLParam{"git@github.com:owner/repo.git", "ssh://git@github.com/owner/repo.git"}, + // SCP-like URL (no user) + FixGitURLParam{"github.com:owner/repo.git", "ssh://github.com/owner/repo.git"}, + // SCP-like URL (leading slash) + FixGitURLParam{"github.com:/owner/repo.git", "ssh://github.com/owner/repo.git"}, + // Absolute path (becomes file:) + FixGitURLParam{"/home/me/repo", "file:///home/me/repo"}, + // Relative path (becomes file:// absolute) + FixGitURLParam{"relative/repo", "file:///relative/repo"}, + // Already file: scheme + // NOTE: This is not valid technically as it's not absolute + FixGitURLParam{"file:/var/repos/x", "file:/var/repos/x"}, + // IPV6 test case + FixGitURLParam{"user@[2001:db8:1::2]:/home/file", "ssh://user@[2001:db8:1::2]/home/file"})); + +TEST_P(FixGitURLTestSuite, parsesVariedGitUrls) +{ + auto & p = GetParam(); + const auto actual = fixGitURL(p.input).to_string(); + EXPECT_EQ(actual, p.expected); +} + +TEST_P(FixGitURLTestSuite, fixGitIsIdempotent) +{ + auto & p = GetParam(); + const auto actual = fixGitURL(p.expected).to_string(); + EXPECT_EQ(actual, p.expected); +} + +TEST_P(FixGitURLTestSuite, fixGitOutputParses) +{ + auto & p = GetParam(); + const auto parsed = fixGitURL(p.expected); + EXPECT_EQ(parseURL(parsed.to_string()), parsed); +} + TEST(parseURL, parsesSimpleHttpUrl) { auto s = "http://www.example.org/file.tar.gz"; diff --git a/src/libutil/include/nix/util/url.hh b/src/libutil/include/nix/util/url.hh index 5aa85230a..e04fe73f4 100644 --- a/src/libutil/include/nix/util/url.hh +++ b/src/libutil/include/nix/util/url.hh @@ -327,10 +327,23 @@ struct ParsedUrlScheme ParsedUrlScheme parseUrlScheme(std::string_view scheme); -/* Detects scp-style uris (e.g. git@github.com:NixOS/nix) and fixes - them by removing the `:` and assuming a scheme of `ssh://`. Also - changes absolute paths into file:// URLs. */ -ParsedURL fixGitURL(const std::string & url); +/** + * Normalize a Git remote string from various styles into a URL-like form. 
+ * Input forms handled: + * 1) SCP-style SSH syntax: "[user@]host:path" -> "ssh://user@host/path" + * 2) Already "file:" URLs: "file:/abs/or/rel" -> unchanged + * 3) Bare paths / filenames: "src/repo" or "/abs" -> "file:src/repo" or "file:/abs" + * 4) Anything with "://": treated as a proper URL -> unchanged + * + * Note: for the scp-style, as they are converted to ssh-form, all paths are assumed to + * then be absolute whereas in programs like git, they retain the scp form which allows + * relative paths. + * + * Additionally, if no url can be determined, it is returned as a file:// URI. + * If the url does not start with a leading slash, one will be added since there are no + * relative path URIs. + */ +ParsedURL fixGitURL(std::string_view url); /** * Whether a string is valid as RFC 3986 scheme name. diff --git a/src/libutil/url.cc b/src/libutil/url.cc index b9bf0b4f4..1acc219df 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -408,21 +408,77 @@ ParsedUrlScheme parseUrlScheme(std::string_view scheme) }; } -ParsedURL fixGitURL(const std::string & url) +struct ScpLike { - std::regex scpRegex("([^/]*)@(.*):(.*)"); - if (!hasPrefix(url, "/") && std::regex_match(url, scpRegex)) - return parseURL(std::regex_replace(url, scpRegex, "ssh://$1@$2/$3")); - if (hasPrefix(url, "file:")) - return parseURL(url); - if (url.find("://") == std::string::npos) { + ParsedURL::Authority authority; + std::string_view path; +}; + +/** + * Parse a scp url. This is a helper struct for fixGitURL. + * This is needed since we support scp-style urls for git urls. + * https://git-scm.com/book/ms/v2/Git-on-the-Server-The-Protocols + * + * A good reference is libgit2 also allows scp style + * https://github.com/libgit2/libgit2/blob/58d9363f02f1fa39e46d49b604f27008e75b72f2/src/util/net.c#L806 + */ +static std::optional parseScp(const std::string_view s) noexcept +{ + if (s.empty() || s.front() == '/') + return std::nullopt; + + // Find the colon that separates host from path. + // Find the right-most since ipv6 has colons + const auto colon = s.rfind(':'); + if (colon == std::string_view::npos) + return std::nullopt; + + // Split head:[path] + const auto head = s.substr(0, colon); + const auto path = s.substr(colon + 1); + + if (head.empty()) + return std::nullopt; + + return ScpLike{ + .authority = ParsedURL::Authority::parse(head), + .path = path, + }; +} + +ParsedURL fixGitURL(const std::string_view url) +{ + try { + if (auto parsed = parseURL(url); parsed.scheme == "file" || parsed.authority) + return parsed; + } catch (BadURL &) { + } + + // if the url does not start with forward slash, add one + auto splitMakeAbs = [&](std::string_view pathS) { + std::vector path; + + if (!hasPrefix(pathS, "/")) { + path.emplace_back(""); + } + splitStringInto(path, pathS, "/"); + + return path; + }; + + if (auto scp = parseScp(url)) { return ParsedURL{ - .scheme = "file", - .authority = ParsedURL::Authority{}, - .path = splitString>(url, "/"), + .scheme = "ssh", + .authority = std::move(scp->authority), + .path = splitMakeAbs(scp->path), }; } - return parseURL(url); + + return ParsedURL{ + .scheme = "file", + .authority = ParsedURL::Authority{}, + .path = splitMakeAbs(url), + }; } // https://www.rfc-editor.org/rfc/rfc3986#section-3.1 From d2f1860ee52ef6263065a6a73d7d8ea331e4c65d Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 1 Sep 2025 15:08:57 -0400 Subject: [PATCH 301/382] Revert "Improve Git URI handling" I (@Ericson2314) messed up. 
We were supposed to test the status quo before landing any new chnages, and also there is one change that is not quite right (relative paths). I am reverting for now, and then backporting the test suite to the old situation. This reverts commit 04ad66af5f0fbec60783d8913292125f43954dcd. --- src/libutil-tests/url.cc | 58 --------------------- src/libutil/include/nix/util/url.hh | 21 ++------ src/libutil/url.cc | 78 ++++------------------------- 3 files changed, 15 insertions(+), 142 deletions(-) diff --git a/src/libutil-tests/url.cc b/src/libutil-tests/url.cc index 3f856b0aa..56b879846 100644 --- a/src/libutil-tests/url.cc +++ b/src/libutil-tests/url.cc @@ -12,64 +12,6 @@ namespace nix { using Authority = ParsedURL::Authority; using HostType = Authority::HostType; -struct FixGitURLParam -{ - std::string_view input; - std::string_view expected; -}; - -std::ostream & operator<<(std::ostream & os, const FixGitURLParam & param) -{ - return os << "Input: \"" << param.input << "\", Expected: \"" << param.expected << "\""; -} - -class FixGitURLTestSuite : public ::testing::TestWithParam -{}; - -INSTANTIATE_TEST_SUITE_P( - FixGitURLs, - FixGitURLTestSuite, - ::testing::Values( - // https://github.com/NixOS/nix/issues/5958 - // Already proper URL with git+ssh - FixGitURLParam{"git+ssh://user@domain:1234/path", "git+ssh://user@domain:1234/path"}, - // SCP-like URL (rewritten to ssh://) - FixGitURLParam{"git@github.com:owner/repo.git", "ssh://git@github.com/owner/repo.git"}, - // SCP-like URL (no user) - FixGitURLParam{"github.com:owner/repo.git", "ssh://github.com/owner/repo.git"}, - // SCP-like URL (leading slash) - FixGitURLParam{"github.com:/owner/repo.git", "ssh://github.com/owner/repo.git"}, - // Absolute path (becomes file:) - FixGitURLParam{"/home/me/repo", "file:///home/me/repo"}, - // Relative path (becomes file:// absolute) - FixGitURLParam{"relative/repo", "file:///relative/repo"}, - // Already file: scheme - // NOTE: This is not valid technically as it's not absolute - FixGitURLParam{"file:/var/repos/x", "file:/var/repos/x"}, - // IPV6 test case - FixGitURLParam{"user@[2001:db8:1::2]:/home/file", "ssh://user@[2001:db8:1::2]/home/file"})); - -TEST_P(FixGitURLTestSuite, parsesVariedGitUrls) -{ - auto & p = GetParam(); - const auto actual = fixGitURL(p.input).to_string(); - EXPECT_EQ(actual, p.expected); -} - -TEST_P(FixGitURLTestSuite, fixGitIsIdempotent) -{ - auto & p = GetParam(); - const auto actual = fixGitURL(p.expected).to_string(); - EXPECT_EQ(actual, p.expected); -} - -TEST_P(FixGitURLTestSuite, fixGitOutputParses) -{ - auto & p = GetParam(); - const auto parsed = fixGitURL(p.expected); - EXPECT_EQ(parseURL(parsed.to_string()), parsed); -} - TEST(parseURL, parsesSimpleHttpUrl) { auto s = "http://www.example.org/file.tar.gz"; diff --git a/src/libutil/include/nix/util/url.hh b/src/libutil/include/nix/util/url.hh index 55844ab95..f2bd79b08 100644 --- a/src/libutil/include/nix/util/url.hh +++ b/src/libutil/include/nix/util/url.hh @@ -327,23 +327,10 @@ struct ParsedUrlScheme ParsedUrlScheme parseUrlScheme(std::string_view scheme); -/** - * Normalize a Git remote string from various styles into a URL-like form. 
- * Input forms handled: - * 1) SCP-style SSH syntax: "[user@]host:path" -> "ssh://user@host/path" - * 2) Already "file:" URLs: "file:/abs/or/rel" -> unchanged - * 3) Bare paths / filenames: "src/repo" or "/abs" -> "file:src/repo" or "file:/abs" - * 4) Anything with "://": treated as a proper URL -> unchanged - * - * Note: for the scp-style, as they are converted to ssh-form, all paths are assumed to - * then be absolute whereas in programs like git, they retain the scp form which allows - * relative paths. - * - * Additionally, if no url can be determined, it is returned as a file:// URI. - * If the url does not start with a leading slash, one will be added since there are no - * relative path URIs. - */ -ParsedURL fixGitURL(std::string_view url); +/* Detects scp-style uris (e.g. git@github.com:NixOS/nix) and fixes + them by removing the `:` and assuming a scheme of `ssh://`. Also + changes absolute paths into file:// URLs. */ +ParsedURL fixGitURL(const std::string & url); /** * Whether a string is valid as RFC 3986 scheme name. diff --git a/src/libutil/url.cc b/src/libutil/url.cc index 7304a2150..1c7fd3f0f 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -408,77 +408,21 @@ ParsedUrlScheme parseUrlScheme(std::string_view scheme) }; } -struct ScpLike +ParsedURL fixGitURL(const std::string & url) { - ParsedURL::Authority authority; - std::string_view path; -}; - -/** - * Parse a scp url. This is a helper struct for fixGitURL. - * This is needed since we support scp-style urls for git urls. - * https://git-scm.com/book/ms/v2/Git-on-the-Server-The-Protocols - * - * A good reference is libgit2 also allows scp style - * https://github.com/libgit2/libgit2/blob/58d9363f02f1fa39e46d49b604f27008e75b72f2/src/util/net.c#L806 - */ -static std::optional parseScp(const std::string_view s) noexcept -{ - if (s.empty() || s.front() == '/') - return std::nullopt; - - // Find the colon that separates host from path. 
- // Find the right-most since ipv6 has colons - const auto colon = s.rfind(':'); - if (colon == std::string_view::npos) - return std::nullopt; - - // Split head:[path] - const auto head = s.substr(0, colon); - const auto path = s.substr(colon + 1); - - if (head.empty()) - return std::nullopt; - - return ScpLike{ - .authority = ParsedURL::Authority::parse(head), - .path = path, - }; -} - -ParsedURL fixGitURL(const std::string_view url) -{ - try { - if (auto parsed = parseURL(url); parsed.scheme == "file" || parsed.authority) - return parsed; - } catch (BadURL &) { - } - - // if the url does not start with forward slash, add one - auto splitMakeAbs = [&](std::string_view pathS) { - std::vector path; - - if (!hasPrefix(pathS, "/")) { - path.emplace_back(""); - } - splitStringInto(path, pathS, "/"); - - return path; - }; - - if (auto scp = parseScp(url)) { + std::regex scpRegex("([^/]*)@(.*):(.*)"); + if (!hasPrefix(url, "/") && std::regex_match(url, scpRegex)) + return parseURL(std::regex_replace(url, scpRegex, "ssh://$1@$2/$3")); + if (hasPrefix(url, "file:")) + return parseURL(url); + if (url.find("://") == std::string::npos) { return ParsedURL{ - .scheme = "ssh", - .authority = std::move(scp->authority), - .path = splitMakeAbs(scp->path), + .scheme = "file", + .authority = ParsedURL::Authority{}, + .path = splitString>(url, "/"), }; } - - return ParsedURL{ - .scheme = "file", - .authority = ParsedURL::Authority{}, - .path = splitMakeAbs(url), - }; + return parseURL(url); } // https://www.rfc-editor.org/rfc/rfc3986#section-3.1 From 2b310aee1310a0eb43dffb1095ae6c53f0649a7f Mon Sep 17 00:00:00 2001 From: Farid Zakaria Date: Sun, 24 Aug 2025 20:19:53 -0700 Subject: [PATCH 302/382] A few more URL tests Adapted from commit 04ad66af5f0fbec60783d8913292125f43954dcd --- src/libutil-tests/url.cc | 148 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 148 insertions(+) diff --git a/src/libutil-tests/url.cc b/src/libutil-tests/url.cc index 56b879846..d545c747b 100644 --- a/src/libutil-tests/url.cc +++ b/src/libutil-tests/url.cc @@ -12,6 +12,154 @@ namespace nix { using Authority = ParsedURL::Authority; using HostType = Authority::HostType; +struct FixGitURLParam +{ + std::string input; + std::string expected; + ParsedURL parsed; +}; + +std::ostream & operator<<(std::ostream & os, const FixGitURLParam & param) +{ + return os << "Input: \"" << param.input << "\", Expected: \"" << param.expected << "\""; +} + +class FixGitURLTestSuite : public ::testing::TestWithParam +{}; + +INSTANTIATE_TEST_SUITE_P( + FixGitURLs, + FixGitURLTestSuite, + ::testing::Values( + // https://github.com/NixOS/nix/issues/5958 + // Already proper URL with git+ssh + FixGitURLParam{ + .input = "git+ssh://user@domain:1234/path", + .expected = "git+ssh://user@domain:1234/path", + .parsed = + ParsedURL{ + .scheme = "git+ssh", + .authority = + ParsedURL::Authority{ + .host = "domain", + .user = "user", + .port = 1234, + }, + .path = {"", "path"}, + }, + }, + // SCP-like URL (rewritten to ssh://) + FixGitURLParam{ + .input = "git@github.com:owner/repo.git", + .expected = "ssh://git@github.com/owner/repo.git", + .parsed = + ParsedURL{ + .scheme = "ssh", + .authority = + ParsedURL::Authority{ + .host = "github.com", + .user = "git", + }, + .path = {"", "owner", "repo.git"}, + }, + }, + // Absolute path (becomes file:) + FixGitURLParam{ + .input = "/home/me/repo", + .expected = "file:///home/me/repo", + .parsed = + ParsedURL{ + .scheme = "file", + .authority = ParsedURL::Authority{}, + .path = {"", "home", "me", "repo"}, + }, + }, 
+ // Already file: scheme + // NOTE: Git/SCP treat this as a `:`, so we are + // failing to "fix up" this case. + FixGitURLParam{ + .input = "file:/var/repos/x", + .expected = "file:/var/repos/x", + .parsed = + ParsedURL{ + .scheme = "file", + .authority = std::nullopt, + .path = {"", "var", "repos", "x"}, + }, + }, + // IPV6 test case + FixGitURLParam{ + .input = "user@[2001:db8:1::2]:/home/file", + .expected = "ssh://user@[2001:db8:1::2]//home/file", + .parsed = + ParsedURL{ + .scheme = "ssh", + .authority = + ParsedURL::Authority{ + .hostType = HostType::IPv6, + .host = "2001:db8:1::2", + .user = "user", + }, + .path = {"", "", "home", "file"}, + }, + })); + +TEST_P(FixGitURLTestSuite, parsesVariedGitUrls) +{ + auto & p = GetParam(); + const auto actual = fixGitURL(p.input); + EXPECT_EQ(actual, p.parsed); + EXPECT_EQ(actual.to_string(), p.expected); +} + +TEST(FixGitURLTestSuite, scpLikeNoUserParsesPoorly) +{ + // SCP-like URL (no user) + + // Cannot "to_string" this because has illegal path not starting + // with `/`. + EXPECT_EQ( + fixGitURL("github.com:owner/repo.git"), + (ParsedURL{ + .scheme = "file", + .authority = ParsedURL::Authority{}, + .path = {"github.com:owner", "repo.git"}, + })); +} + +TEST(FixGitURLTestSuite, scpLikePathLeadingSlashParsesPoorly) +{ + // SCP-like URL (no user) + + // Cannot "to_string" this because has illegal path not starting + // with `/`. + EXPECT_EQ( + fixGitURL("github.com:/owner/repo.git"), + (ParsedURL{ + .scheme = "file", + .authority = ParsedURL::Authority{}, + .path = {"github.com:", "owner", "repo.git"}, + })); +} + +TEST(FixGitURLTestSuite, relativePathParsesPoorly) +{ + // Relative path (becomes file:// absolute) + + // Cannot "to_string" this because has illegal path not starting + // with `/`. + EXPECT_EQ( + fixGitURL("relative/repo"), + (ParsedURL{ + .scheme = "file", + .authority = + ParsedURL::Authority{ + .hostType = ParsedURL::Authority::HostType::Name, + .host = "", + }, + .path = {"relative", "repo"}})); +} + TEST(parseURL, parsesSimpleHttpUrl) { auto s = "http://www.example.org/file.tar.gz"; From d62cfc1c9764eb63e4fcc4c9330c78511afa276c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 19 Sep 2024 20:33:35 +0200 Subject: [PATCH 303/382] Re-introduce mkStringNoCopy (revised) In b70d22b `mkStringNoCopy()` was renamed to `mkString()`, but this is a bit risky since in code like vStringRegular.mkString("regular"); we want to be sure that the right overload is picked. (This is especially problematic since the overload that takes an `std::string_view` *does* allocate.) So let's be explicit. 
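The hazard in reduced form (a sketch with simplified signatures and hypothetical `Val`/`example` names; the real declarations live in `include/nix/expr/value.hh` and `eval.cc`):

    #include <string>
    #include <string_view>

    struct Val {
        const char * s = nullptr;

        // Stores the pointer as-is; the caller guarantees its lifetime.
        void mkStringNoCopy(const char * p) noexcept { s = p; }

        // Allocates an immutable copy (sketch: leaks; the real code uses the
        // GC-backed makeImmutableString instead).
        void mkString(std::string_view v) { s = (new std::string(v))->c_str(); }
    };

    void example()
    {
        Val v;
        v.mkString("regular");        // the literal converts to std::string_view,
                                      // so this silently allocates a copy
        v.mkStringNoCopy("regular");  // explicit: the static literal is stored directly
    }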
(Rebased from https://github.com/NixOS/nix/pull/11551) --- src/libexpr-tests/json.cc | 4 ++-- src/libexpr-tests/value/print.cc | 20 ++++++++++---------- src/libexpr/eval.cc | 14 +++++++------- src/libexpr/include/nix/expr/nixexpr.hh | 2 +- src/libexpr/include/nix/expr/symbol-table.hh | 4 ++-- src/libexpr/include/nix/expr/value.hh | 2 +- src/libexpr/primops.cc | 2 +- src/libexpr/primops/fromTOML.cc | 2 +- src/nix/nix-env/user-env.cc | 2 +- 9 files changed, 26 insertions(+), 26 deletions(-) diff --git a/src/libexpr-tests/json.cc b/src/libexpr-tests/json.cc index c090ac5d7..8b1bd7d96 100644 --- a/src/libexpr-tests/json.cc +++ b/src/libexpr-tests/json.cc @@ -54,7 +54,7 @@ TEST_F(JSONValueTest, IntNegative) TEST_F(JSONValueTest, String) { Value v; - v.mkString("test"); + v.mkStringNoCopy("test"); ASSERT_EQ(getJSONValue(v), "\"test\""); } @@ -62,7 +62,7 @@ TEST_F(JSONValueTest, StringQuotes) { Value v; - v.mkString("test\""); + v.mkStringNoCopy("test\""); ASSERT_EQ(getJSONValue(v), "\"test\\\"\""); } diff --git a/src/libexpr-tests/value/print.cc b/src/libexpr-tests/value/print.cc index b32cba667..739d4e40b 100644 --- a/src/libexpr-tests/value/print.cc +++ b/src/libexpr-tests/value/print.cc @@ -35,14 +35,14 @@ TEST_F(ValuePrintingTests, tBool) TEST_F(ValuePrintingTests, tString) { Value vString; - vString.mkString("some-string"); + vString.mkStringNoCopy("some-string"); test(vString, "\"some-string\""); } TEST_F(ValuePrintingTests, tPath) { Value vPath; - vPath.mkString("/foo"); + vPath.mkStringNoCopy("/foo"); test(vPath, "\"/foo\""); } @@ -290,10 +290,10 @@ TEST_F(StringPrintingTests, maxLengthTruncation) TEST_F(ValuePrintingTests, attrsTypeFirst) { Value vType; - vType.mkString("puppy"); + vType.mkStringNoCopy("puppy"); Value vApple; - vApple.mkString("apple"); + vApple.mkStringNoCopy("apple"); BindingsBuilder builder(state, state.allocBindings(10)); builder.insert(state.symbols.create("type"), &vType); @@ -334,7 +334,7 @@ TEST_F(ValuePrintingTests, ansiColorsBool) TEST_F(ValuePrintingTests, ansiColorsString) { Value v; - v.mkString("puppy"); + v.mkStringNoCopy("puppy"); test(v, ANSI_MAGENTA "\"puppy\"" ANSI_NORMAL, PrintOptions{.ansiColors = true}); } @@ -342,7 +342,7 @@ TEST_F(ValuePrintingTests, ansiColorsString) TEST_F(ValuePrintingTests, ansiColorsStringElided) { Value v; - v.mkString("puppy"); + v.mkStringNoCopy("puppy"); test( v, @@ -390,7 +390,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrs) TEST_F(ValuePrintingTests, ansiColorsDerivation) { Value vDerivation; - vDerivation.mkString("derivation"); + vDerivation.mkStringNoCopy("derivation"); BindingsBuilder builder(state, state.allocBindings(10)); builder.insert(state.s.type, &vDerivation); @@ -413,7 +413,7 @@ TEST_F(ValuePrintingTests, ansiColorsError) { Value throw_ = state.getBuiltin("throw"); Value message; - message.mkString("uh oh!"); + message.mkStringNoCopy("uh oh!"); Value vError; vError.mkApp(&throw_, &message); @@ -430,12 +430,12 @@ TEST_F(ValuePrintingTests, ansiColorsDerivationError) { Value throw_ = state.getBuiltin("throw"); Value message; - message.mkString("uh oh!"); + message.mkStringNoCopy("uh oh!"); Value vError; vError.mkApp(&throw_, &message); Value vDerivation; - vDerivation.mkString("derivation"); + vDerivation.mkStringNoCopy("derivation"); BindingsBuilder builder(state, state.allocBindings(10)); builder.insert(state.s.type, &vDerivation); diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 8c5646403..fd2108537 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -292,10 +292,10 @@ 
EvalState::EvalState( vNull.mkNull(); vTrue.mkBool(true); vFalse.mkBool(false); - vStringRegular.mkString("regular"); - vStringDirectory.mkString("directory"); - vStringSymlink.mkString("symlink"); - vStringUnknown.mkString("unknown"); + vStringRegular.mkStringNoCopy("regular"); + vStringDirectory.mkStringNoCopy("directory"); + vStringSymlink.mkStringNoCopy("symlink"); + vStringUnknown.mkStringNoCopy("unknown"); /* Construct the Nix expression search path. */ assert(lookupPath.elements.empty()); @@ -824,7 +824,7 @@ DebugTraceStacker::DebugTraceStacker(EvalState & evalState, DebugTrace t) void Value::mkString(std::string_view s) { - mkString(makeImmutableString(s)); + mkStringNoCopy(makeImmutableString(s)); } static const char ** encodeContext(const NixStringContext & context) @@ -843,12 +843,12 @@ static const char ** encodeContext(const NixStringContext & context) void Value::mkString(std::string_view s, const NixStringContext & context) { - mkString(makeImmutableString(s), encodeContext(context)); + mkStringNoCopy(makeImmutableString(s), encodeContext(context)); } void Value::mkStringMove(const char * s, const NixStringContext & context) { - mkString(s, encodeContext(context)); + mkStringNoCopy(s, encodeContext(context)); } void Value::mkPath(const SourcePath & path) diff --git a/src/libexpr/include/nix/expr/nixexpr.hh b/src/libexpr/include/nix/expr/nixexpr.hh index 3c3c5e6f9..414eb5116 100644 --- a/src/libexpr/include/nix/expr/nixexpr.hh +++ b/src/libexpr/include/nix/expr/nixexpr.hh @@ -158,7 +158,7 @@ struct ExprString : Expr ExprString(std::string && s) : s(std::move(s)) { - v.mkString(this->s.data()); + v.mkStringNoCopy(this->s.data()); }; Value * maybeThunk(EvalState & state, Env & env) override; diff --git a/src/libexpr/include/nix/expr/symbol-table.hh b/src/libexpr/include/nix/expr/symbol-table.hh index ff98077ca..cb31923bf 100644 --- a/src/libexpr/include/nix/expr/symbol-table.hh +++ b/src/libexpr/include/nix/expr/symbol-table.hh @@ -113,12 +113,12 @@ public: // for multi-threaded implementations: lock store and allocator here const auto & [v, idx] = key.store.add(SymbolValue{}); if (size == 0) { - v.mkString("", nullptr); + v.mkStringNoCopy("", nullptr); } else { auto s = key.alloc.allocate(size + 1); memcpy(s, key.s.data(), size); s[size] = '\0'; - v.mkString(s, nullptr); + v.mkStringNoCopy(s, nullptr); } v.size_ = size; v.idx = idx; diff --git a/src/libexpr/include/nix/expr/value.hh b/src/libexpr/include/nix/expr/value.hh index a2833679b..9d0cf1e54 100644 --- a/src/libexpr/include/nix/expr/value.hh +++ b/src/libexpr/include/nix/expr/value.hh @@ -960,7 +960,7 @@ public: setStorage(b); } - inline void mkString(const char * s, const char ** context = 0) noexcept + void mkStringNoCopy(const char * s, const char ** context = 0) noexcept { setStorage(StringWithContext{.c_str = s, .context = context}); } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 264f3d155..515fc0626 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -4349,7 +4349,7 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value ** args, V if (len == 0) { state.forceValue(*args[2], pos); if (args[2]->type() == nString) { - v.mkString("", args[2]->context()); + v.mkStringNoCopy("", args[2]->context()); return; } } diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc index 7d98a5de9..3ab594905 100644 --- a/src/libexpr/primops/fromTOML.cc +++ b/src/libexpr/primops/fromTOML.cc @@ -136,7 +136,7 @@ static void prim_fromTOML(EvalState & state, const 
PosIdx pos, Value ** args, Va normalizeDatetimeFormat(t); #endif auto attrs = state.buildBindings(2); - attrs.alloc("_type").mkString("timestamp"); + attrs.alloc("_type").mkStringNoCopy("timestamp"); std::ostringstream s; s << t; auto str = toView(s); diff --git a/src/nix/nix-env/user-env.cc b/src/nix/nix-env/user-env.cc index 766c6d42a..4ed93135d 100644 --- a/src/nix/nix-env/user-env.cc +++ b/src/nix/nix-env/user-env.cc @@ -56,7 +56,7 @@ bool createUserEnv( auto attrs = state.buildBindings(7 + outputs.size()); - attrs.alloc(state.s.type).mkString("derivation"); + attrs.alloc(state.s.type).mkStringNoCopy("derivation"); attrs.alloc(state.s.name).mkString(i.queryName()); auto system = i.querySystem(); if (!system.empty()) From 34181afc6aa6efb9e0e6a1c6fa49e172f5742681 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 2 Sep 2025 00:09:33 +0300 Subject: [PATCH 304/382] libexpr: Use mkStringNoCopy in prim_typeOf This would lead to an unnecessary allocation. Not a significant issue by any means, but it doesn't have to allocate for most cases. --- src/libexpr/primops.cc | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 515fc0626..4efd7ea86 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -483,42 +483,40 @@ void prim_exec(EvalState & state, const PosIdx pos, Value ** args, Value & v) static void prim_typeOf(EvalState & state, const PosIdx pos, Value ** args, Value & v) { state.forceValue(*args[0], pos); - std::string t; switch (args[0]->type()) { case nInt: - t = "int"; + v.mkStringNoCopy("int"); break; case nBool: - t = "bool"; + v.mkStringNoCopy("bool"); break; case nString: - t = "string"; + v.mkStringNoCopy("string"); break; case nPath: - t = "path"; + v.mkStringNoCopy("path"); break; case nNull: - t = "null"; + v.mkStringNoCopy("null"); break; case nAttrs: - t = "set"; + v.mkStringNoCopy("set"); break; case nList: - t = "list"; + v.mkStringNoCopy("list"); break; case nFunction: - t = "lambda"; + v.mkStringNoCopy("lambda"); break; case nExternal: - t = args[0]->external()->typeOf(); + v.mkString(args[0]->external()->typeOf()); break; case nFloat: - t = "float"; + v.mkStringNoCopy("float"); break; case nThunk: unreachable(); } - v.mkString(t); } static RegisterPrimOp primop_typeOf({ From 7195250fc41ee21d8bac2615a613bd4289976fad Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 1 Sep 2025 16:55:12 -0400 Subject: [PATCH 305/382] Add another `fixGitURL` test Also improve a similar `parseURL` test. --- src/libutil-tests/url.cc | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/src/libutil-tests/url.cc b/src/libutil-tests/url.cc index d545c747b..ac123fc17 100644 --- a/src/libutil-tests/url.cc +++ b/src/libutil-tests/url.cc @@ -127,6 +127,15 @@ TEST(FixGitURLTestSuite, scpLikeNoUserParsesPoorly) })); } +TEST(FixGitURLTestSuite, properlyRejectFileURLWithAuthority) +{ + /* From the underlying `parseURL` validations. 
*/ + EXPECT_THAT( + []() { fixGitURL("file://var/repos/x"); }, + ::testing::ThrowsMessage( + testing::HasSubstrIgnoreANSIMatcher("file:// URL 'file://var/repos/x' has unexpected authority 'var'"))); +} + TEST(FixGitURLTestSuite, scpLikePathLeadingSlashParsesPoorly) { // SCP-like URL (no user) @@ -246,8 +255,10 @@ TEST(parseURL, parsesFilePlusHttpsUrl) TEST(parseURL, rejectsAuthorityInUrlsWithFileTransportation) { - auto s = "file://www.example.org/video.mp4"; - ASSERT_THROW(parseURL(s), Error); + EXPECT_THAT( + []() { parseURL("file://www.example.org/video.mp4"); }, + ::testing::ThrowsMessage( + testing::HasSubstrIgnoreANSIMatcher("has unexpected authority 'www.example.org'"))); } TEST(parseURL, parseIPv4Address) From 7f91e91876b4cc84ab7d8a0fd6744d2f05432b61 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 1 Sep 2025 18:26:21 -0400 Subject: [PATCH 306/382] More URL testing More parameterized tests, we can have more coverage. --- src/libutil-tests/url.cc | 222 +++++++++++++++++++-------------------- 1 file changed, 109 insertions(+), 113 deletions(-) diff --git a/src/libutil-tests/url.cc b/src/libutil-tests/url.cc index ac123fc17..5c7b02248 100644 --- a/src/libutil-tests/url.cc +++ b/src/libutil-tests/url.cc @@ -169,55 +169,124 @@ TEST(FixGitURLTestSuite, relativePathParsesPoorly) .path = {"relative", "repo"}})); } -TEST(parseURL, parsesSimpleHttpUrl) +struct ParseURLSuccessCase { - auto s = "http://www.example.org/file.tar.gz"; - auto parsed = parseURL(s); + std::string_view input; + ParsedURL expected; +}; - ParsedURL expected{ - .scheme = "http", - .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, - .path = {"", "file.tar.gz"}, - .query = (StringMap) {}, - .fragment = "", - }; +class ParseURLSuccess : public ::testing::TestWithParam +{}; - ASSERT_EQ(parsed, expected); - ASSERT_EQ(s, parsed.to_string()); +INSTANTIATE_TEST_SUITE_P( + ParseURLSuccessCases, + ParseURLSuccess, + ::testing::Values( + ParseURLSuccessCase{ + .input = "http://www.example.org/file.tar.gz", + .expected = + ParsedURL{ + .scheme = "http", + .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, + .path = {"", "file.tar.gz"}, + .query = (StringMap) {}, + .fragment = "", + }, + }, + ParseURLSuccessCase{ + .input = "https://www.example.org/file.tar.gz", + .expected = + ParsedURL{ + .scheme = "https", + .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, + .path = {"", "file.tar.gz"}, + .query = (StringMap) {}, + .fragment = "", + }, + }, + ParseURLSuccessCase{ + .input = "https://www.example.org/file.tar.gz?download=fast&when=now#hello", + .expected = + ParsedURL{ + .scheme = "https", + .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, + .path = {"", "file.tar.gz"}, + .query = (StringMap) {{"download", "fast"}, {"when", "now"}}, + .fragment = "hello", + }, + }, + ParseURLSuccessCase{ + .input = "file+https://www.example.org/video.mp4", + .expected = + ParsedURL{ + .scheme = "file+https", + .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, + .path = {"", "video.mp4"}, + .query = (StringMap) {}, + .fragment = "", + }, + }, + ParseURLSuccessCase{ + .input = "http://127.0.0.1:8080/file.tar.gz?download=fast&when=now#hello", + .expected = + ParsedURL{ + .scheme = "http", + .authority = Authority{.hostType = HostType::IPv4, .host = "127.0.0.1", .port = 8080}, + .path = {"", "file.tar.gz"}, + .query = (StringMap) {{"download", "fast"}, {"when", "now"}}, + .fragment = "hello", + }, + }, 
+ ParseURLSuccessCase{ + .input = "http://[fe80::818c:da4d:8975:415c\%25enp0s25]:8080", + .expected = + ParsedURL{ + .scheme = "http", + .authority = + Authority{ + .hostType = HostType::IPv6, .host = "fe80::818c:da4d:8975:415c\%enp0s25", .port = 8080}, + .path = {""}, + .query = (StringMap) {}, + .fragment = "", + }, + + }, + ParseURLSuccessCase{ + .input = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080", + .expected = + ParsedURL{ + .scheme = "http", + .authority = + Authority{ + .hostType = HostType::IPv6, + .host = "2a02:8071:8192:c100:311d:192d:81ac:11ea", + .port = 8080, + }, + .path = {""}, + .query = (StringMap) {}, + .fragment = "", + }, + })); + +TEST_P(ParseURLSuccess, parsesAsExpected) +{ + auto & p = GetParam(); + const auto parsed = parseURL(p.input); + EXPECT_EQ(parsed, p.expected); } -TEST(parseURL, parsesSimpleHttpsUrl) +TEST_P(ParseURLSuccess, toStringRoundTrips) { - auto s = "https://www.example.org/file.tar.gz"; - auto parsed = parseURL(s); - - ParsedURL expected{ - .scheme = "https", - .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, - .path = {"", "file.tar.gz"}, - .query = (StringMap) {}, - .fragment = "", - }; - - ASSERT_EQ(parsed, expected); - ASSERT_EQ(s, parsed.to_string()); + auto & p = GetParam(); + const auto parsed = parseURL(p.input); + EXPECT_EQ(p.input, parsed.to_string()); } -TEST(parseURL, parsesSimpleHttpUrlWithQueryAndFragment) +TEST_P(ParseURLSuccess, makeSureFixGitURLDoesNotModify) { - auto s = "https://www.example.org/file.tar.gz?download=fast&when=now#hello"; - auto parsed = parseURL(s); - - ParsedURL expected{ - .scheme = "https", - .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, - .path = {"", "file.tar.gz"}, - .query = (StringMap) {{"download", "fast"}, {"when", "now"}}, - .fragment = "hello", - }; - - ASSERT_EQ(parsed, expected); - ASSERT_EQ(s, parsed.to_string()); + auto & p = GetParam(); + const auto parsed = fixGitURL(std::string{p.input}); + EXPECT_EQ(p.input, parsed.to_string()); } TEST(parseURL, parsesSimpleHttpUrlWithComplexFragment) @@ -236,23 +305,6 @@ TEST(parseURL, parsesSimpleHttpUrlWithComplexFragment) ASSERT_EQ(parsed, expected); } -TEST(parseURL, parsesFilePlusHttpsUrl) -{ - auto s = "file+https://www.example.org/video.mp4"; - auto parsed = parseURL(s); - - ParsedURL expected{ - .scheme = "file+https", - .authority = Authority{.hostType = HostType::Name, .host = "www.example.org"}, - .path = {"", "video.mp4"}, - .query = (StringMap) {}, - .fragment = "", - }; - - ASSERT_EQ(parsed, expected); - ASSERT_EQ(s, parsed.to_string()); -} - TEST(parseURL, rejectsAuthorityInUrlsWithFileTransportation) { EXPECT_THAT( @@ -261,62 +313,6 @@ TEST(parseURL, rejectsAuthorityInUrlsWithFileTransportation) testing::HasSubstrIgnoreANSIMatcher("has unexpected authority 'www.example.org'"))); } -TEST(parseURL, parseIPv4Address) -{ - auto s = "http://127.0.0.1:8080/file.tar.gz?download=fast&when=now#hello"; - auto parsed = parseURL(s); - - ParsedURL expected{ - .scheme = "http", - .authority = Authority{.hostType = HostType::IPv4, .host = "127.0.0.1", .port = 8080}, - .path = {"", "file.tar.gz"}, - .query = (StringMap) {{"download", "fast"}, {"when", "now"}}, - .fragment = "hello", - }; - - ASSERT_EQ(parsed, expected); - ASSERT_EQ(s, parsed.to_string()); -} - -TEST(parseURL, parseScopedRFC6874IPv6Address) -{ - auto s = "http://[fe80::818c:da4d:8975:415c\%25enp0s25]:8080"; - auto parsed = parseURL(s); - - ParsedURL expected{ - .scheme = "http", - .authority = Authority{.hostType = 
HostType::IPv6, .host = "fe80::818c:da4d:8975:415c\%enp0s25", .port = 8080}, - .path = {""}, - .query = (StringMap) {}, - .fragment = "", - }; - - ASSERT_EQ(parsed, expected); - ASSERT_EQ(s, parsed.to_string()); -} - -TEST(parseURL, parseIPv6Address) -{ - auto s = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080"; - auto parsed = parseURL(s); - - ParsedURL expected{ - .scheme = "http", - .authority = - Authority{ - .hostType = HostType::IPv6, - .host = "2a02:8071:8192:c100:311d:192d:81ac:11ea", - .port = 8080, - }, - .path = {""}, - .query = (StringMap) {}, - .fragment = "", - }; - - ASSERT_EQ(parsed, expected); - ASSERT_EQ(s, parsed.to_string()); -} - TEST(parseURL, parseEmptyQueryParams) { auto s = "http://127.0.0.1:8080/file.tar.gz?&&&&&"; From 6bdb5e8e099057822a767cae1f8c2c93152dae3c Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 2 Sep 2025 10:40:06 -0400 Subject: [PATCH 307/382] Fix downstream MinGW build by not looking for Boost Regex --- src/libexpr/meson.build | 5 ++++- src/libstore/meson.build | 6 +++++- src/libutil/meson.build | 7 ++++++- 3 files changed, 15 insertions(+), 3 deletions(-) diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index 0331d3c61..00fb82e3c 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -40,7 +40,10 @@ endforeach boost = dependency( 'boost', - modules : [ 'container', 'context' ], + modules : [ + 'container', + 'context', + ], include_type : 'system', ) # boost is a public dependency, but not a pkg-config dependency unfortunately, so we diff --git a/src/libstore/meson.build b/src/libstore/meson.build index 2b0106ff3..253152772 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -101,7 +101,11 @@ subdir('nix-meson-build-support/libatomic') boost = dependency( 'boost', - modules : [ 'container', 'regex' ], + modules : [ + 'container', + # Shouldn't list, because can header-only, and Meson currently looks for libs + #'regex', + ], include_type : 'system', ) # boost is a public dependency, but not a pkg-config dependency unfortunately, so we diff --git a/src/libutil/meson.build b/src/libutil/meson.build index c294f895a..cdffc892a 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -57,7 +57,12 @@ deps_private += blake3 boost = dependency( 'boost', - modules : [ 'context', 'coroutine', 'iostreams', 'url' ], + modules : [ + 'context', + 'coroutine', + 'iostreams', + 'url', + ], include_type : 'system', version : '>=1.82.0', ) From cbcb434cb3eb9b647b7f0e8c22dbb526f5599849 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Sun, 31 Aug 2025 13:55:11 +0200 Subject: [PATCH 308/382] libexpr: Convert Symbol comparisons to switch statements Now that Symbols are statically allocated at compile time with known IDs, we can use switch statements instead of if-else chains for Symbol comparisons. This provides better performance through compiler optimizations like jump tables. Changes: - Add public getId() method to Symbol class to access the internal ID - Convert if-else chains comparing Symbol values to switch statements in primops.cc's derivationStrictInternal function - Simplify control flow by removing the 'handled' flag and moving the default attribute handling into the switch's default case The static and runtime Symbol IDs are guaranteed to match by the copyIntoSymbolTable implementation which asserts this invariant. 
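A minimal sketch of the pattern this enables, with simplified, illustrative names only (the real code switches on the `EvalState::s.*` symbols): because the symbol objects and their `getId()` accessor are constexpr, the IDs are usable as case labels, and the compiler is free to lower the dispatch to a jump table.

    #include <cstdint>

    struct Sym {
        uint32_t id;
        constexpr uint32_t getId() const noexcept { return id; }
    };

    // IDs fixed at compile time; the real symbol table asserts that the
    // static and runtime IDs agree.
    constexpr Sym sBuilder{1}, sSystem{2}, sOutputs{3};

    const char * classify(Sym name)
    {
        switch (name.getId()) {      // constexpr IDs are valid case labels
        case sBuilder.getId(): return "builder";
        case sSystem.getId():  return "system";
        case sOutputs.getId(): return "outputs";
        default:               return "other";
        }
    }
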
Co-authored-by: John Ericson --- src/libexpr/include/nix/expr/symbol-table.hh | 9 ++ src/libexpr/primops.cc | 99 +++++++++++++------- 2 files changed, 75 insertions(+), 33 deletions(-) diff --git a/src/libexpr/include/nix/expr/symbol-table.hh b/src/libexpr/include/nix/expr/symbol-table.hh index ff98077ca..9a9cbae61 100644 --- a/src/libexpr/include/nix/expr/symbol-table.hh +++ b/src/libexpr/include/nix/expr/symbol-table.hh @@ -61,6 +61,15 @@ public: return id > 0; } + /** + * The ID is a private implementation detail that should generally not be observed. However, we expose here just for + * sake of `switch...case`, which needs to dispatch on numbers. */ + [[gnu::always_inline]] + constexpr uint32_t getId() const noexcept + { + return id; + } + constexpr auto operator<=>(const Symbol & other) const noexcept = default; friend class std::hash; diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 264f3d155..c6cdf09a1 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1454,19 +1454,22 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName continue; } - if (i->name == state.s.contentAddressed && state.forceBool(*i->value, pos, context_below)) { - contentAddressed = true; - experimentalFeatureSettings.require(Xp::CaDerivations); - } - - else if (i->name == state.s.impure && state.forceBool(*i->value, pos, context_below)) { - isImpure = true; - experimentalFeatureSettings.require(Xp::ImpureDerivations); - } - + switch (i->name.getId()) { + case EvalState::s.contentAddressed.getId(): + if (state.forceBool(*i->value, pos, context_below)) { + contentAddressed = true; + experimentalFeatureSettings.require(Xp::CaDerivations); + } + break; + case EvalState::s.impure.getId(): + if (state.forceBool(*i->value, pos, context_below)) { + isImpure = true; + experimentalFeatureSettings.require(Xp::ImpureDerivations); + } + break; /* The `args' attribute is special: it supplies the command-line arguments to the builder. */ - else if (i->name == state.s.args) { + case EvalState::s.args.getId(): state.forceList(*i->value, pos, context_below); for (auto elem : i->value->listView()) { auto s = state @@ -1475,11 +1478,10 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName .toOwned(); drv.args.push_back(s); } - } - + break; /* All other attributes are passed to the builder through the environment. 
*/ - else { + default: if (jsonObject) { @@ -1488,49 +1490,69 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName jsonObject->structuredAttrs.emplace(key, printValueAsJSON(state, true, *i->value, pos, context)); - if (i->name == state.s.builder) + switch (i->name.getId()) { + case EvalState::s.builder.getId(): drv.builder = state.forceString(*i->value, context, pos, context_below); - else if (i->name == state.s.system) + break; + case EvalState::s.system.getId(): drv.platform = state.forceStringNoCtx(*i->value, pos, context_below); - else if (i->name == state.s.outputHash) + break; + case EvalState::s.outputHash.getId(): outputHash = state.forceStringNoCtx(*i->value, pos, context_below); - else if (i->name == state.s.outputHashAlgo) + break; + case EvalState::s.outputHashAlgo.getId(): outputHashAlgo = parseHashAlgoOpt(state.forceStringNoCtx(*i->value, pos, context_below)); - else if (i->name == state.s.outputHashMode) + break; + case EvalState::s.outputHashMode.getId(): handleHashMode(state.forceStringNoCtx(*i->value, pos, context_below)); - else if (i->name == state.s.outputs) { - /* Require ‘outputs’ to be a list of strings. */ + break; + case EvalState::s.outputs.getId(): { + /* Require 'outputs' to be a list of strings. */ state.forceList(*i->value, pos, context_below); Strings ss; for (auto elem : i->value->listView()) ss.emplace_back(state.forceStringNoCtx(*elem, pos, context_below)); handleOutputs(ss); + break; + } + default: + break; } - if (i->name == state.s.allowedReferences) + switch (i->name.getId()) { + case EvalState::s.allowedReferences.getId(): warn( "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'allowedReferences'; use 'outputChecks..allowedReferences' instead", drvName); - if (i->name == state.s.allowedRequisites) + break; + case EvalState::s.allowedRequisites.getId(): warn( "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'allowedRequisites'; use 'outputChecks..allowedRequisites' instead", drvName); - if (i->name == state.s.disallowedReferences) + break; + case EvalState::s.disallowedReferences.getId(): warn( "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'disallowedReferences'; use 'outputChecks..disallowedReferences' instead", drvName); - if (i->name == state.s.disallowedRequisites) + break; + case EvalState::s.disallowedRequisites.getId(): warn( "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'disallowedRequisites'; use 'outputChecks..disallowedRequisites' instead", drvName); - if (i->name == state.s.maxSize) + break; + case EvalState::s.maxSize.getId(): warn( "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'maxSize'; use 'outputChecks..maxSize' instead", drvName); - if (i->name == state.s.maxClosureSize) + break; + case EvalState::s.maxClosureSize.getId(): warn( "In a derivation named '%s', 'structuredAttrs' disables the effect of the derivation attribute 'maxClosureSize'; use 'outputChecks..maxClosureSize' instead", drvName); + break; + default: + break; + } } else { auto s = state.coerceToString(pos, *i->value, context, context_below, true).toOwned(); @@ -1541,20 +1563,31 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName drv.structuredAttrs = StructuredAttrs::parse(s); } else { drv.env.emplace(key, s); - if (i->name == state.s.builder) + switch (i->name.getId()) { + 
case EvalState::s.builder.getId(): drv.builder = std::move(s); - else if (i->name == state.s.system) + break; + case EvalState::s.system.getId(): drv.platform = std::move(s); - else if (i->name == state.s.outputHash) + break; + case EvalState::s.outputHash.getId(): outputHash = std::move(s); - else if (i->name == state.s.outputHashAlgo) + break; + case EvalState::s.outputHashAlgo.getId(): outputHashAlgo = parseHashAlgoOpt(s); - else if (i->name == state.s.outputHashMode) + break; + case EvalState::s.outputHashMode.getId(): handleHashMode(s); - else if (i->name == state.s.outputs) + break; + case EvalState::s.outputs.getId(): handleOutputs(tokenizeString(s)); + break; + default: + break; + } } } + break; } } catch (Error & e) { From 1286d5db78701a5c0a83ae6b5f838b9ac60a61c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Wed, 3 Sep 2025 11:07:16 +0200 Subject: [PATCH 309/382] Fix macOS HUP detection using kqueue instead of poll On macOS, poll() is fundamentally broken for HUP detection. It loses event subscriptions when EVFILT_READ fires without matching the requested events in the pollfd. This causes daemon processes to linger after client disconnect. This commit replaces poll() with kqueue on macOS, which is what poll() uses internally but without the bugs. The kqueue implementation uses EVFILT_READ which works for both sockets and pipes, avoiding EVFILT_SOCK which only works for sockets. On Linux and other platforms, we continue using poll() with the standard POSIX behavior where POLLHUP is always reported regardless of requested events. Based on work from the Lix project (https://git.lix.systems/lix-project/lix) commit 69ba3c92db3ecca468bcd5ff7849fa8e8e0fc6c0 Fixes: https://github.com/NixOS/nix/issues/13847 Related: https://git.lix.systems/lix-project/lix/issues/729 Apple bugs: rdar://37537852 (poll), FB17447257 (poll) Co-authored-by: Jade Lovelace --- .../unix/include/nix/util/monitor-fd.hh | 211 +++++++++--------- 1 file changed, 108 insertions(+), 103 deletions(-) diff --git a/src/libutil/unix/include/nix/util/monitor-fd.hh b/src/libutil/unix/include/nix/util/monitor-fd.hh index 5c1e5f195..b87bf5ca4 100644 --- a/src/libutil/unix/include/nix/util/monitor-fd.hh +++ b/src/libutil/unix/include/nix/util/monitor-fd.hh @@ -2,15 +2,18 @@ ///@file #include -#include +#include -#include #include -#include -#include -#include +#include + +#ifdef __APPLE__ +# include +# include +#endif #include "nix/util/signals.hh" +#include "nix/util/file-descriptor.hh" namespace nix { @@ -20,111 +23,113 @@ private: std::thread thread; Pipe notifyPipe; + void runThread(int watchFd, int notifyFd); + public: - MonitorFdHup(int fd) - { - notifyPipe.create(); - thread = std::thread([this, fd]() { - while (true) { - // There is a POSIX violation on macOS: you have to listen for - // at least POLLHUP to receive HUP events for a FD. POSIX says - // this is not so, and you should just receive them regardless. - // However, as of our testing on macOS 14.5, the events do not - // get delivered if in the all-bits-unset case, but do get - // delivered if `POLLHUP` is set. - // - // This bug filed as rdar://37537852 - // (https://openradar.appspot.com/37537852). - // - // macOS's own man page - // (https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man2/poll.2.html) - // additionally says that `POLLHUP` is ignored as an input. It - // seems the likely order of events here was - // - // 1. macOS did not follow the POSIX spec - // - // 2. 
Somebody ninja-fixed this other spec violation to make - // sure `POLLHUP` was not forgotten about, even though they - // "fixed" this issue in a spec-non-compliant way. Whatever, - // we'll use the fix. - // - // Relevant code, current version, which shows the : - // https://github.com/apple-oss-distributions/xnu/blob/94d3b452840153a99b38a3a9659680b2a006908e/bsd/kern/sys_generic.c#L1751-L1758 - // - // The `POLLHUP` detection was added in - // https://github.com/apple-oss-distributions/xnu/commit/e13b1fa57645afc8a7b2e7d868fe9845c6b08c40#diff-a5aa0b0e7f4d866ca417f60702689fc797e9cdfe33b601b05ccf43086c35d395R1468 - // That means added in 2007 or earlier. Should be good enough - // for us. - short hangup_events = -#ifdef __APPLE__ - POLLHUP -#else - 0 -#endif - ; - - /* Wait indefinitely until a POLLHUP occurs. */ - constexpr size_t num_fds = 2; - struct pollfd fds[num_fds] = { - { - .fd = fd, - .events = hangup_events, - }, - { - .fd = notifyPipe.readSide.get(), - .events = hangup_events, - }, - }; - - auto count = poll(fds, num_fds, -1); - if (count == -1) { - if (errno == EINTR || errno == EAGAIN) - continue; - throw SysError("failed to poll() in MonitorFdHup"); - } - /* This shouldn't happen, but can on macOS due to a bug. - See rdar://37550628. - - This may eventually need a delay or further - coordination with the main thread if spinning proves - too harmful. - */ - if (count == 0) - continue; - if (fds[0].revents & POLLHUP) { - unix::triggerInterrupt(); - break; - } - if (fds[1].revents & POLLHUP) { - break; - } - // On macOS, (jade thinks that) it is possible (although not - // observed on macOS 14.5) that in some limited cases on buggy - // kernel versions, all the non-POLLHUP events for the socket - // get delivered. - // - // We could sleep to avoid pointlessly spinning a thread on - // those, but this opens up a different problem, which is that - // if do sleep, it will be longer before the daemon fork for a - // client exits. Imagine a sequential shell script, running Nix - // commands, each of which talk to the daemon. If the previous - // command registered a temp root, exits, and then the next - // command issues a delete request before the temp root is - // cleaned up, that delete request might fail. - // - // Not sleeping doesn't actually fix the race condition --- we - // would need to block on the old connections' tempt roots being - // cleaned up in in the new connection --- but it does make it - // much less likely. - } - }); - }; + MonitorFdHup(int fd); ~MonitorFdHup() { + // Close the write side to signal termination via POLLHUP notifyPipe.writeSide.close(); thread.join(); } }; +#ifdef __APPLE__ +/* This custom kqueue usage exists because Apple's poll implementation is + * broken and loses event subscriptions if EVFILT_READ fires without matching + * the requested `events` in the pollfd. + * + * We use EVFILT_READ, which causes some spurious wakeups (at most one per write + * from the client, in addition to the socket lifecycle events), because the + * alternate API, EVFILT_SOCK, doesn't work on pipes, which this is also used + * to monitor in certain situations. 
+ * + * See (EVFILT_SOCK): + * https://github.com/netty/netty/blob/64bd2f4eb62c2fb906bc443a2aabf894c8b7dce9/transport-classes-kqueue/src/main/java/io/netty/channel/kqueue/AbstractKQueueChannel.java#L434 + * + * See: https://git.lix.systems/lix-project/lix/issues/729 + * Apple bug in poll(2): FB17447257, available at https://openradar.appspot.com/FB17447257 + */ +inline void MonitorFdHup::runThread(int watchFd, int notifyFd) +{ + int kqResult = kqueue(); + if (kqResult < 0) { + throw SysError("MonitorFdHup kqueue"); + } + AutoCloseFD kq{kqResult}; + + std::array kevs; + + // kj uses EVFILT_WRITE for this, but it seems that it causes more spurious + // wakeups in our case of doing blocking IO from another thread compared to + // EVFILT_READ. + // + // EVFILT_WRITE and EVFILT_READ (for sockets at least, where I am familiar + // with the internals) both go through a common filter which catches EOFs + // and generates spurious wakeups for either readable/writable events. + EV_SET(&kevs[0], watchFd, EVFILT_READ, EV_ADD | EV_ENABLE | EV_CLEAR, 0, 0, nullptr); + EV_SET(&kevs[1], notifyFd, EVFILT_READ, EV_ADD | EV_ENABLE | EV_CLEAR, 0, 0, nullptr); + + int result = kevent(kq.get(), kevs.data(), kevs.size(), nullptr, 0, nullptr); + if (result < 0) { + throw SysError("MonitorFdHup kevent add"); + } + + while (true) { + struct kevent event; + int numEvents = kevent(kq.get(), nullptr, 0, &event, 1, nullptr); + if (numEvents < 0) { + throw SysError("MonitorFdHup kevent watch"); + } + + if (numEvents > 0 && (event.flags & EV_EOF)) { + if (event.ident == uintptr_t(watchFd)) { + unix::triggerInterrupt(); + } + // Either watched fd or notify fd closed, exit + return; + } + } +} +#else +inline void MonitorFdHup::runThread(int watchFd, int notifyFd) +{ + while (true) { + struct pollfd fds[2]; + fds[0].fd = watchFd; + fds[0].events = 0; // POSIX: POLLHUP is always reported + fds[1].fd = notifyFd; + fds[1].events = 0; + + auto count = poll(fds, 2, -1); + if (count == -1) { + if (errno == EINTR || errno == EAGAIN) { + continue; + } else { + throw SysError("in MonitorFdHup poll()"); + } + } + + if (fds[0].revents & POLLHUP) { + unix::triggerInterrupt(); + break; + } + + if (fds[1].revents & POLLHUP) { + // Notify pipe closed, exit thread + break; + } + } +} +#endif + +inline MonitorFdHup::MonitorFdHup(int fd) +{ + notifyPipe.create(); + int notifyFd = notifyPipe.readSide.get(); + thread = std::thread([this, fd, notifyFd]() { this->runThread(fd, notifyFd); }); +}; + } // namespace nix From 2fe629c5d49ef9ab7de9ea43f3b5ecd871ccb4e7 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 3 Sep 2025 15:32:56 +0200 Subject: [PATCH 310/382] Fix deadlock in SSHMaster::addCommonSSHOpts() When useMaster is true, startMaster() acquires the state lock, then calls isMasterRunning(), which calls addCommonSSHOpts(), which tries to acquire the state lock again, causing a deadlock. The solution is to move tmpDir out of the state. It doesn't need to be there in the first place because it never changes. 
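For illustration, the deadlock has the classic shape of re-locking a non-recursive mutex on the same thread. This sketch uses a plain std::mutex rather than the Sync<State> wrapper in the real code, but the failure mode is the same:

    #include <mutex>

    struct Master {
        std::mutex stateLock;

        void addOpts()
        {
            std::lock_guard<std::mutex> l(stateLock); // second acquisition, same thread
            // ... previously read tmpDir from the locked state here ...
        }

        void startMaster()
        {
            std::lock_guard<std::mutex> l(stateLock); // first acquisition
            addOpts();                                // never returns: deadlock
        }
    };

Because tmpDir is created once in the constructor and never modified afterwards, hoisting it out of the locked state lets addCommonSSHOpts run without taking the lock at all, which removes the re-entrant acquisition.
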
--- src/libstore/include/nix/store/ssh.hh | 4 +++- src/libstore/ssh.cc | 9 +++------ 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/src/libstore/include/nix/store/ssh.hh b/src/libstore/include/nix/store/ssh.hh index c7228464b..7e27a0d3e 100644 --- a/src/libstore/include/nix/store/ssh.hh +++ b/src/libstore/include/nix/store/ssh.hh @@ -1,6 +1,7 @@ #pragma once ///@file +#include "nix/util/ref.hh" #include "nix/util/sync.hh" #include "nix/util/url.hh" #include "nix/util/processes.hh" @@ -26,12 +27,13 @@ private: const bool compress; const Descriptor logFD; + ref tmpDir; + struct State { #ifndef _WIN32 // TODO re-enable on Windows, once we can start processes. Pid sshMaster; #endif - std::unique_ptr tmpDir; Path socketPath; }; diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index 8a4614a0d..0f1dba1e9 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -84,23 +84,20 @@ SSHMaster::SSHMaster( , useMaster(useMaster && !fakeSSH) , compress(compress) , logFD(logFD) + , tmpDir(make_ref(createTempDir("", "nix", 0700))) { checkValidAuthority(authority); - auto state(state_.lock()); - state->tmpDir = std::make_unique(createTempDir("", "nix", 0700)); } void SSHMaster::addCommonSSHOpts(Strings & args) { - auto state(state_.lock()); - auto sshArgs = getNixSshOpts(); args.insert(args.end(), sshArgs.begin(), sshArgs.end()); if (!keyFile.empty()) args.insert(args.end(), {"-i", keyFile}); if (!sshPublicHostKey.empty()) { - std::filesystem::path fileName = state->tmpDir->path() / "host-key"; + std::filesystem::path fileName = tmpDir->path() / "host-key"; writeFile(fileName.string(), authority.host + " " + sshPublicHostKey + "\n"); args.insert(args.end(), {"-oUserKnownHostsFile=" + fileName.string()}); } @@ -241,7 +238,7 @@ Path SSHMaster::startMaster() if (state->sshMaster != INVALID_DESCRIPTOR) return state->socketPath; - state->socketPath = (Path) *state->tmpDir + "/ssh.sock"; + state->socketPath = (Path) *tmpDir + "/ssh.sock"; Pipe out; out.create(); From c7603c61c8052b47ab7cc5be327cca3f573a5330 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 3 Sep 2025 20:16:39 +0200 Subject: [PATCH 311/382] Mark tmpDir as const --- src/libstore/include/nix/store/ssh.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/include/nix/store/ssh.hh b/src/libstore/include/nix/store/ssh.hh index 7e27a0d3e..574cb5cf4 100644 --- a/src/libstore/include/nix/store/ssh.hh +++ b/src/libstore/include/nix/store/ssh.hh @@ -27,7 +27,7 @@ private: const bool compress; const Descriptor logFD; - ref tmpDir; + const ref tmpDir; struct State { From 95c577988023d69f5596a8763fae445b21396abb Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 29 Aug 2025 13:52:17 -0400 Subject: [PATCH 312/382] `DerivationBuildingGoal::tryToBuild` pull hook waiting out of switch Do this with a new `useHook` boolean we carefully make sure is set in all cases. This change isn't really worthwhile by itself, but it allows us to make further refactors (see later commits) which are well-motivated. 
--- .../build/derivation-building-goal.cc | 21 +++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 53343ce84..327955714 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -470,6 +470,8 @@ void DerivationBuildingGoal::started() Goal::Co DerivationBuildingGoal::tryToBuild() { + bool useHook; + trace("trying to build"); /* Obtain locks on all output paths, if the paths are known a priori. @@ -539,16 +541,15 @@ Goal::Co DerivationBuildingGoal::tryToBuild() bool buildLocally = (buildMode != bmNormal || drvOptions->willBuildLocally(worker.store, *drv)) && settings.maxBuildJobs.get() != 0; - if (!buildLocally) { + if (buildLocally) { + useHook = false; + } else { switch (tryBuildHook()) { case rpAccept: /* Yes, it has started doing so. Wait until we get EOF from the hook. */ - actLock.reset(); - buildResult.startTime = time(0); // inexact - started(); - co_await Suspend{}; - co_return hookDone(); + useHook = true; + break; case rpPostpone: /* Not now; wait until at least one child finishes or the wake-up timeout expires. */ @@ -563,12 +564,20 @@ Goal::Co DerivationBuildingGoal::tryToBuild() co_return tryToBuild(); case rpDecline: /* We should do it ourselves. */ + useHook = false; break; } } actLock.reset(); + if (useHook) { + buildResult.startTime = time(0); // inexact + started(); + co_await Suspend{}; + co_return hookDone(); + } + co_await yield(); if (!dynamic_cast(&worker.store)) { From 4c44a213a330daf315c2464db95d29495945a206 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 29 Aug 2025 13:59:47 -0400 Subject: [PATCH 313/382] Get rid of a `tryToBuild` tail recursive call with loop This will make it easier to convert somethings to RAII. --- .../build/derivation-building-goal.cc | 176 +++++++++--------- 1 file changed, 90 insertions(+), 86 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 327955714..77ab23b4c 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -472,101 +472,105 @@ Goal::Co DerivationBuildingGoal::tryToBuild() { bool useHook; - trace("trying to build"); + while (true) { + trace("trying to build"); - /* Obtain locks on all output paths, if the paths are known a priori. + /* Obtain locks on all output paths, if the paths are known a priori. - The locks are automatically released when we exit this function or Nix - crashes. If we can't acquire the lock, then continue; hopefully some - other goal can start a build, and if not, the main loop will sleep a few - seconds and then retry this goal. */ - PathSet lockFiles; - /* FIXME: Should lock something like the drv itself so we don't build same - CA drv concurrently */ - if (dynamic_cast(&worker.store)) { - /* If we aren't a local store, we might need to use the local store as - a build remote, but that would cause a deadlock. */ - /* FIXME: Make it so we can use ourselves as a build remote even if we - are the local store (separate locking for building vs scheduling? */ - /* FIXME: find some way to lock for scheduling for the other stores so - a forking daemon with --store still won't farm out redundant builds. 
- */ - for (auto & i : drv->outputsAndOptPaths(worker.store)) { - if (i.second.second) - lockFiles.insert(worker.store.Store::toRealPath(*i.second.second)); - else - lockFiles.insert(worker.store.Store::toRealPath(drvPath) + "." + i.first); + The locks are automatically released when we exit this function or Nix + crashes. If we can't acquire the lock, then continue; hopefully some + other goal can start a build, and if not, the main loop will sleep a few + seconds and then retry this goal. */ + PathSet lockFiles; + /* FIXME: Should lock something like the drv itself so we don't build same + CA drv concurrently */ + if (dynamic_cast(&worker.store)) { + /* If we aren't a local store, we might need to use the local store as + a build remote, but that would cause a deadlock. */ + /* FIXME: Make it so we can use ourselves as a build remote even if we + are the local store (separate locking for building vs scheduling? */ + /* FIXME: find some way to lock for scheduling for the other stores so + a forking daemon with --store still won't farm out redundant builds. + */ + for (auto & i : drv->outputsAndOptPaths(worker.store)) { + if (i.second.second) + lockFiles.insert(worker.store.Store::toRealPath(*i.second.second)); + else + lockFiles.insert(worker.store.Store::toRealPath(drvPath) + "." + i.first); + } } - } - if (!outputLocks.lockPaths(lockFiles, "", false)) { - Activity act(*logger, lvlWarn, actBuildWaiting, fmt("waiting for lock on %s", Magenta(showPaths(lockFiles)))); + if (!outputLocks.lockPaths(lockFiles, "", false)) { + Activity act( + *logger, lvlWarn, actBuildWaiting, fmt("waiting for lock on %s", Magenta(showPaths(lockFiles)))); - /* Wait then try locking again, repeat until success (returned - boolean is true). */ - do { - co_await waitForAWhile(); - } while (!outputLocks.lockPaths(lockFiles, "", false)); - } + /* Wait then try locking again, repeat until success (returned + boolean is true). */ + do { + co_await waitForAWhile(); + } while (!outputLocks.lockPaths(lockFiles, "", false)); + } - /* Now check again whether the outputs are valid. This is because - another process may have started building in parallel. After - it has finished and released the locks, we can (and should) - reuse its results. (Strictly speaking the first check can be - omitted, but that would be less efficient.) Note that since we - now hold the locks on the output paths, no other process can - build this derivation, so no further checks are necessary. */ - auto [allValid, validOutputs] = checkPathValidity(); + /* Now check again whether the outputs are valid. This is because + another process may have started building in parallel. After + it has finished and released the locks, we can (and should) + reuse its results. (Strictly speaking the first check can be + omitted, but that would be less efficient.) Note that since we + now hold the locks on the output paths, no other process can + build this derivation, so no further checks are necessary. */ + auto [allValid, validOutputs] = checkPathValidity(); - if (buildMode != bmCheck && allValid) { - debug("skipping build of derivation '%s', someone beat us to it", worker.store.printStorePath(drvPath)); - outputLocks.setDeletion(true); - outputLocks.unlock(); - co_return doneSuccess(BuildResult::AlreadyValid, std::move(validOutputs)); - } - - /* If any of the outputs already exist but are not valid, delete - them. 
*/ - for (auto & [_, status] : initialOutputs) { - if (!status.known || status.known->isValid()) - continue; - auto storePath = status.known->path; - debug("removing invalid path '%s'", worker.store.printStorePath(status.known->path)); - deletePath(worker.store.Store::toRealPath(storePath)); - } - - /* Don't do a remote build if the derivation has the attribute - `preferLocalBuild' set. Also, check and repair modes are only - supported for local builds. */ - bool buildLocally = - (buildMode != bmNormal || drvOptions->willBuildLocally(worker.store, *drv)) && settings.maxBuildJobs.get() != 0; - - if (buildLocally) { - useHook = false; - } else { - switch (tryBuildHook()) { - case rpAccept: - /* Yes, it has started doing so. Wait until we get - EOF from the hook. */ - useHook = true; - break; - case rpPostpone: - /* Not now; wait until at least one child finishes or - the wake-up timeout expires. */ - if (!actLock) - actLock = std::make_unique( - *logger, - lvlWarn, - actBuildWaiting, - fmt("waiting for a machine to build '%s'", Magenta(worker.store.printStorePath(drvPath)))); + if (buildMode != bmCheck && allValid) { + debug("skipping build of derivation '%s', someone beat us to it", worker.store.printStorePath(drvPath)); + outputLocks.setDeletion(true); outputLocks.unlock(); - co_await waitForAWhile(); - co_return tryToBuild(); - case rpDecline: - /* We should do it ourselves. */ - useHook = false; - break; + co_return doneSuccess(BuildResult::AlreadyValid, std::move(validOutputs)); } + + /* If any of the outputs already exist but are not valid, delete + them. */ + for (auto & [_, status] : initialOutputs) { + if (!status.known || status.known->isValid()) + continue; + auto storePath = status.known->path; + debug("removing invalid path '%s'", worker.store.printStorePath(status.known->path)); + deletePath(worker.store.Store::toRealPath(storePath)); + } + + /* Don't do a remote build if the derivation has the attribute + `preferLocalBuild' set. Also, check and repair modes are only + supported for local builds. */ + bool buildLocally = (buildMode != bmNormal || drvOptions->willBuildLocally(worker.store, *drv)) + && settings.maxBuildJobs.get() != 0; + + if (buildLocally) { + useHook = false; + } else { + switch (tryBuildHook()) { + case rpAccept: + /* Yes, it has started doing so. Wait until we get + EOF from the hook. */ + useHook = true; + break; + case rpPostpone: + /* Not now; wait until at least one child finishes or + the wake-up timeout expires. */ + if (!actLock) + actLock = std::make_unique( + *logger, + lvlWarn, + actBuildWaiting, + fmt("waiting for a machine to build '%s'", Magenta(worker.store.printStorePath(drvPath)))); + outputLocks.unlock(); + co_await waitForAWhile(); + continue; + case rpDecline: + /* We should do it ourselves. */ + useHook = false; + break; + } + } + break; } actLock.reset(); From 7c1e5b3345b2e0a95b1a04b65ddcb6350be2e86a Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 29 Aug 2025 14:02:01 -0400 Subject: [PATCH 314/382] In `DerivationBuildingGoal` Demote `actLock` to local variable It doesn't need to be a field any more, because we just use it with two loops. 
--- src/libstore/build/derivation-building-goal.cc | 5 +++++ .../include/nix/store/build/derivation-building-goal.hh | 5 ----- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 77ab23b4c..fd85a066d 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -470,6 +470,11 @@ void DerivationBuildingGoal::started() Goal::Co DerivationBuildingGoal::tryToBuild() { + /** + * Activity that denotes waiting for a lock. + */ + std::unique_ptr actLock; + bool useHook; while (true) { diff --git a/src/libstore/include/nix/store/build/derivation-building-goal.hh b/src/libstore/include/nix/store/build/derivation-building-goal.hh index 162cf14ad..dd8b27dc2 100644 --- a/src/libstore/include/nix/store/build/derivation-building-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-building-goal.hh @@ -92,11 +92,6 @@ struct DerivationBuildingGoal : public Goal std::unique_ptr act; - /** - * Activity that denotes waiting for a lock. - */ - std::unique_ptr actLock; - std::map builderActivities; /** From 51dadaded444907ecb97e19a34483f06d10d1ab5 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 29 Aug 2025 14:40:41 -0400 Subject: [PATCH 315/382] Move up `assert(!hook);` We don't need to keep doing this every loop iteration, hook stuff it is only set above. --- src/libstore/build/derivation-building-goal.cc | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index fd85a066d..510304653 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -602,12 +602,11 @@ Goal::Co DerivationBuildingGoal::tryToBuild() #ifdef _WIN32 // TODO enable `DerivationBuilder` on Windows throw UnimplementedError("building derivations is not yet implemented on Windows"); #else + assert(!hook); // Will continue here while waiting for a build user below while (true) { - assert(!hook); - unsigned int curBuilds = worker.getNrLocalBuilds(); if (curBuilds >= settings.maxBuildJobs) { outputLocks.unlock(); From a63ac8d98b005937e0b65389c9a40dd953b90888 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 29 Aug 2025 15:07:01 -0400 Subject: [PATCH 316/382] Inline `DerivationBuildingGoal::hookDone` --- .../build/derivation-building-goal.cc | 146 +++++++++--------- .../store/build/derivation-building-goal.hh | 1 - 2 files changed, 71 insertions(+), 76 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 510304653..5493845a5 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -584,7 +584,77 @@ Goal::Co DerivationBuildingGoal::tryToBuild() buildResult.startTime = time(0); // inexact started(); co_await Suspend{}; - co_return hookDone(); + +#ifndef _WIN32 + assert(hook); +#endif + + trace("hook build done"); + + /* Since we got an EOF on the logger pipe, the builder is presumed + to have terminated. In fact, the builder could also have + simply have closed its end of the pipe, so just to be sure, + kill it. 
*/ + int status = +#ifndef _WIN32 // TODO enable build hook on Windows + hook->pid.kill(); +#else + 0; +#endif + + debug("build hook for '%s' finished", worker.store.printStorePath(drvPath)); + + buildResult.timesBuilt++; + buildResult.stopTime = time(0); + + /* So the child is gone now. */ + worker.childTerminated(this); + + /* Close the read side of the logger pipe. */ +#ifndef _WIN32 // TODO enable build hook on Windows + hook->builderOut.readSide.close(); + hook->fromHook.readSide.close(); +#endif + + /* Close the log file. */ + closeLogFile(); + + /* Check the exit status. */ + if (!statusOk(status)) { + auto e = fixupBuilderFailureErrorMessage({BuildResult::MiscFailure, status, ""}); + + outputLocks.unlock(); + + /* TODO (once again) support fine-grained error codes, see issue #12641. */ + + co_return doneFailure(std::move(e)); + } + + /* Compute the FS closure of the outputs and register them as + being valid. */ + auto builtOutputs = + /* When using a build hook, the build hook can register the output + as valid (by doing `nix-store --import'). If so we don't have + to do anything here. + + We can only early return when the outputs are known a priori. For + floating content-addressing derivations this isn't the case. + */ + assertPathValidity(); + + StorePathSet outputPaths; + for (auto & [_, output] : builtOutputs) + outputPaths.insert(output.outPath); + runPostBuildHook(worker.store, *logger, drvPath, outputPaths); + + /* It is now safe to delete the lock files, since all future + lockers will see that the output paths are valid; they will + not create new lock files with the same names as the old + (unlinked) lock files. */ + outputLocks.setDeletion(true); + outputLocks.unlock(); + + co_return doneSuccess(BuildResult::Built, std::move(builtOutputs)); } co_await yield(); @@ -885,80 +955,6 @@ BuildError DerivationBuildingGoal::fixupBuilderFailureErrorMessage(BuilderFailur return BuildError{e.status, msg}; } -Goal::Co DerivationBuildingGoal::hookDone() -{ -#ifndef _WIN32 - assert(hook); -#endif - - trace("hook build done"); - - /* Since we got an EOF on the logger pipe, the builder is presumed - to have terminated. In fact, the builder could also have - simply have closed its end of the pipe, so just to be sure, - kill it. */ - int status = -#ifndef _WIN32 // TODO enable build hook on Windows - hook->pid.kill(); -#else - 0; -#endif - - debug("build hook for '%s' finished", worker.store.printStorePath(drvPath)); - - buildResult.timesBuilt++; - buildResult.stopTime = time(0); - - /* So the child is gone now. */ - worker.childTerminated(this); - - /* Close the read side of the logger pipe. */ -#ifndef _WIN32 // TODO enable build hook on Windows - hook->builderOut.readSide.close(); - hook->fromHook.readSide.close(); -#endif - - /* Close the log file. */ - closeLogFile(); - - /* Check the exit status. */ - if (!statusOk(status)) { - auto e = fixupBuilderFailureErrorMessage({BuildResult::MiscFailure, status, ""}); - - outputLocks.unlock(); - - /* TODO (once again) support fine-grained error codes, see issue #12641. */ - - co_return doneFailure(std::move(e)); - } - - /* Compute the FS closure of the outputs and register them as - being valid. */ - auto builtOutputs = - /* When using a build hook, the build hook can register the output - as valid (by doing `nix-store --import'). If so we don't have - to do anything here. - - We can only early return when the outputs are known a priori. For - floating content-addressing derivations this isn't the case. 
- */ - assertPathValidity(); - - StorePathSet outputPaths; - for (auto & [_, output] : builtOutputs) - outputPaths.insert(output.outPath); - runPostBuildHook(worker.store, *logger, drvPath, outputPaths); - - /* It is now safe to delete the lock files, since all future - lockers will see that the output paths are valid; they will - not create new lock files with the same names as the old - (unlinked) lock files. */ - outputLocks.setDeletion(true); - outputLocks.unlock(); - - co_return doneSuccess(BuildResult::Built, std::move(builtOutputs)); -} - HookReply DerivationBuildingGoal::tryBuildHook() { #ifdef _WIN32 // TODO enable build hook on Windows diff --git a/src/libstore/include/nix/store/build/derivation-building-goal.hh b/src/libstore/include/nix/store/build/derivation-building-goal.hh index dd8b27dc2..041abfad2 100644 --- a/src/libstore/include/nix/store/build/derivation-building-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-building-goal.hh @@ -112,7 +112,6 @@ struct DerivationBuildingGoal : public Goal */ Co gaveUpOnSubstitution(); Co tryToBuild(); - Co hookDone(); /** * Is the build hook willing to perform the build? From 3b9c510ab1b9eb7ebf8e48c4c8a3ebe0d3c6f570 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 29 Aug 2025 15:08:35 -0400 Subject: [PATCH 317/382] `DerivationBuildingGoal::outputLocks` make local variable --- src/libstore/build/derivation-building-goal.cc | 7 +++++-- .../include/nix/store/build/derivation-building-goal.hh | 5 ----- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 5493845a5..3cb9c8135 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -475,6 +475,11 @@ Goal::Co DerivationBuildingGoal::tryToBuild() */ std::unique_ptr actLock; + /** + * Locks on (fixed) output paths. + */ + PathLocks outputLocks; + bool useHook; while (true) { @@ -1301,7 +1306,6 @@ SingleDrvOutputs DerivationBuildingGoal::assertPathValidity() Goal::Done DerivationBuildingGoal::doneSuccess(BuildResult::Status status, SingleDrvOutputs builtOutputs) { - outputLocks.unlock(); buildResult.status = status; assert(buildResult.success()); @@ -1319,7 +1323,6 @@ Goal::Done DerivationBuildingGoal::doneSuccess(BuildResult::Status status, Singl Goal::Done DerivationBuildingGoal::doneFailure(BuildError ex) { - outputLocks.unlock(); buildResult.status = ex.status; buildResult.errorMsg = fmt("%s", Uncolored(ex.info().msg)); if (buildResult.status == BuildResult::TimedOut) diff --git a/src/libstore/include/nix/store/build/derivation-building-goal.hh b/src/libstore/include/nix/store/build/derivation-building-goal.hh index 041abfad2..2ec573293 100644 --- a/src/libstore/include/nix/store/build/derivation-building-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-building-goal.hh @@ -43,11 +43,6 @@ struct DerivationBuildingGoal : public Goal * The remainder is state held during the build. */ - /** - * Locks on (fixed) output paths. - */ - PathLocks outputLocks; - /** * All input paths (that is, the union of FS closures of the * immediate input paths). 
From c6ba120000ae7ca489ad476a8a3d961d36d64459 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 29 Aug 2025 15:10:56 -0400 Subject: [PATCH 318/382] `DerivationBuildingGoal::started` make local (lambda) variable --- .../build/derivation-building-goal.cc | 57 +++++++++---------- .../store/build/derivation-building-goal.hh | 2 - 2 files changed, 28 insertions(+), 31 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 3cb9c8135..008549acb 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -439,37 +439,36 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() co_return tryToBuild(); } -void DerivationBuildingGoal::started() -{ - auto msg = - fmt(buildMode == bmRepair ? "repairing outputs of '%s'" - : buildMode == bmCheck ? "checking outputs of '%s'" - : "building '%s'", - worker.store.printStorePath(drvPath)); - fmt("building '%s'", worker.store.printStorePath(drvPath)); -#ifndef _WIN32 // TODO enable build hook on Windows - if (hook) - msg += fmt(" on '%s'", machineName); -#endif - act = std::make_unique( - *logger, - lvlInfo, - actBuild, - msg, - Logger::Fields{ - worker.store.printStorePath(drvPath), -#ifndef _WIN32 // TODO enable build hook on Windows - hook ? machineName : -#endif - "", - 1, - 1}); - mcRunningBuilds = std::make_unique>(worker.runningBuilds); - worker.updateProgress(); -} - Goal::Co DerivationBuildingGoal::tryToBuild() { + auto started = [&]() { + auto msg = + fmt(buildMode == bmRepair ? "repairing outputs of '%s'" + : buildMode == bmCheck ? "checking outputs of '%s'" + : "building '%s'", + worker.store.printStorePath(drvPath)); + fmt("building '%s'", worker.store.printStorePath(drvPath)); +#ifndef _WIN32 // TODO enable build hook on Windows + if (hook) + msg += fmt(" on '%s'", machineName); +#endif + act = std::make_unique( + *logger, + lvlInfo, + actBuild, + msg, + Logger::Fields{ + worker.store.printStorePath(drvPath), +#ifndef _WIN32 // TODO enable build hook on Windows + hook ? machineName : +#endif + "", + 1, + 1}); + mcRunningBuilds = std::make_unique>(worker.runningBuilds); + worker.updateProgress(); + }; + /** * Activity that denotes waiting for a lock. 
*/ diff --git a/src/libstore/include/nix/store/build/derivation-building-goal.hh b/src/libstore/include/nix/store/build/derivation-building-goal.hh index 2ec573293..f6dcad83d 100644 --- a/src/libstore/include/nix/store/build/derivation-building-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-building-goal.hh @@ -158,8 +158,6 @@ struct DerivationBuildingGoal : public Goal */ void killChild(); - void started(); - Done doneSuccess(BuildResult::Status status, SingleDrvOutputs builtOutputs); Done doneFailure(BuildError ex); From eb56b181aeaeef40de996202267e12a73f245adb Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 29 Aug 2025 15:40:10 -0400 Subject: [PATCH 319/382] DerivationBuildingGoal: Make almost everything private --- .../nix/store/build/derivation-building-goal.hh | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/libstore/include/nix/store/build/derivation-building-goal.hh b/src/libstore/include/nix/store/build/derivation-building-goal.hh index f6dcad83d..2cb111760 100644 --- a/src/libstore/include/nix/store/build/derivation-building-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-building-goal.hh @@ -29,6 +29,12 @@ typedef enum { rpAccept, rpDecline, rpPostpone } HookReply; */ struct DerivationBuildingGoal : public Goal { + DerivationBuildingGoal( + const StorePath & drvPath, const Derivation & drv, Worker & worker, BuildMode buildMode = bmNormal); + ~DerivationBuildingGoal(); + +private: + /** The path of the derivation. */ StorePath drvPath; @@ -94,10 +100,6 @@ struct DerivationBuildingGoal : public Goal */ std::string machineName; - DerivationBuildingGoal( - const StorePath & drvPath, const Derivation & drv, Worker & worker, BuildMode buildMode = bmNormal); - ~DerivationBuildingGoal(); - void timedOut(Error && ex) override; std::string key() override; From 7e4608a3f8112451cd5597577a0ac73744c8980e Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 2 Sep 2025 01:54:27 -0400 Subject: [PATCH 320/382] More `extern "C"` for FFI This allows us to catch the header and file getting out of sync, because we are not doing overloading by mistake. 
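As a hypothetical illustration (nix_frob is a made-up name, not part of the API), consider a header declaration and a definition that have drifted apart:

    extern "C" void nix_frob(int x);     // declaration from the header
    extern "C" void nix_frob(long x) {}  // out-of-sync definition: compile error,
                                         // conflicting C-linkage declarations

    // Without extern "C", the second line would merely introduce a new C++
    // overload; the header's nix_frob(int) would remain undefined and the
    // mismatch could only surface at link time, if the symbol is used at all.
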
--- src/libexpr-c/nix_api_expr.cc | 4 ++++ src/libexpr-c/nix_api_expr_internal.h | 4 ++++ src/libexpr-c/nix_api_external.cc | 8 ++++++++ src/libexpr-c/nix_api_value.cc | 4 ++++ src/libfetchers-c/nix_api_fetchers.cc | 4 ++++ src/libflake-c/nix_api_flake.cc | 4 ++++ src/libmain-c/nix_api_main.cc | 4 ++++ src/libstore-c/nix_api_store.cc | 4 ++++ src/libstore-c/nix_api_store_internal.h | 4 ++++ src/libutil-c/nix_api_util.cc | 4 ++++ src/libutil-c/nix_api_util_internal.h | 4 ++++ 11 files changed, 48 insertions(+) diff --git a/src/libexpr-c/nix_api_expr.cc b/src/libexpr-c/nix_api_expr.cc index 02e901de9..a028202ae 100644 --- a/src/libexpr-c/nix_api_expr.cc +++ b/src/libexpr-c/nix_api_expr.cc @@ -40,6 +40,8 @@ static T * unsafe_new_with_self(F && init) return new (p) T(init(static_cast(p))); } +extern "C" { + nix_err nix_libexpr_init(nix_c_context * context) { if (context) @@ -287,3 +289,5 @@ void nix_gc_register_finalizer(void * obj, void * cd, void (*finalizer)(void * o GC_REGISTER_FINALIZER(obj, finalizer, cd, 0, 0); #endif } + +} // extern "C" diff --git a/src/libexpr-c/nix_api_expr_internal.h b/src/libexpr-c/nix_api_expr_internal.h index a26595cec..3aa1d9932 100644 --- a/src/libexpr-c/nix_api_expr_internal.h +++ b/src/libexpr-c/nix_api_expr_internal.h @@ -8,6 +8,8 @@ #include "nix_api_value.h" #include "nix/expr/search-path.hh" +extern "C" { + struct nix_eval_state_builder { nix::ref store; @@ -61,4 +63,6 @@ struct nix_realised_string std::vector storePaths; }; +} // extern "C" + #endif // NIX_API_EXPR_INTERNAL_H diff --git a/src/libexpr-c/nix_api_external.cc b/src/libexpr-c/nix_api_external.cc index ecb67cfb4..ff2950448 100644 --- a/src/libexpr-c/nix_api_external.cc +++ b/src/libexpr-c/nix_api_external.cc @@ -14,6 +14,8 @@ #include +extern "C" { + void nix_set_string_return(nix_string_return * str, const char * c) { str->str = c; @@ -40,6 +42,8 @@ nix_err nix_external_add_string_context(nix_c_context * context, nix_string_cont NIXC_CATCH_ERRS } +} // extern "C" + class NixCExternalValue : public nix::ExternalValueBase { NixCExternalValueDesc & desc; @@ -170,6 +174,8 @@ public: virtual ~NixCExternalValue() override {}; }; +extern "C" { + ExternalValue * nix_create_external_value(nix_c_context * context, NixCExternalValueDesc * desc, void * v) { if (context) @@ -198,3 +204,5 @@ void * nix_get_external_value_content(nix_c_context * context, ExternalValue * b } NIXC_CATCH_ERRS_NULL } + +} // extern "C" diff --git a/src/libexpr-c/nix_api_value.cc b/src/libexpr-c/nix_api_value.cc index fb90e2872..0f6595e49 100644 --- a/src/libexpr-c/nix_api_value.cc +++ b/src/libexpr-c/nix_api_value.cc @@ -111,6 +111,8 @@ static void nix_c_primop_wrapper( v = vTmp; } +extern "C" { + PrimOp * nix_alloc_primop( nix_c_context * context, PrimOpFun fun, @@ -651,3 +653,5 @@ const StorePath * nix_realised_string_get_store_path(nix_realised_string * s, si { return &s->storePaths[i]; } + +} // extern "C" diff --git a/src/libfetchers-c/nix_api_fetchers.cc b/src/libfetchers-c/nix_api_fetchers.cc index 4e8037a5e..7fefedb0c 100644 --- a/src/libfetchers-c/nix_api_fetchers.cc +++ b/src/libfetchers-c/nix_api_fetchers.cc @@ -2,6 +2,8 @@ #include "nix_api_fetchers_internal.hh" #include "nix_api_util_internal.h" +extern "C" { + nix_fetchers_settings * nix_fetchers_settings_new(nix_c_context * context) { try { @@ -17,3 +19,5 @@ void nix_fetchers_settings_free(nix_fetchers_settings * settings) { delete settings; } + +} // extern "C" diff --git a/src/libflake-c/nix_api_flake.cc b/src/libflake-c/nix_api_flake.cc index 
ad8f0bf4e..2de0e667e 100644 --- a/src/libflake-c/nix_api_flake.cc +++ b/src/libflake-c/nix_api_flake.cc @@ -10,6 +10,8 @@ #include "nix/flake/flake.hh" +extern "C" { + nix_flake_settings * nix_flake_settings_new(nix_c_context * context) { nix_clear_err(context); @@ -203,3 +205,5 @@ nix_value * nix_locked_flake_get_output_attrs( } NIXC_CATCH_ERRS_NULL } + +} // extern "C" diff --git a/src/libmain-c/nix_api_main.cc b/src/libmain-c/nix_api_main.cc index eacb80455..2d4f588a8 100644 --- a/src/libmain-c/nix_api_main.cc +++ b/src/libmain-c/nix_api_main.cc @@ -5,6 +5,8 @@ #include "nix/main/plugin.hh" +extern "C" { + nix_err nix_init_plugins(nix_c_context * context) { if (context) @@ -14,3 +16,5 @@ nix_err nix_init_plugins(nix_c_context * context) } NIXC_CATCH_ERRS } + +} // extern "C" diff --git a/src/libstore-c/nix_api_store.cc b/src/libstore-c/nix_api_store.cc index 4f91f5332..1026c2227 100644 --- a/src/libstore-c/nix_api_store.cc +++ b/src/libstore-c/nix_api_store.cc @@ -10,6 +10,8 @@ #include "nix/store/globals.hh" +extern "C" { + nix_err nix_libstore_init(nix_c_context * context) { if (context) @@ -180,3 +182,5 @@ nix_err nix_store_copy_closure(nix_c_context * context, Store * srcStore, Store } NIXC_CATCH_ERRS } + +} // extern "C" diff --git a/src/libstore-c/nix_api_store_internal.h b/src/libstore-c/nix_api_store_internal.h index b0194bfd3..cbe04b2c7 100644 --- a/src/libstore-c/nix_api_store_internal.h +++ b/src/libstore-c/nix_api_store_internal.h @@ -2,6 +2,8 @@ #define NIX_API_STORE_INTERNAL_H #include "nix/store/store-api.hh" +extern "C" { + struct Store { nix::ref ptr; @@ -12,4 +14,6 @@ struct StorePath nix::StorePath path; }; +} // extern "C" + #endif diff --git a/src/libutil-c/nix_api_util.cc b/src/libutil-c/nix_api_util.cc index 2254f18fa..a43e7103b 100644 --- a/src/libutil-c/nix_api_util.cc +++ b/src/libutil-c/nix_api_util.cc @@ -9,6 +9,8 @@ #include "nix_api_util_config.h" +extern "C" { + nix_c_context * nix_c_context_create() { return new nix_c_context(); @@ -156,3 +158,5 @@ nix_err call_nix_get_string_callback(const std::string str, nix_get_string_callb callback(str.c_str(), str.size(), user_data); return NIX_OK; } + +} // extern "C" diff --git a/src/libutil-c/nix_api_util_internal.h b/src/libutil-c/nix_api_util_internal.h index 664cd6e23..92bb9c1d2 100644 --- a/src/libutil-c/nix_api_util_internal.h +++ b/src/libutil-c/nix_api_util_internal.h @@ -7,6 +7,8 @@ #include "nix/util/error.hh" #include "nix_api_util.h" +extern "C" { + struct nix_c_context { nix_err last_err_code = NIX_OK; @@ -47,4 +49,6 @@ nix_err call_nix_get_string_callback(const std::string str, nix_get_string_callb } #define NIXC_CATCH_ERRS_NULL NIXC_CATCH_ERRS_RES(nullptr) +} // extern "C" + #endif // NIX_API_UTIL_INTERNAL_H From 44d096f68d3d427c824f8e619583b17506aa6603 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 2 Sep 2025 01:54:27 -0400 Subject: [PATCH 321/382] `nix_store_is_valid_path` param `path` should be `const` --- src/libstore-c/nix_api_store.cc | 2 +- src/libstore-c/nix_api_store.h | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libstore-c/nix_api_store.cc b/src/libstore-c/nix_api_store.cc index 1026c2227..73c820d59 100644 --- a/src/libstore-c/nix_api_store.cc +++ b/src/libstore-c/nix_api_store.cc @@ -93,7 +93,7 @@ nix_store_get_version(nix_c_context * context, Store * store, nix_get_string_cal NIXC_CATCH_ERRS } -bool nix_store_is_valid_path(nix_c_context * context, Store * store, StorePath * path) +bool nix_store_is_valid_path(nix_c_context * context, Store * store, 
const StorePath * path) { if (context) context->last_err_code = NIX_OK; diff --git a/src/libstore-c/nix_api_store.h b/src/libstore-c/nix_api_store.h index ad3d7b22a..89cfc1a3c 100644 --- a/src/libstore-c/nix_api_store.h +++ b/src/libstore-c/nix_api_store.h @@ -148,7 +148,7 @@ void nix_store_path_free(StorePath * p); * @param[in] path Path to check * @return true or false, error info in context */ -bool nix_store_is_valid_path(nix_c_context * context, Store * store, StorePath * path); +bool nix_store_is_valid_path(nix_c_context * context, Store * store, const StorePath * path); /** * @brief Get the physical location of a store path From fa76b6e215c2b846ca0fe1c75dcb40d22dda3158 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 2 Sep 2025 11:16:31 -0400 Subject: [PATCH 322/382] nix store benchmarks: Only get unit test dir from env var --- src/libstore-tests/derivation-parser-bench.cc | 16 ++++------------ src/libstore-tests/meson.build | 11 +++++++---- 2 files changed, 11 insertions(+), 16 deletions(-) diff --git a/src/libstore-tests/derivation-parser-bench.cc b/src/libstore-tests/derivation-parser-bench.cc index 61c9807a6..1709eed1c 100644 --- a/src/libstore-tests/derivation-parser-bench.cc +++ b/src/libstore-tests/derivation-parser-bench.cc @@ -51,18 +51,10 @@ static void BM_UnparseRealDerivationFile(benchmark::State & state, const std::st // Register benchmarks for actual test derivation files if they exist BENCHMARK_CAPTURE( - BM_ParseRealDerivationFile, - hello, - getEnvNonEmpty("_NIX_TEST_UNIT_DATA").value_or(NIX_UNIT_TEST_DATA) + "/derivation/hello.drv"); + BM_ParseRealDerivationFile, hello, getEnvNonEmpty("_NIX_TEST_UNIT_DATA").value() + "/derivation/hello.drv"); BENCHMARK_CAPTURE( - BM_ParseRealDerivationFile, - firefox, - getEnvNonEmpty("_NIX_TEST_UNIT_DATA").value_or(NIX_UNIT_TEST_DATA) + "/derivation/firefox.drv"); + BM_ParseRealDerivationFile, firefox, getEnvNonEmpty("_NIX_TEST_UNIT_DATA").value() + "/derivation/firefox.drv"); BENCHMARK_CAPTURE( - BM_UnparseRealDerivationFile, - hello, - getEnvNonEmpty("_NIX_TEST_UNIT_DATA").value_or(NIX_UNIT_TEST_DATA) + "/derivation/hello.drv"); + BM_UnparseRealDerivationFile, hello, getEnvNonEmpty("_NIX_TEST_UNIT_DATA").value() + "/derivation/hello.drv"); BENCHMARK_CAPTURE( - BM_UnparseRealDerivationFile, - firefox, - getEnvNonEmpty("_NIX_TEST_UNIT_DATA").value_or(NIX_UNIT_TEST_DATA) + "/derivation/firefox.drv"); + BM_UnparseRealDerivationFile, firefox, getEnvNonEmpty("_NIX_TEST_UNIT_DATA").value() + "/derivation/firefox.drv"); diff --git a/src/libstore-tests/meson.build b/src/libstore-tests/meson.build index fced20269..4c2840ab7 100644 --- a/src/libstore-tests/meson.build +++ b/src/libstore-tests/meson.build @@ -130,10 +130,13 @@ if get_option('benchmarks') link_args : linker_export_flags, install : true, cpp_pch : do_pch ? [ 'pch/precompiled-headers.hh' ] : [], - cpp_args : [ - '-DNIX_UNIT_TEST_DATA="' + meson.current_source_dir() + '/data"', - ], ) - benchmark('nix-store-benchmarks', benchmark_exe) + benchmark( + 'nix-store-benchmarks', + benchmark_exe, + env : { + '_NIX_TEST_UNIT_DATA' : meson.current_source_dir() / 'data', + }, + ) endif From f6bc47bc50e9c70a0a44cc7e158e2de942715a8b Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 2 Sep 2025 11:17:26 -0400 Subject: [PATCH 323/382] `nix_store_realise`: Improve typing of store path Use `StorePath *` not `const char *`. 
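As a minimal sketch of a caller under the new signature (the callback body and
variable names are illustrative only):

    static void on_built_output(void * userdata, const char * outname, const StorePath * out)
    {
        // `out` is a StorePath handle rather than a rendered string; it is only
        // valid for the duration of the callback, so copy what you need before returning.
        auto * n_outputs = static_cast<unsigned *>(userdata);
        ++*n_outputs;
    }

    // unsigned n_outputs = 0;
    // nix_store_realise(ctx, store, drv_path, &n_outputs, on_built_output);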
--- src/libstore-c/nix_api_store.cc | 6 +++--- src/libstore-c/nix_api_store.h | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libstore-c/nix_api_store.cc b/src/libstore-c/nix_api_store.cc index 73c820d59..7ce63f5c2 100644 --- a/src/libstore-c/nix_api_store.cc +++ b/src/libstore-c/nix_api_store.cc @@ -131,7 +131,7 @@ nix_err nix_store_realise( Store * store, StorePath * path, void * userdata, - void (*callback)(void * userdata, const char *, const char *)) + void (*callback)(void * userdata, const char *, const StorePath *)) { if (context) context->last_err_code = NIX_OK; @@ -146,8 +146,8 @@ nix_err nix_store_realise( if (callback) { for (const auto & result : results) { for (const auto & [outputName, realisation] : result.builtOutputs) { - auto op = store->ptr->printStorePath(realisation.outPath); - callback(userdata, outputName.c_str(), op.c_str()); + StorePath p{realisation.outPath}; + callback(userdata, outputName.c_str(), &p); } } } diff --git a/src/libstore-c/nix_api_store.h b/src/libstore-c/nix_api_store.h index 89cfc1a3c..51bd1bc89 100644 --- a/src/libstore-c/nix_api_store.h +++ b/src/libstore-c/nix_api_store.h @@ -190,7 +190,7 @@ nix_err nix_store_realise( Store * store, StorePath * path, void * userdata, - void (*callback)(void * userdata, const char * outname, const char * out)); + void (*callback)(void * userdata, const char * outname, const StorePath * out)); /** * @brief get the version of a nix store. From 8089102164cda23d4beafc3c44aaf1cdecaeb2cf Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 3 Sep 2025 16:08:35 -0400 Subject: [PATCH 324/382] Separate internal from non-internal unit tests of the C API This helps us make sure that the external C API is sufficient for the tasks that we think it is sufficient for. 
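Concretely, the split enforces an include discipline along these lines (a sketch;
the actual file moves are in the diff below):

    // nix_api_store.cc, nix_api_expr.cc, ...   (external-API tests)
    //   may include only the public headers:
    #include "nix_api_util.h"
    #include "nix_api_store.h"

    // nix_api_util_internal.cc, nix_api_value_internal.cc   (internal tests)
    //   may additionally reach into implementation details:
    #include "nix_api_util_internal.h"

If an external-API test cannot be written without a `*_internal.h` header, that now
fails to compile in the external test translation unit, signalling a gap in the
public C API.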
--- src/libexpr-tests/meson.build | 1 + src/libexpr-tests/nix_api_expr.cc | 28 +++--- src/libexpr-tests/nix_api_external.cc | 5 +- src/libexpr-tests/nix_api_value.cc | 21 ++--- src/libexpr-tests/nix_api_value_internal.cc | 25 ++++++ src/libstore-tests/nix_api_store.cc | 10 +-- .../include/nix/util/tests/nix_api_util.hh | 8 ++ src/libutil-tests/meson.build | 1 + src/libutil-tests/nix_api_util.cc | 80 +---------------- src/libutil-tests/nix_api_util_internal.cc | 85 +++++++++++++++++++ 10 files changed, 145 insertions(+), 119 deletions(-) create mode 100644 src/libexpr-tests/nix_api_value_internal.cc create mode 100644 src/libutil-tests/nix_api_util_internal.cc diff --git a/src/libexpr-tests/meson.build b/src/libexpr-tests/meson.build index a876e9705..c5dafe0de 100644 --- a/src/libexpr-tests/meson.build +++ b/src/libexpr-tests/meson.build @@ -55,6 +55,7 @@ sources = files( 'nix_api_expr.cc', 'nix_api_external.cc', 'nix_api_value.cc', + 'nix_api_value_internal.cc', 'primops.cc', 'search-path.cc', 'trivial.cc', diff --git a/src/libexpr-tests/nix_api_expr.cc b/src/libexpr-tests/nix_api_expr.cc index 529c2f584..5e0868b6e 100644 --- a/src/libexpr-tests/nix_api_expr.cc +++ b/src/libexpr-tests/nix_api_expr.cc @@ -1,7 +1,5 @@ #include "nix_api_store.h" -#include "nix_api_store_internal.h" #include "nix_api_util.h" -#include "nix_api_util_internal.h" #include "nix_api_expr.h" #include "nix_api_value.h" @@ -151,8 +149,8 @@ TEST_F(nix_api_expr_test, nix_expr_realise_context_bad_value) assert_ctx_ok(); auto r = nix_string_realise(ctx, state, value, false); ASSERT_EQ(nullptr, r); - ASSERT_EQ(ctx->last_err_code, NIX_ERR_NIX_ERROR); - ASSERT_THAT(ctx->last_err, testing::Optional(testing::HasSubstr("cannot coerce"))); + ASSERT_EQ(nix_err_code(ctx), NIX_ERR_NIX_ERROR); + ASSERT_THAT(nix_err_msg(nullptr, ctx, nullptr), testing::HasSubstr("cannot coerce")); } TEST_F(nix_api_expr_test, nix_expr_realise_context_bad_build) @@ -168,8 +166,8 @@ TEST_F(nix_api_expr_test, nix_expr_realise_context_bad_build) assert_ctx_ok(); auto r = nix_string_realise(ctx, state, value, false); ASSERT_EQ(nullptr, r); - ASSERT_EQ(ctx->last_err_code, NIX_ERR_NIX_ERROR); - ASSERT_THAT(ctx->last_err, testing::Optional(testing::HasSubstr("failed with exit code 1"))); + ASSERT_EQ(nix_err_code(ctx), NIX_ERR_NIX_ERROR); + ASSERT_THAT(nix_err_msg(nullptr, ctx, nullptr), testing::HasSubstr("failed with exit code 1")); } TEST_F(nix_api_expr_test, nix_expr_realise_context) @@ -381,12 +379,11 @@ TEST_F(nix_api_expr_test, nix_expr_primop_bad_no_return) nix_value * result = nix_alloc_value(ctx, state); assert_ctx_ok(); nix_value_call(ctx, state, primopValue, three, result); - ASSERT_EQ(ctx->last_err_code, NIX_ERR_NIX_ERROR); + ASSERT_EQ(nix_err_code(ctx), NIX_ERR_NIX_ERROR); ASSERT_THAT( - ctx->last_err, - testing::Optional( - testing::HasSubstr("Implementation error in custom function: return value was not initialized"))); - ASSERT_THAT(ctx->last_err, testing::Optional(testing::HasSubstr("badNoReturn"))); + nix_err_msg(nullptr, ctx, nullptr), + testing::HasSubstr("Implementation error in custom function: return value was not initialized")); + ASSERT_THAT(nix_err_msg(nullptr, ctx, nullptr), testing::HasSubstr("badNoReturn")); } static void primop_bad_return_thunk( @@ -419,12 +416,11 @@ TEST_F(nix_api_expr_test, nix_expr_primop_bad_return_thunk) assert_ctx_ok(); NIX_VALUE_CALL(ctx, state, result, primopValue, toString, four); - ASSERT_EQ(ctx->last_err_code, NIX_ERR_NIX_ERROR); + ASSERT_EQ(nix_err_code(ctx), NIX_ERR_NIX_ERROR); ASSERT_THAT( - 
ctx->last_err, - testing::Optional( - testing::HasSubstr("Implementation error in custom function: return value must not be a thunk"))); - ASSERT_THAT(ctx->last_err, testing::Optional(testing::HasSubstr("badReturnThunk"))); + nix_err_msg(nullptr, ctx, nullptr), + testing::HasSubstr("Implementation error in custom function: return value must not be a thunk")); + ASSERT_THAT(nix_err_msg(nullptr, ctx, nullptr), testing::HasSubstr("badReturnThunk")); } TEST_F(nix_api_expr_test, nix_value_call_multi_no_args) diff --git a/src/libexpr-tests/nix_api_external.cc b/src/libexpr-tests/nix_api_external.cc index 93da3ca39..ec19f1212 100644 --- a/src/libexpr-tests/nix_api_external.cc +++ b/src/libexpr-tests/nix_api_external.cc @@ -1,9 +1,6 @@ #include "nix_api_store.h" -#include "nix_api_store_internal.h" #include "nix_api_util.h" -#include "nix_api_util_internal.h" #include "nix_api_expr.h" -#include "nix_api_expr_internal.h" #include "nix_api_value.h" #include "nix_api_external.h" @@ -39,7 +36,7 @@ private: std::string type_string = "nix-external_x); type_string += " )>"; - res->str = &*type_string.begin(); + nix_set_string_return(res, &*type_string.begin()); } }; diff --git a/src/libexpr-tests/nix_api_value.cc b/src/libexpr-tests/nix_api_value.cc index 5d85ed68d..af95224de 100644 --- a/src/libexpr-tests/nix_api_value.cc +++ b/src/libexpr-tests/nix_api_value.cc @@ -1,10 +1,7 @@ #include "nix_api_store.h" -#include "nix_api_store_internal.h" #include "nix_api_util.h" -#include "nix_api_util_internal.h" #include "nix_api_expr.h" #include "nix_api_value.h" -#include "nix_api_expr_internal.h" #include "nix/expr/tests/nix_api_expr.hh" #include "nix/util/tests/string_callback.hh" @@ -16,14 +13,6 @@ namespace nixC { -TEST_F(nix_api_expr_test, as_nix_value_ptr) -{ - // nix_alloc_value casts nix::Value to nix_value - // It should be obvious from the decl that that works, but if it doesn't, - // the whole implementation would be utterly broken. 
- ASSERT_EQ(sizeof(nix::Value), sizeof(nix_value)); -} - TEST_F(nix_api_expr_test, nix_value_get_int_invalid) { ASSERT_EQ(0, nix_get_int(ctx, nullptr)); @@ -320,8 +309,10 @@ TEST_F(nix_api_expr_test, nix_value_init_apply_error) // Evaluate it nix_value_force(ctx, state, v); - ASSERT_EQ(ctx->last_err_code, NIX_ERR_NIX_ERROR); - ASSERT_THAT(ctx->last_err.value(), testing::HasSubstr("attempt to call something which is not a function but")); + ASSERT_EQ(nix_err_code(ctx), NIX_ERR_NIX_ERROR); + ASSERT_THAT( + nix_err_msg(nullptr, ctx, nullptr), + testing::HasSubstr("attempt to call something which is not a function but")); // Clean up nix_gc_decref(ctx, some_string); @@ -380,7 +371,9 @@ TEST_F(nix_api_expr_test, nix_value_init_apply_lazy_arg) // nix_get_attr_byname isn't lazy (it could have been) so it will throw the exception nix_value * foo = nix_get_attr_byname(ctx, r, state, "foo"); ASSERT_EQ(nullptr, foo); - ASSERT_THAT(ctx->last_err.value(), testing::HasSubstr("error message for test case nix_value_init_apply_lazy_arg")); + ASSERT_THAT( + nix_err_msg(nullptr, ctx, nullptr), + testing::HasSubstr("error message for test case nix_value_init_apply_lazy_arg")); // Clean up nix_gc_decref(ctx, f); diff --git a/src/libexpr-tests/nix_api_value_internal.cc b/src/libexpr-tests/nix_api_value_internal.cc new file mode 100644 index 000000000..34db6ac81 --- /dev/null +++ b/src/libexpr-tests/nix_api_value_internal.cc @@ -0,0 +1,25 @@ +#include "nix_api_store.h" +#include "nix_api_util.h" +#include "nix_api_expr.h" +#include "nix_api_value.h" +#include "nix_api_expr_internal.h" + +#include "nix/expr/tests/nix_api_expr.hh" +#include "nix/util/tests/string_callback.hh" + +#include +#include +#include +#include + +namespace nixC { + +TEST_F(nix_api_expr_test, as_nix_value_ptr) +{ + // nix_alloc_value casts nix::Value to nix_value + // It should be obvious from the decl that that works, but if it doesn't, + // the whole implementation would be utterly broken. 
+ ASSERT_EQ(sizeof(nix::Value), sizeof(nix_value)); +} + +} // namespace nixC diff --git a/src/libstore-tests/nix_api_store.cc b/src/libstore-tests/nix_api_store.cc index c7146f977..c14fb6d9f 100644 --- a/src/libstore-tests/nix_api_store.cc +++ b/src/libstore-tests/nix_api_store.cc @@ -1,7 +1,5 @@ #include "nix_api_util.h" -#include "nix_api_util_internal.h" #include "nix_api_store.h" -#include "nix_api_store_internal.h" #include "nix/store/tests/nix_api_store.hh" #include "nix/util/tests/string_callback.hh" @@ -65,7 +63,7 @@ TEST_F(nix_api_store_test, nix_store_get_storedir) TEST_F(nix_api_store_test, InvalidPathFails) { nix_store_parse_path(ctx, store, "invalid-path"); - ASSERT_EQ(ctx->last_err_code, NIX_ERR_NIX_ERROR); + ASSERT_EQ(nix_err_code(ctx), NIX_ERR_NIX_ERROR); } TEST_F(nix_api_store_test, ReturnsValidStorePath) @@ -80,7 +78,7 @@ TEST_F(nix_api_store_test, ReturnsValidStorePath) TEST_F(nix_api_store_test, SetsLastErrCodeToNixOk) { StorePath * path = nix_store_parse_path(ctx, store, (nixStoreDir + PATH_SUFFIX).c_str()); - ASSERT_EQ(ctx->last_err_code, NIX_OK); + ASSERT_EQ(nix_err_code(ctx), NIX_OK); nix_store_path_free(path); } @@ -103,7 +101,7 @@ TEST_F(nix_api_util_context, nix_store_open_dummy) { nix_libstore_init(ctx); Store * store = nix_store_open(ctx, "dummy://", nullptr); - ASSERT_EQ(NIX_OK, ctx->last_err_code); + ASSERT_EQ(NIX_OK, nix_err_code(ctx)); ASSERT_STREQ("dummy://", store->ptr->config.getReference().render(/*withParams=*/true).c_str()); std::string str; @@ -117,7 +115,7 @@ TEST_F(nix_api_util_context, nix_store_open_invalid) { nix_libstore_init(ctx); Store * store = nix_store_open(ctx, "invalid://", nullptr); - ASSERT_EQ(NIX_ERR_NIX_ERROR, ctx->last_err_code); + ASSERT_EQ(NIX_ERR_NIX_ERROR, nix_err_code(ctx)); ASSERT_EQ(nullptr, store); nix_store_free(store); } diff --git a/src/libutil-test-support/include/nix/util/tests/nix_api_util.hh b/src/libutil-test-support/include/nix/util/tests/nix_api_util.hh index 57f7f1ecf..cc1d244f5 100644 --- a/src/libutil-test-support/include/nix/util/tests/nix_api_util.hh +++ b/src/libutil-test-support/include/nix/util/tests/nix_api_util.hh @@ -54,4 +54,12 @@ protected: #define assert_ctx_err() assert_ctx_err(__FILE__, __LINE__) }; +static inline auto createOwnedNixContext() +{ + return std::unique_ptr(nix_c_context_create(), {}); +} + } // namespace nixC diff --git a/src/libutil-tests/meson.build b/src/libutil-tests/meson.build index 0e2a2e468..ff71d2215 100644 --- a/src/libutil-tests/meson.build +++ b/src/libutil-tests/meson.build @@ -63,6 +63,7 @@ sources = files( 'lru-cache.cc', 'monitorfdhup.cc', 'nix_api_util.cc', + 'nix_api_util_internal.cc', 'pool.cc', 'position.cc', 'processes.cc', diff --git a/src/libutil-tests/nix_api_util.cc b/src/libutil-tests/nix_api_util.cc index 9693ab3a5..48f85c403 100644 --- a/src/libutil-tests/nix_api_util.cc +++ b/src/libutil-tests/nix_api_util.cc @@ -1,7 +1,6 @@ #include "nix/util/config-global.hh" #include "nix/util/args.hh" #include "nix_api_util.h" -#include "nix_api_util_internal.h" #include "nix/util/tests/nix_api_util.hh" #include "nix/util/tests/string_callback.hh" @@ -13,41 +12,6 @@ namespace nixC { -TEST_F(nix_api_util_context, nix_context_error) -{ - std::string err_msg_ref; - try { - throw nix::Error("testing error"); - } catch (nix::Error & e) { - err_msg_ref = e.what(); - nix_context_error(ctx); - } - ASSERT_EQ(ctx->last_err_code, NIX_ERR_NIX_ERROR); - ASSERT_EQ(ctx->name, "nix::Error"); - ASSERT_EQ(*ctx->last_err, err_msg_ref); - ASSERT_EQ(ctx->info->msg.str(), "testing error"); 
- - try { - throw std::runtime_error("testing exception"); - } catch (std::exception & e) { - err_msg_ref = e.what(); - nix_context_error(ctx); - } - ASSERT_EQ(ctx->last_err_code, NIX_ERR_UNKNOWN); - ASSERT_EQ(*ctx->last_err, err_msg_ref); - - nix_clear_err(ctx); - ASSERT_EQ(ctx->last_err_code, NIX_OK); -} - -TEST_F(nix_api_util_context, nix_set_err_msg) -{ - ASSERT_EQ(ctx->last_err_code, NIX_OK); - nix_set_err_msg(ctx, NIX_ERR_UNKNOWN, "unknown test error"); - ASSERT_EQ(ctx->last_err_code, NIX_ERR_UNKNOWN); - ASSERT_EQ(*ctx->last_err, "unknown test error"); -} - TEST(nix_api_util, nix_version_get) { ASSERT_EQ(std::string(nix_version_get()), PACKAGE_VERSION); @@ -61,17 +25,9 @@ struct MySettings : nix::Config MySettings mySettings; static nix::GlobalConfig::Register rs(&mySettings); -static auto createOwnedNixContext() -{ - return std::unique_ptr(nix_c_context_create(), {}); -} - TEST_F(nix_api_util_context, nix_setting_get) { - ASSERT_EQ(ctx->last_err_code, NIX_OK); + ASSERT_EQ(nix_err_code(ctx), NIX_OK); std::string setting_value; nix_err result = nix_setting_get(ctx, "invalid-key", OBSERVE_STRING(setting_value)); ASSERT_EQ(result, NIX_ERR_KEY); @@ -114,40 +70,6 @@ TEST_F(nix_api_util_context, nix_err_msg) ASSERT_EQ(sz, err_msg.size()); } -TEST_F(nix_api_util_context, nix_err_info_msg) -{ - std::string err_info; - - // no error - EXPECT_THROW(nix_err_info_msg(NULL, ctx, OBSERVE_STRING(err_info)), nix::Error); - - try { - throw nix::Error("testing error"); - } catch (...) { - nix_context_error(ctx); - } - auto new_ctx = createOwnedNixContext(); - nix_err_info_msg(new_ctx.get(), ctx, OBSERVE_STRING(err_info)); - ASSERT_STREQ("testing error", err_info.c_str()); -} - -TEST_F(nix_api_util_context, nix_err_name) -{ - std::string err_name; - - // no error - EXPECT_THROW(nix_err_name(NULL, ctx, OBSERVE_STRING(err_name)), nix::Error); - - try { - throw nix::Error("testing error"); - } catch (...) 
{ - nix_context_error(ctx); - } - auto new_ctx = createOwnedNixContext(); - nix_err_name(new_ctx.get(), ctx, OBSERVE_STRING(err_name)); - ASSERT_EQ(std::string(err_name), "nix::Error"); -} - TEST_F(nix_api_util_context, nix_err_code) { ASSERT_EQ(nix_err_code(ctx), NIX_OK); diff --git a/src/libutil-tests/nix_api_util_internal.cc b/src/libutil-tests/nix_api_util_internal.cc new file mode 100644 index 000000000..6fb0a623f --- /dev/null +++ b/src/libutil-tests/nix_api_util_internal.cc @@ -0,0 +1,85 @@ +#include "nix/util/config-global.hh" +#include "nix/util/args.hh" +#include "nix_api_util.h" +#include "nix_api_util_internal.h" +#include "nix/util/tests/nix_api_util.hh" +#include "nix/util/tests/string_callback.hh" + +#include + +#include + +#include "util-tests-config.hh" + +namespace nixC { + +TEST_F(nix_api_util_context, nix_context_error) +{ + std::string err_msg_ref; + try { + throw nix::Error("testing error"); + } catch (nix::Error & e) { + err_msg_ref = e.what(); + nix_context_error(ctx); + } + ASSERT_EQ(nix_err_code(ctx), NIX_ERR_NIX_ERROR); + ASSERT_EQ(ctx->name, "nix::Error"); + ASSERT_EQ(*ctx->last_err, err_msg_ref); + ASSERT_EQ(ctx->info->msg.str(), "testing error"); + + try { + throw std::runtime_error("testing exception"); + } catch (std::exception & e) { + err_msg_ref = e.what(); + nix_context_error(ctx); + } + ASSERT_EQ(nix_err_code(ctx), NIX_ERR_UNKNOWN); + ASSERT_EQ(*ctx->last_err, err_msg_ref); + + nix_clear_err(ctx); + ASSERT_EQ(nix_err_code(ctx), NIX_OK); +} + +TEST_F(nix_api_util_context, nix_set_err_msg) +{ + ASSERT_EQ(nix_err_code(ctx), NIX_OK); + nix_set_err_msg(ctx, NIX_ERR_UNKNOWN, "unknown test error"); + ASSERT_EQ(nix_err_code(ctx), NIX_ERR_UNKNOWN); + ASSERT_EQ(*ctx->last_err, "unknown test error"); +} + +TEST_F(nix_api_util_context, nix_err_info_msg) +{ + std::string err_info; + + // no error + EXPECT_THROW(nix_err_info_msg(NULL, ctx, OBSERVE_STRING(err_info)), nix::Error); + + try { + throw nix::Error("testing error"); + } catch (...) { + nix_context_error(ctx); + } + auto new_ctx = createOwnedNixContext(); + nix_err_info_msg(new_ctx.get(), ctx, OBSERVE_STRING(err_info)); + ASSERT_STREQ("testing error", err_info.c_str()); +} + +TEST_F(nix_api_util_context, nix_err_name) +{ + std::string err_name; + + // no error + EXPECT_THROW(nix_err_name(NULL, ctx, OBSERVE_STRING(err_name)), nix::Error); + + try { + throw nix::Error("testing error"); + } catch (...) 
{ + nix_context_error(ctx); + } + auto new_ctx = createOwnedNixContext(); + nix_err_name(new_ctx.get(), ctx, OBSERVE_STRING(err_name)); + ASSERT_EQ(std::string(err_name), "nix::Error"); +} + +} // namespace nixC From 671c21db9f4d0342d8387ae6bf7a716bae837745 Mon Sep 17 00:00:00 2001 From: netadr <42688647+netadr@users.noreply.github.com> Date: Sun, 31 Aug 2025 19:07:03 -0400 Subject: [PATCH 325/382] libfetchers: Fix SSH key identifiers for sk type keys libfetchers: Mark ssh-ecdsa-sk key type mapping as a TODO for now --- src/libfetchers/git-utils.cc | 35 +++++++++++++++++++++++------------ 1 file changed, 23 insertions(+), 12 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index b8d9b03ce..1861838ed 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -568,23 +568,34 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this void verifyCommit(const Hash & rev, const std::vector & publicKeys) override { + // Map of SSH key types to their internal OpenSSH representations + static const std::unordered_map keyTypeMap = { + {"ssh-dsa", "ssh-dsa"}, + {"ssh-ecdsa", "ssh-ecdsa"}, + {"ssh-ecdsa-sk", "sk-ecdsa-sha2-nistp256@openssh.com"}, + {"ssh-ed25519", "ssh-ed25519"}, + {"ssh-ed25519-sk", "sk-ssh-ed25519@openssh.com"}, + {"ssh-rsa", "ssh-rsa"}}; + // Create ad-hoc allowedSignersFile and populate it with publicKeys auto allowedSignersFile = createTempFile().second; std::string allowedSigners; + for (const fetchers::PublicKey & k : publicKeys) { - if (k.type != "ssh-dsa" && k.type != "ssh-ecdsa" && k.type != "ssh-ecdsa-sk" && k.type != "ssh-ed25519" - && k.type != "ssh-ed25519-sk" && k.type != "ssh-rsa") + auto it = keyTypeMap.find(k.type); + if (it == keyTypeMap.end()) { + std::string supportedTypes; + for (const auto & [type, _] : keyTypeMap) { + supportedTypes += fmt(" %s\n", type); + } throw Error( - "Unknown key type '%s'.\n" - "Please use one of\n" - "- ssh-dsa\n" - " ssh-ecdsa\n" - " ssh-ecdsa-sk\n" - " ssh-ed25519\n" - " ssh-ed25519-sk\n" - " ssh-rsa", - k.type); - allowedSigners += "* " + k.type + " " + k.key + "\n"; + "Invalid SSH key type '%s' in publicKeys.\n" + "Please use one of:\n%s", + k.type, + supportedTypes); + } + + allowedSigners += fmt("* %s %s\n", it->second, k.key); } writeFile(allowedSignersFile, allowedSigners); From 450633aa8cd0c0871164a24ac34eac2386218bc7 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 29 Aug 2025 15:49:58 -0400 Subject: [PATCH 326/382] Move `machineName` from `DerivationBuildingGoal` to `HookInstance` Exactly why is is correct is a little subtle, because sometimes the worker is owned by the worker. But the commit message in e437b0825018b1935f9a849382c12b1df0aeae06 explained the situation well enough: I made that commit message part of the ABI docs, and now it should be understandable to the next person. 
--- src/libstore/build/derivation-building-goal.cc | 6 +++--- .../nix/store/build/derivation-building-goal.hh | 5 ----- .../include/nix/store/build/hook-instance.hh | 17 +++++++++++++++++ 3 files changed, 20 insertions(+), 8 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 008549acb..75295eab7 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -450,7 +450,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() fmt("building '%s'", worker.store.printStorePath(drvPath)); #ifndef _WIN32 // TODO enable build hook on Windows if (hook) - msg += fmt(" on '%s'", machineName); + msg += fmt(" on '%s'", hook->machineName); #endif act = std::make_unique( *logger, @@ -460,7 +460,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() Logger::Fields{ worker.store.printStorePath(drvPath), #ifndef _WIN32 // TODO enable build hook on Windows - hook ? machineName : + hook ? hook->machineName : #endif "", 1, @@ -1027,7 +1027,7 @@ HookReply DerivationBuildingGoal::tryBuildHook() hook = std::move(worker.hook); try { - machineName = readLine(hook->fromHook.readSide.get()); + hook->machineName = readLine(hook->fromHook.readSide.get()); } catch (Error & e) { e.addTrace({}, "while reading the machine name from the build hook"); throw; diff --git a/src/libstore/include/nix/store/build/derivation-building-goal.hh b/src/libstore/include/nix/store/build/derivation-building-goal.hh index 2cb111760..07f9b21ae 100644 --- a/src/libstore/include/nix/store/build/derivation-building-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-building-goal.hh @@ -95,11 +95,6 @@ private: std::map builderActivities; - /** - * The remote machine on which we're building. - */ - std::string machineName; - void timedOut(Error && ex) override; std::string key() override; diff --git a/src/libstore/unix/include/nix/store/build/hook-instance.hh b/src/libstore/unix/include/nix/store/build/hook-instance.hh index 87e03665c..7657d5dbd 100644 --- a/src/libstore/unix/include/nix/store/build/hook-instance.hh +++ b/src/libstore/unix/include/nix/store/build/hook-instance.hh @@ -7,6 +7,14 @@ namespace nix { +/** + * @note Sometimes this is owned by the `Worker`, and sometimes it is + * owned by a `Goal`. This is for efficiency: rather than starting the + * hook every time we want to ask whether we can run a remote build + * (which can be very often), we reuse a hook process for answering + * those queries until it accepts a build. So if there are N + * derivations to be built, at most N hooks will be started. + */ struct HookInstance { /** @@ -29,6 +37,15 @@ struct HookInstance */ Pid pid; + /** + * The remote machine on which we're building. + * + * @Invariant When the hook instance is owned by the `Worker`, this + * is the empty string. When it is owned by a `Goal`, this should be + * set. + */ + std::string machineName; + FdSink sink; std::map activities; From c0c2a89f05a14b70363870408eee29f5a15cdff0 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 3 Sep 2025 16:51:53 -0400 Subject: [PATCH 327/382] `DerivationBuildingGoal::initialOutputs` move initialization down to `tryToBuild` Will help us make this a local variable. 
--- .../build/derivation-building-goal.cc | 57 ++++++++++--------- 1 file changed, 29 insertions(+), 28 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 75295eab7..95f0ee9d5 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -127,31 +127,6 @@ static void runPostBuildHook( produced using a substitute. So we have to build instead. */ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() { - /* Recheck at goal start. In particular, whereas before we were - given this information by the downstream goal, that cannot happen - anymore if the downstream goal only cares about one output, but - we care about all outputs. */ - auto outputHashes = staticOutputHashes(worker.evalStore, *drv); - for (auto & [outputName, outputHash] : outputHashes) { - InitialOutput v{.outputHash = outputHash}; - - /* TODO we might want to also allow randomizing the paths - for regular CA derivations, e.g. for sake of checking - determinism. */ - if (drv->type().isImpure()) { - v.known = InitialOutputStatus{ - .path = StorePath::random(outputPathName(drv->name, outputName)), - .status = PathStatus::Absent, - }; - } - - initialOutputs.insert({ - outputName, - std::move(v), - }); - } - checkPathValidity(); - Goals waitees; std::map, GoalPtr, value_comparison> inputGoals; @@ -334,14 +309,15 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() if (resolvedResult.success()) { SingleDrvOutputs builtOutputs; + auto outputHashes = staticOutputHashes(worker.evalStore, *drv); auto resolvedHashes = staticOutputHashes(worker.store, drvResolved); StorePathSet outputPaths; for (auto & outputName : drvResolved.outputNames()) { - auto initialOutput = get(initialOutputs, outputName); + auto outputHash = get(outputHashes, outputName); auto resolvedHash = get(resolvedHashes, outputName); - if ((!initialOutput) || (!resolvedHash)) + if ((!outputHash) || (!resolvedHash)) throw Error( "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolve)", worker.store.printStorePath(drvPath), @@ -368,7 +344,7 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() if (!drv->type().isImpure()) { auto newRealisation = realisation; - newRealisation.id = DrvOutput{initialOutput->outputHash, outputName}; + newRealisation.id = DrvOutput{*outputHash, outputName}; newRealisation.signatures.clear(); if (!drv->type().isFixed()) { auto & drvStore = worker.evalStore.isValidPath(drvPath) ? worker.evalStore : worker.store; @@ -441,6 +417,31 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() Goal::Co DerivationBuildingGoal::tryToBuild() { + /* Recheck at goal start. In particular, whereas before we were + given this information by the downstream goal, that cannot happen + anymore if the downstream goal only cares about one output, but + we care about all outputs. */ + auto outputHashes = staticOutputHashes(worker.evalStore, *drv); + for (auto & [outputName, outputHash] : outputHashes) { + InitialOutput v{.outputHash = outputHash}; + + /* TODO we might want to also allow randomizing the paths + for regular CA derivations, e.g. for sake of checking + determinism. */ + if (drv->type().isImpure()) { + v.known = InitialOutputStatus{ + .path = StorePath::random(outputPathName(drv->name, outputName)), + .status = PathStatus::Absent, + }; + } + + initialOutputs.insert({ + outputName, + std::move(v), + }); + } + checkPathValidity(); + auto started = [&]() { auto msg = fmt(buildMode == bmRepair ? 
"repairing outputs of '%s'" From a30bf96349604442265561ba305cb24793a09c79 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 29 Aug 2025 15:26:15 -0400 Subject: [PATCH 328/382] `DerivationBuildingGoal::initialOutputs` make local variable Also inline `assertPathValidity` in the process. --- .../build/derivation-building-goal.cc | 33 ++++++++++--------- .../store/build/derivation-building-goal.hh | 12 ++----- 2 files changed, 20 insertions(+), 25 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 95f0ee9d5..072bbfa93 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -417,7 +417,9 @@ Goal::Co DerivationBuildingGoal::gaveUpOnSubstitution() Goal::Co DerivationBuildingGoal::tryToBuild() { - /* Recheck at goal start. In particular, whereas before we were + std::map initialOutputs; + + /* Recheck at this point. In particular, whereas before we were given this information by the downstream goal, that cannot happen anymore if the downstream goal only cares about one output, but we care about all outputs. */ @@ -440,7 +442,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() std::move(v), }); } - checkPathValidity(); + checkPathValidity(initialOutputs); auto started = [&]() { auto msg = @@ -528,7 +530,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() omitted, but that would be less efficient.) Note that since we now hold the locks on the output paths, no other process can build this derivation, so no further checks are necessary. */ - auto [allValid, validOutputs] = checkPathValidity(); + auto [allValid, validOutputs] = checkPathValidity(initialOutputs); if (buildMode != bmCheck && allValid) { debug("skipping build of derivation '%s', someone beat us to it", worker.store.printStorePath(drvPath)); @@ -556,7 +558,7 @@ Goal::Co DerivationBuildingGoal::tryToBuild() if (buildLocally) { useHook = false; } else { - switch (tryBuildHook()) { + switch (tryBuildHook(initialOutputs)) { case rpAccept: /* Yes, it has started doing so. Wait until we get EOF from the hook. */ @@ -644,8 +646,16 @@ Goal::Co DerivationBuildingGoal::tryToBuild() We can only early return when the outputs are known a priori. For floating content-addressing derivations this isn't the case. + + Aborts if any output is not valid or corrupt, and otherwise + returns a 'SingleDrvOutputs' structure containing all outputs. 
*/ - assertPathValidity(); + [&] { + auto [allValid, validOutputs] = checkPathValidity(initialOutputs); + if (!allValid) + throw Error("some outputs are unexpectedly invalid"); + return validOutputs; + }(); StorePathSet outputPaths; for (auto & [_, output] : builtOutputs) @@ -960,7 +970,7 @@ BuildError DerivationBuildingGoal::fixupBuilderFailureErrorMessage(BuilderFailur return BuildError{e.status, msg}; } -HookReply DerivationBuildingGoal::tryBuildHook() +HookReply DerivationBuildingGoal::tryBuildHook(const std::map & initialOutputs) { #ifdef _WIN32 // TODO enable build hook on Windows return rpDecline; @@ -1239,7 +1249,8 @@ std::map> DerivationBuildingGoal::queryPar return res; } -std::pair DerivationBuildingGoal::checkPathValidity() +std::pair +DerivationBuildingGoal::checkPathValidity(std::map & initialOutputs) { if (drv->type().isImpure()) return {false, {}}; @@ -1296,14 +1307,6 @@ std::pair DerivationBuildingGoal::checkPathValidity() return {allValid, validOutputs}; } -SingleDrvOutputs DerivationBuildingGoal::assertPathValidity() -{ - auto [allValid, validOutputs] = checkPathValidity(); - if (!allValid) - throw Error("some outputs are unexpectedly invalid"); - return validOutputs; -} - Goal::Done DerivationBuildingGoal::doneSuccess(BuildResult::Status status, SingleDrvOutputs builtOutputs) { buildResult.status = status; diff --git a/src/libstore/include/nix/store/build/derivation-building-goal.hh b/src/libstore/include/nix/store/build/derivation-building-goal.hh index 07f9b21ae..d394eb3c9 100644 --- a/src/libstore/include/nix/store/build/derivation-building-goal.hh +++ b/src/libstore/include/nix/store/build/derivation-building-goal.hh @@ -55,8 +55,6 @@ private: */ StorePathSet inputPaths; - std::map initialOutputs; - /** * File descriptor for the log file. */ @@ -108,7 +106,7 @@ private: /** * Is the build hook willing to perform the build? */ - HookReply tryBuildHook(); + HookReply tryBuildHook(const std::map & initialOutputs); /** * Open a log file and a pipe to it. @@ -142,13 +140,7 @@ private: * whether all outputs are valid and non-corrupt, and a * 'SingleDrvOutputs' structure containing the valid outputs. */ - std::pair checkPathValidity(); - - /** - * Aborts if any output is not valid or corrupt, and otherwise - * returns a 'SingleDrvOutputs' structure containing all outputs. - */ - SingleDrvOutputs assertPathValidity(); + std::pair checkPathValidity(std::map & initialOutputs); /** * Forcibly kill the child process, if any. From 7f3314a68cf250163b2a61691100739536a6bb99 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 3 Sep 2025 17:27:07 -0400 Subject: [PATCH 329/382] `DerivationBuilder::initialOutputs` make `const` At one point I remember it did mutatate `initialOutputs`, but not anymore! 
--- src/libstore/include/nix/store/build/derivation-builder.hh | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index 45fbba3f5..deb4612b4 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -76,10 +76,7 @@ struct DerivationBuilderParams */ const StorePathSet & inputPaths; - /** - * @note we do in fact mutate this - */ - std::map & initialOutputs; + const std::map & initialOutputs; const BuildMode & buildMode; From 14c206f05a3f1b080cce457a67e54aa587867a5f Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 3 Sep 2025 17:33:48 -0400 Subject: [PATCH 330/382] `DerivationBuilder` no more callback soup for logging `startBuilder` just returns the descriptor for the pipe now. --- src/libstore/build/derivation-building-goal.cc | 10 ++++------ .../include/nix/store/build/derivation-builder.hh | 11 +++-------- src/libstore/unix/build/derivation-builder.cc | 7 ++++--- 3 files changed, 11 insertions(+), 17 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 072bbfa93..4760c039b 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -716,11 +716,6 @@ Goal::Co DerivationBuildingGoal::tryToBuild() ~DerivationBuildingGoalCallbacks() override = default; - void childStarted(Descriptor builderOut) override - { - goal.worker.childStarted(goal.shared_from_this(), {builderOut}, true, true); - } - void childTerminated() override { goal.worker.childTerminated(&goal); @@ -802,10 +797,11 @@ Goal::Co DerivationBuildingGoal::tryToBuild() actLock.reset(); + Descriptor builderOut; try { /* Okay, we have to build. */ - builder->startBuilder(); + builderOut = builder->startBuilder(); } catch (BuildError & e) { builder.reset(); @@ -814,6 +810,8 @@ Goal::Co DerivationBuildingGoal::tryToBuild() co_return doneFailure(std::move(e)); // InputRejected } + worker.childStarted(shared_from_this(), {builderOut}, true, true); + started(); co_await Suspend{}; diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index deb4612b4..e8aefa377 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -114,13 +114,6 @@ struct DerivationBuilderCallbacks */ virtual void closeLogFile() = 0; - /** - * Hook up `builderOut` to some mechanism to ingest the log - * - * @todo this should be reworked - */ - virtual void childStarted(Descriptor builderOut) = 0; - /** * @todo this should be reworked */ @@ -161,8 +154,10 @@ struct DerivationBuilder : RestrictionContext /** * Start building a derivation. 
+ * + * @return logging pipe */ - virtual void startBuilder() = 0; + virtual Descriptor startBuilder() = 0; /** * Tear down build environment after the builder exits (either on diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index de0b46295..577385093 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -216,7 +216,7 @@ public: bool prepareBuild() override; - void startBuilder() override; + Descriptor startBuilder() override; SingleDrvOutputs unprepareBuild() override; @@ -679,7 +679,7 @@ static bool checkNotWorldWritable(std::filesystem::path path) return true; } -void DerivationBuilderImpl::startBuilder() +Descriptor DerivationBuilderImpl::startBuilder() { /* Make sure that no other processes are executing under the sandbox uids. This must be done before any chownToBuilder() @@ -841,9 +841,10 @@ void DerivationBuilderImpl::startBuilder() startChild(); pid.setSeparatePG(true); - miscMethods->childStarted(builderOut.get()); processSandboxSetupMessages(); + + return builderOut.get(); } PathsInChroot DerivationBuilderImpl::getPathsInSandbox() From 2acb9559d531a952d779970fc5f2ccd536d8d272 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 3 Sep 2025 17:58:50 -0400 Subject: [PATCH 331/382] Combine `DerivationBuilder::{prepareBuild,startBuilder}` After many other cleanups, it turns out there is no reason for these to be separate methods. We can combine them to simplify things. --- .../build/derivation-building-goal.cc | 31 ++++++++++--------- .../nix/store/build/derivation-builder.hh | 16 +++++----- src/libstore/unix/build/derivation-builder.cc | 27 ++++++---------- 3 files changed, 33 insertions(+), 41 deletions(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index 4760c039b..ebef2a375 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -689,6 +689,8 @@ Goal::Co DerivationBuildingGoal::tryToBuild() #else assert(!hook); + Descriptor builderOut; + // Will continue here while waiting for a build user below while (true) { @@ -781,7 +783,17 @@ Goal::Co DerivationBuildingGoal::tryToBuild() }); } - if (!builder->prepareBuild()) { + std::optional builderOutOpt; + try { + /* Okay, we have to build. */ + builderOutOpt = builder->startBuild(); + } catch (BuildError & e) { + builder.reset(); + outputLocks.unlock(); + worker.permanentFailure = true; + co_return doneFailure(std::move(e)); // InputRejected + } + if (!builderOutOpt) { if (!actLock) actLock = std::make_unique( *logger, @@ -790,26 +802,15 @@ Goal::Co DerivationBuildingGoal::tryToBuild() fmt("waiting for a free build user ID for '%s'", Magenta(worker.store.printStorePath(drvPath)))); co_await waitForAWhile(); continue; - } + } else { + builderOut = *std::move(builderOutOpt); + }; break; } actLock.reset(); - Descriptor builderOut; - try { - - /* Okay, we have to build. 
*/ - builderOut = builder->startBuilder(); - - } catch (BuildError & e) { - builder.reset(); - outputLocks.unlock(); - worker.permanentFailure = true; - co_return doneFailure(std::move(e)); // InputRejected - } - worker.childStarted(shared_from_this(), {builderOut}, true, true); started(); diff --git a/src/libstore/include/nix/store/build/derivation-builder.hh b/src/libstore/include/nix/store/build/derivation-builder.hh index e8aefa377..7fad2837a 100644 --- a/src/libstore/include/nix/store/build/derivation-builder.hh +++ b/src/libstore/include/nix/store/build/derivation-builder.hh @@ -147,17 +147,15 @@ struct DerivationBuilder : RestrictionContext * locks as needed). After this is run, the builder should be * started. * - * @returns true if successful, false if we could not acquire a build - * user. In that case, the caller must wait and then try again. - */ - virtual bool prepareBuild() = 0; - - /** - * Start building a derivation. + * @returns logging pipe if successful, `std::nullopt` if we could + * not acquire a build user. In that case, the caller must wait and + * then try again. * - * @return logging pipe + * @note "success" just means that we were able to set up the environment + * and start the build. The builder could have immediately exited with + * failure, and that would still be considered a successful start. */ - virtual Descriptor startBuilder() = 0; + virtual std::optional startBuild() = 0; /** * Tear down build environment after the builder exits (either on diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index 577385093..d6979ab5f 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -214,9 +214,7 @@ protected: public: - bool prepareBuild() override; - - Descriptor startBuilder() override; + std::optional startBuild() override; SingleDrvOutputs unprepareBuild() override; @@ -470,19 +468,6 @@ bool DerivationBuilderImpl::killChild() return ret; } -bool DerivationBuilderImpl::prepareBuild() -{ - if (useBuildUsers()) { - if (!buildUser) - buildUser = getBuildUser(); - - if (!buildUser) - return false; - } - - return true; -} - SingleDrvOutputs DerivationBuilderImpl::unprepareBuild() { /* Since we got an EOF on the logger pipe, the builder is presumed @@ -679,8 +664,16 @@ static bool checkNotWorldWritable(std::filesystem::path path) return true; } -Descriptor DerivationBuilderImpl::startBuilder() +std::optional DerivationBuilderImpl::startBuild() { + if (useBuildUsers()) { + if (!buildUser) + buildUser = getBuildUser(); + + if (!buildUser) + return std::nullopt; + } + /* Make sure that no other processes are executing under the sandbox uids. This must be done before any chownToBuilder() calls. */ From 3513ab13dc45f9025cebc6f8f694a2963d44556a Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 5 Sep 2025 02:56:28 +0300 Subject: [PATCH 332/382] libstore: Do not normalize daemon -> unix://, local -> local:// This is relied upon (specifically the `local` store) by existing tooling [1] and we broke this in 3e7879e6dfb75d5c39058b8c2fd6619db8df9b95 (which was first released in 2.31). To lessen the scope of the breakage we should not normalize "auto" references and explicitly specified references like "local" or "daemon". It also makes sense to canonicalize local://,daemon:// to be more compatible with prior behavior. 
[1]: https://github.com/maralorn/nix-output-monitor/blob/05e1b3cba2fa328a1781390a4e4515e9c432229e/lib/NOM/Builds.hs#L60-L64 --- .../data/store-reference/daemon_shorthand.txt | 1 + .../store-reference/local_shorthand_3.txt | 1 + src/libstore-tests/local-store.cc | 6 +++++ src/libstore-tests/store-reference.cc | 14 +++++++++++ src/libstore-tests/uds-remote-store.cc | 6 +++++ .../include/nix/store/store-reference.hh | 24 ++++++++++++++++++- src/libstore/local-store.cc | 7 +++++- src/libstore/store-api.cc | 8 ++++++- src/libstore/store-reference.cc | 18 +++++++------- src/libstore/uds-remote-store.cc | 11 +++++---- tests/functional/store-info.sh | 18 +++++++++----- 11 files changed, 90 insertions(+), 24 deletions(-) create mode 100644 src/libstore-tests/data/store-reference/daemon_shorthand.txt create mode 100644 src/libstore-tests/data/store-reference/local_shorthand_3.txt diff --git a/src/libstore-tests/data/store-reference/daemon_shorthand.txt b/src/libstore-tests/data/store-reference/daemon_shorthand.txt new file mode 100644 index 000000000..bd8c0f8c4 --- /dev/null +++ b/src/libstore-tests/data/store-reference/daemon_shorthand.txt @@ -0,0 +1 @@ +daemon \ No newline at end of file diff --git a/src/libstore-tests/data/store-reference/local_shorthand_3.txt b/src/libstore-tests/data/store-reference/local_shorthand_3.txt new file mode 100644 index 000000000..c2c027fec --- /dev/null +++ b/src/libstore-tests/data/store-reference/local_shorthand_3.txt @@ -0,0 +1 @@ +local \ No newline at end of file diff --git a/src/libstore-tests/local-store.cc b/src/libstore-tests/local-store.cc index cdbc29b03..d00888897 100644 --- a/src/libstore-tests/local-store.cc +++ b/src/libstore-tests/local-store.cc @@ -33,4 +33,10 @@ TEST(LocalStore, constructConfig_rootPath) EXPECT_EQ(config.rootDir.get(), std::optional{"/foo/bar"}); } +TEST(LocalStore, constructConfig_to_string) +{ + LocalStoreConfig config{"local", "", {}}; + EXPECT_EQ(config.getReference().to_string(), "local"); +} + } // namespace nix diff --git a/src/libstore-tests/store-reference.cc b/src/libstore-tests/store-reference.cc index 01b75f3d2..d9f040ab6 100644 --- a/src/libstore-tests/store-reference.cc +++ b/src/libstore-tests/store-reference.cc @@ -107,6 +107,13 @@ URI_TEST_READ(local_shorthand_1, localExample_1) URI_TEST_READ(local_shorthand_2, localExample_2) +URI_TEST( + local_shorthand_3, + (StoreReference{ + .variant = StoreReference::Local{}, + .params = {}, + })) + static StoreReference unixExample{ .variant = StoreReference::Specified{ @@ -134,4 +141,11 @@ URI_TEST( .params = {}, })) +URI_TEST( + daemon_shorthand, + (StoreReference{ + .variant = StoreReference::Daemon{}, + .params = {}, + })) + } // namespace nix diff --git a/src/libstore-tests/uds-remote-store.cc b/src/libstore-tests/uds-remote-store.cc index c215d6e18..11e6b04a3 100644 --- a/src/libstore-tests/uds-remote-store.cc +++ b/src/libstore-tests/uds-remote-store.cc @@ -16,4 +16,10 @@ TEST(UDSRemoteStore, constructConfigWrongScheme) EXPECT_THROW(UDSRemoteStoreConfig("http", "/tmp/socket", {}), UsageError); } +TEST(UDSRemoteStore, constructConfig_to_string) +{ + UDSRemoteStoreConfig config{"unix", "", {}}; + EXPECT_EQ(config.getReference().to_string(), "daemon"); +} + } // namespace nix diff --git a/src/libstore/include/nix/store/store-reference.hh b/src/libstore/include/nix/store/store-reference.hh index 1df333947..dc34500d9 100644 --- a/src/libstore/include/nix/store/store-reference.hh +++ b/src/libstore/include/nix/store/store-reference.hh @@ -64,7 +64,29 @@ struct StoreReference 
auto operator<=>(const Specified & rhs) const = default; }; - typedef std::variant Variant; + /** + * Special case for `daemon` to avoid normalization. + */ + struct Daemon : Specified + { + Daemon() + : Specified({.scheme = "unix"}) + { + } + }; + + /** + * Special case for `local` to avoid normalization. + */ + struct Local : Specified + { + Local() + : Specified({.scheme = "local"}) + { + } + }; + + typedef std::variant Variant; Variant variant; diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 7872d4f93..112d5b14c 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -456,12 +456,17 @@ LocalStore::~LocalStore() StoreReference LocalStoreConfig::getReference() const { + auto params = getQueryParams(); + /* Back-compatibility kludge. Tools like nix-output-monitor expect 'local' + and can't parse 'local://'. */ + if (params.empty()) + return {.variant = StoreReference::Local{}}; return { .variant = StoreReference::Specified{ .scheme = *uriSchemes().begin(), }, - .params = getQueryParams(), + .params = std::move(params), }; } diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index d96be5965..78d2bbd54 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -818,7 +818,13 @@ makeCopyPathMessage(const StoreConfig & srcCfg, const StoreConfig & dstCfg, std: auto isShorthand = [](const StoreReference & ref) { /* At this point StoreReference **must** be resolved. */ - const auto & specified = std::get(ref.variant); + const auto & specified = std::visit( + overloaded{ + [](const StoreReference::Auto &) -> const StoreReference::Specified & { unreachable(); }, + [](const StoreReference::Specified & specified) -> const StoreReference::Specified & { + return specified; + }}, + ref.variant); const auto & scheme = specified.scheme; return (scheme == "local" || scheme == "unix") && specified.authority.empty(); }; diff --git a/src/libstore/store-reference.cc b/src/libstore/store-reference.cc index 8b4c19600..2c54e497e 100644 --- a/src/libstore/store-reference.cc +++ b/src/libstore/store-reference.cc @@ -25,6 +25,8 @@ std::string StoreReference::render(bool withParams) const std::visit( overloaded{ [&](const StoreReference::Auto &) { res = "auto"; }, + [&](const StoreReference::Daemon &) { res = "daemon"; }, + [&](const StoreReference::Local &) { res = "local"; }, [&](const StoreReference::Specified & g) { res = g.scheme; res += "://"; @@ -66,21 +68,17 @@ StoreReference StoreReference::parse(const std::string & uri, const StoreReferen .params = std::move(params), }; } else if (baseURI == "daemon") { + if (params.empty()) + return {.variant = Daemon{}}; return { - .variant = - Specified{ - .scheme = "unix", - .authority = "", - }, + .variant = Specified{.scheme = "unix", .authority = ""}, .params = std::move(params), }; } else if (baseURI == "local") { + if (params.empty()) + return {.variant = Local{}}; return { - .variant = - Specified{ - .scheme = "local", - .authority = "", - }, + .variant = Specified{.scheme = "local", .authority = ""}, .params = std::move(params), }; } else if (isNonUriPath(baseURI)) { diff --git a/src/libstore/uds-remote-store.cc b/src/libstore/uds-remote-store.cc index 4871b4913..9725fe8a0 100644 --- a/src/libstore/uds-remote-store.cc +++ b/src/libstore/uds-remote-store.cc @@ -57,15 +57,16 @@ UDSRemoteStore::UDSRemoteStore(ref config) StoreReference UDSRemoteStoreConfig::getReference() const { + /* We specifically return "daemon" here instead of "unix://" or "unix://${path}" + * to be more 
compatible with older versions of nix. Some tooling out there + * tries hard to parse store references and it might not be able to handle "unix://". */ + if (path == settings.nixDaemonSocketFile) + return {.variant = StoreReference::Daemon{}}; return { .variant = StoreReference::Specified{ .scheme = *uriSchemes().begin(), - // We return the empty string when the path looks like the - // default path, but we could also just return the path - // verbatim always, to be robust to overall config changes - // at the cost of some verbosity. - .authority = path == settings.nixDaemonSocketFile ? "" : path, + .authority = path, }, }; } diff --git a/tests/functional/store-info.sh b/tests/functional/store-info.sh index 7c9257215..adaee5dfe 100755 --- a/tests/functional/store-info.sh +++ b/tests/functional/store-info.sh @@ -13,14 +13,20 @@ normalize_nix_store_url () { # Need to actually ask Nix in this case echo "$defaultStore" ;; + local | 'local://' ) + echo 'local' + ;; + daemon | 'unix://' ) + echo 'daemon' + ;; 'local://'* ) # To not be captured by next pattern echo "$url" ;; - local | 'local?'* ) + 'local?'* ) echo "local://${url#local}" ;; - daemon | 'daemon?'* ) + 'daemon?'* ) echo "unix://${url#daemon}" ;; * ) @@ -38,13 +44,13 @@ defaultStore="$(normalize_nix_store_url "$(echo "$STORE_INFO_JSON" | jq -r ".url # Test cases for `normalize_nix_store_url` itself # Normalize local store -[[ "$(normalize_nix_store_url "local://")" = "local://" ]] -[[ "$(normalize_nix_store_url "local")" = "local://" ]] +[[ "$(normalize_nix_store_url "local://")" = "local" ]] +[[ "$(normalize_nix_store_url "local")" = "local" ]] [[ "$(normalize_nix_store_url "local?foo=bar")" = "local://?foo=bar" ]] # Normalize unix domain socket remote store -[[ "$(normalize_nix_store_url "unix://")" = "unix://" ]] -[[ "$(normalize_nix_store_url "daemon")" = "unix://" ]] +[[ "$(normalize_nix_store_url "unix://")" = "daemon" ]] +[[ "$(normalize_nix_store_url "daemon")" = "daemon" ]] [[ "$(normalize_nix_store_url "daemon?x=y")" = "unix://?x=y" ]] # otherwise unchanged From 211cbe4abf0a6d1c48cf52eac97f4f92ca364e64 Mon Sep 17 00:00:00 2001 From: sinanmohd Date: Fri, 5 Sep 2025 18:32:42 +0530 Subject: [PATCH 333/382] nix/develop: pass down the interactive shell to subshells --- src/nix/develop.cc | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/nix/develop.cc b/src/nix/develop.cc index ed25e655d..f78eee59a 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -627,13 +627,12 @@ struct CmdDevelop : Common, MixEnvironment fmt("[ -n \"$PS1\" ] && PS1+=%s;\n", escapeShellArgAlways(developSettings.bashPromptSuffix.get())); } - writeFull(rcFileFd.get(), script); - setEnviron(); // prevent garbage collection until shell exits setEnv("NIX_GCROOT", gcroot.c_str()); Path shell = "bash"; + bool foundInteractive = false; try { auto state = getEvalState(); @@ -656,19 +655,17 @@ struct CmdDevelop : Common, MixEnvironment Strings{"legacyPackages." 
+ settings.thisSystem.get() + "."}, nixpkgsLockFlags); - bool found = false; - for (auto & path : Installable::toStorePathSet( getEvalStore(), store, Realise::Outputs, OperateOn::Output, {bashInstallable})) { auto s = store->printStorePath(path) + "/bin/bash"; if (pathExists(s)) { shell = s; - found = true; + foundInteractive = true; break; } } - if (!found) + if (!foundInteractive) throw Error("package 'nixpkgs#bashInteractive' does not provide a 'bin/bash'"); } catch (Error &) { @@ -678,6 +675,11 @@ struct CmdDevelop : Common, MixEnvironment // Override SHELL with the one chosen for this environment. // This is to make sure the system shell doesn't leak into the build environment. setEnv("SHELL", shell.c_str()); + // https://github.com/NixOS/nix/issues/5873 + script += fmt("SHELL=\"%s\"\n", shell); + if (foundInteractive) + script += fmt("PATH=\"%s${PATH:+:$PATH}\"\n", std::filesystem::path(shell).parent_path()); + writeFull(rcFileFd.get(), script); #ifdef _WIN32 // TODO re-enable on Windows throw UnimplementedError("Cannot yet spawn processes on Windows"); From 738924b70564294e0ecb361a795ec7780a6e8bf6 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sat, 6 Sep 2025 00:23:54 +0300 Subject: [PATCH 334/382] libexpr: Slim down Bindings to 8 bytes (on 64 bit systems) Since the only construction and push_back() calls to Bindings happen through the `BindingsBuilder` [1] we don't need to keep `capacity` around on the heap anymore. This saves 8 bytes (because of the member alignment padding) per one Bindings allocation. This isn't that much, but it does save significant memory. This also shows that the Bindings don't necessarily have to be mutable, which opens up opportunities for doing small bindings optimization and storing a 1-element Bindings directly in Value. For the following scenario: nix-env --query --available --out-path --file ../nixpkgs --eval-system x86_64-linux (nixpkgs revision: ddcddd7b09a417ca9a88899f4bd43a8edb72308d) This patch results in reduction of `sets.bytes` 13115104016 -> 12653087640, which amounts to 462 MB less bytes allocated for Bindings. [1]: Not actually, `getBuiltins` does mutate bindings, but this is pretty inconsequential and doesn't lead to problems. 
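For illustration, a minimal stand-alone sketch of the layout change (simplified stand-ins, not the real nix::Bindings / BindingsBuilder, which allocate via the GC and store Symbol/Value attributes):

    #include <cassert>
    #include <cstddef>
    #include <cstdlib>
    #include <new>

    struct Attr { int name; int value; };

    struct Bindings {
        size_t size_ = 0;   // capacity_ is gone; only the size stays on the heap
        Attr attrs[0];      // attributes follow the header in the same allocation
        void push_back(const Attr & a) { attrs[size_++] = a; }
    };

    struct BindingsBuilder {
        Bindings * bindings;
        size_t capacity_;   // the builder, not the heap object, remembers capacity

        explicit BindingsBuilder(size_t capacity)
            : bindings(new (std::malloc(sizeof(Bindings) + capacity * sizeof(Attr))) Bindings())
            , capacity_(capacity) {}

        void push_back(const Attr & a) {
            assert(bindings->size_ < capacity_);  // bounds check moved into the builder
            bindings->push_back(a);
        }

        Bindings * finish() { return bindings; }
    };

    int main() {
        BindingsBuilder b(2);
        b.push_back({0, 1});
        b.push_back({1, 2});
        // Leaking the allocation is fine for this sketch; the real code is GC-managed.
        return b.finish()->size_ == 2 ? 0 : 1;
    }
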
--- src/libexpr-c/nix_api_value.cc | 2 +- src/libexpr-tests/value/print.cc | 32 ++++++++-------- src/libexpr/attr-set.cc | 6 +-- src/libexpr/eval.cc | 4 +- src/libexpr/include/nix/expr/attr-set.hh | 49 ++++++++++++------------ src/libexpr/include/nix/expr/eval.hh | 2 +- 6 files changed, 47 insertions(+), 48 deletions(-) diff --git a/src/libexpr-c/nix_api_value.cc b/src/libexpr-c/nix_api_value.cc index 0f6595e49..093daf2f8 100644 --- a/src/libexpr-c/nix_api_value.cc +++ b/src/libexpr-c/nix_api_value.cc @@ -594,7 +594,7 @@ nix_err nix_bindings_builder_insert(nix_c_context * context, BindingsBuilder * b context->last_err_code = NIX_OK; try { auto & v = check_value_not_null(value); - nix::Symbol s = bb->builder.state.symbols.create(name); + nix::Symbol s = bb->builder.state.get().symbols.create(name); bb->builder.insert(s, &v); } NIXC_CATCH_ERRS diff --git a/src/libexpr-tests/value/print.cc b/src/libexpr-tests/value/print.cc index 739d4e40b..1959fddf2 100644 --- a/src/libexpr-tests/value/print.cc +++ b/src/libexpr-tests/value/print.cc @@ -61,7 +61,7 @@ TEST_F(ValuePrintingTests, tAttrs) Value vTwo; vTwo.mkInt(2); - BindingsBuilder builder(state, state.allocBindings(10)); + BindingsBuilder builder = state.buildBindings(10); builder.insert(state.symbols.create("one"), &vOne); builder.insert(state.symbols.create("two"), &vTwo); @@ -196,11 +196,11 @@ TEST_F(ValuePrintingTests, depthAttrs) Value vTwo; vTwo.mkInt(2); - BindingsBuilder builderEmpty(state, state.allocBindings(0)); + BindingsBuilder builderEmpty = state.buildBindings(0); Value vAttrsEmpty; vAttrsEmpty.mkAttrs(builderEmpty.finish()); - BindingsBuilder builder(state, state.allocBindings(10)); + BindingsBuilder builder = state.buildBindings(10); builder.insert(state.symbols.create("one"), &vOne); builder.insert(state.symbols.create("two"), &vTwo); builder.insert(state.symbols.create("nested"), &vAttrsEmpty); @@ -208,7 +208,7 @@ TEST_F(ValuePrintingTests, depthAttrs) Value vAttrs; vAttrs.mkAttrs(builder.finish()); - BindingsBuilder builder2(state, state.allocBindings(10)); + BindingsBuilder builder2 = state.buildBindings(10); builder2.insert(state.symbols.create("one"), &vOne); builder2.insert(state.symbols.create("two"), &vTwo); builder2.insert(state.symbols.create("nested"), &vAttrs); @@ -233,14 +233,14 @@ TEST_F(ValuePrintingTests, depthList) Value vTwo; vTwo.mkInt(2); - BindingsBuilder builder(state, state.allocBindings(10)); + BindingsBuilder builder = state.buildBindings(10); builder.insert(state.symbols.create("one"), &vOne); builder.insert(state.symbols.create("two"), &vTwo); Value vAttrs; vAttrs.mkAttrs(builder.finish()); - BindingsBuilder builder2(state, state.allocBindings(10)); + BindingsBuilder builder2 = state.buildBindings(10); builder2.insert(state.symbols.create("one"), &vOne); builder2.insert(state.symbols.create("two"), &vTwo); builder2.insert(state.symbols.create("nested"), &vAttrs); @@ -295,7 +295,7 @@ TEST_F(ValuePrintingTests, attrsTypeFirst) Value vApple; vApple.mkStringNoCopy("apple"); - BindingsBuilder builder(state, state.allocBindings(10)); + BindingsBuilder builder = state.buildBindings(10); builder.insert(state.symbols.create("type"), &vType); builder.insert(state.symbols.create("apple"), &vApple); @@ -374,7 +374,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrs) Value vTwo; vTwo.mkInt(2); - BindingsBuilder builder(state, state.allocBindings(10)); + BindingsBuilder builder = state.buildBindings(10); builder.insert(state.symbols.create("one"), &vOne); builder.insert(state.symbols.create("two"), &vTwo); @@ -392,7 
+392,7 @@ TEST_F(ValuePrintingTests, ansiColorsDerivation) Value vDerivation; vDerivation.mkStringNoCopy("derivation"); - BindingsBuilder builder(state, state.allocBindings(10)); + BindingsBuilder builder = state.buildBindings(10); builder.insert(state.s.type, &vDerivation); Value vAttrs; @@ -437,7 +437,7 @@ TEST_F(ValuePrintingTests, ansiColorsDerivationError) Value vDerivation; vDerivation.mkStringNoCopy("derivation"); - BindingsBuilder builder(state, state.allocBindings(10)); + BindingsBuilder builder = state.buildBindings(10); builder.insert(state.s.type, &vDerivation); builder.insert(state.s.drvPath, &vError); @@ -553,12 +553,12 @@ TEST_F(ValuePrintingTests, ansiColorsBlackhole) TEST_F(ValuePrintingTests, ansiColorsAttrsRepeated) { - BindingsBuilder emptyBuilder(state, state.allocBindings(1)); + BindingsBuilder emptyBuilder = state.buildBindings(1); Value vEmpty; vEmpty.mkAttrs(emptyBuilder.finish()); - BindingsBuilder builder(state, state.allocBindings(10)); + BindingsBuilder builder = state.buildBindings(10); builder.insert(state.symbols.create("a"), &vEmpty); builder.insert(state.symbols.create("b"), &vEmpty); @@ -570,7 +570,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsRepeated) TEST_F(ValuePrintingTests, ansiColorsListRepeated) { - BindingsBuilder emptyBuilder(state, state.allocBindings(1)); + BindingsBuilder emptyBuilder = state.buildBindings(1); Value vEmpty; vEmpty.mkAttrs(emptyBuilder.finish()); @@ -586,7 +586,7 @@ TEST_F(ValuePrintingTests, ansiColorsListRepeated) TEST_F(ValuePrintingTests, listRepeated) { - BindingsBuilder emptyBuilder(state, state.allocBindings(1)); + BindingsBuilder emptyBuilder = state.buildBindings(1); Value vEmpty; vEmpty.mkAttrs(emptyBuilder.finish()); @@ -609,7 +609,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided) Value vTwo; vTwo.mkInt(2); - BindingsBuilder builder(state, state.allocBindings(10)); + BindingsBuilder builder = state.buildBindings(10); builder.insert(state.symbols.create("one"), &vOne); builder.insert(state.symbols.create("two"), &vTwo); @@ -635,8 +635,6 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided) TEST_F(ValuePrintingTests, ansiColorsListElided) { - BindingsBuilder emptyBuilder(state, state.allocBindings(1)); - Value vOne; vOne.mkInt(1); diff --git a/src/libexpr/attr-set.cc b/src/libexpr/attr-set.cc index 3a06441e9..eb44b0dd9 100644 --- a/src/libexpr/attr-set.cc +++ b/src/libexpr/attr-set.cc @@ -16,19 +16,19 @@ Bindings * EvalState::allocBindings(size_t capacity) throw Error("attribute set of size %d is too big", capacity); nrAttrsets++; nrAttrsInAttrsets += capacity; - return new (allocBytes(sizeof(Bindings) + sizeof(Attr) * capacity)) Bindings((Bindings::size_t) capacity); + return new (allocBytes(sizeof(Bindings) + sizeof(Attr) * capacity)) Bindings(); } Value & BindingsBuilder::alloc(Symbol name, PosIdx pos) { - auto value = state.allocValue(); + auto value = state.get().allocValue(); bindings->push_back(Attr(name, value, pos)); return *value; } Value & BindingsBuilder::alloc(std::string_view name, PosIdx pos) { - return alloc(state.symbols.create(name), pos); + return alloc(state.get().symbols.create(name), pos); } void Bindings::sort() diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index fd2108537..9d740c717 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -205,7 +205,7 @@ EvalState::EvalState( , settings{settings} , symbols(StaticEvalSymbols::staticSymbolTable()) , repair(NoRepair) - , emptyBindings(0) + , emptyBindings(Bindings()) , storeFS(makeMountedSourceAccessor({ {CanonPath::root, 
makeEmptySourceAccessor()}, /* In the pure eval case, we can simply require @@ -1218,7 +1218,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) *vOverrides, [&]() { return vOverrides->determinePos(noPos); }, "while evaluating the `__overrides` attribute"); - bindings.grow(state.allocBindings(bindings.capacity() + vOverrides->attrs()->size())); + bindings.grow(state.buildBindings(bindings.capacity() + vOverrides->attrs()->size())); for (auto & i : *vOverrides->attrs()) { AttrDefs::iterator j = attrs.find(i.name); if (j != attrs.end()) { diff --git a/src/libexpr/include/nix/expr/attr-set.hh b/src/libexpr/include/nix/expr/attr-set.hh index 85bba1099..b5e927a7e 100644 --- a/src/libexpr/include/nix/expr/attr-set.hh +++ b/src/libexpr/include/nix/expr/attr-set.hh @@ -5,6 +5,7 @@ #include "nix/expr/symbol-table.hh" #include +#include namespace nix { @@ -54,16 +55,14 @@ public: PosIdx pos; private: - size_t size_, capacity_; + size_t size_ = 0; Attr attrs[0]; - Bindings(size_t capacity) - : size_(0) - , capacity_(capacity) - { - } - - Bindings(const Bindings & bindings) = delete; + Bindings() = default; + Bindings(const Bindings &) = delete; + Bindings(Bindings &&) = delete; + Bindings & operator=(const Bindings &) = delete; + Bindings & operator=(Bindings &&) = delete; public: size_t size() const @@ -82,7 +81,6 @@ public: void push_back(const Attr & attr) { - assert(size_ < capacity_); attrs[size_++] = attr; } @@ -136,11 +134,6 @@ public: void sort(); - size_t capacity() const - { - return capacity_; - } - /** * Returns the attributes in lexicographically sorted order. */ @@ -165,22 +158,29 @@ public: * order at the end. The only way to consume a BindingsBuilder is to * call finish(), which sorts the bindings. */ -class BindingsBuilder +class BindingsBuilder final { - Bindings * bindings; - public: // needed by std::back_inserter using value_type = Attr; + using size_type = Bindings::size_t; - EvalState & state; +private: + Bindings * bindings; + Bindings::size_t capacity_; - BindingsBuilder(EvalState & state, Bindings * bindings) + friend class EvalState; + + BindingsBuilder(EvalState & state, Bindings * bindings, size_type capacity) : bindings(bindings) + , capacity_(capacity) , state(state) { } +public: + std::reference_wrapper state; + void insert(Symbol name, Value * value, PosIdx pos = noPos) { insert(Attr(name, value, pos)); @@ -193,6 +193,7 @@ public: void push_back(const Attr & attr) { + assert(bindings->size() < capacity_); bindings->push_back(attr); } @@ -211,16 +212,16 @@ public: return bindings; } - size_t capacity() + size_t capacity() const noexcept { - return bindings->capacity(); + return capacity_; } - void grow(Bindings * newBindings) + void grow(BindingsBuilder newBindings) { for (auto & i : *bindings) - newBindings->push_back(i); - bindings = newBindings; + newBindings.push_back(i); + std::swap(*this, newBindings); } friend struct ExprAttrs; diff --git a/src/libexpr/include/nix/expr/eval.hh b/src/libexpr/include/nix/expr/eval.hh index 04729b100..5015a009b 100644 --- a/src/libexpr/include/nix/expr/eval.hh +++ b/src/libexpr/include/nix/expr/eval.hh @@ -879,7 +879,7 @@ public: BindingsBuilder buildBindings(size_t capacity) { - return BindingsBuilder(*this, allocBindings(capacity)); + return BindingsBuilder(*this, allocBindings(capacity), capacity); } ListBuilder buildList(size_t size) From bbdabe497391399bb1e78e18be25ba3c479b1f0f Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sat, 6 Sep 2025 16:36:16 +0300 Subject: [PATCH 335/382] libexpr: Remove decl for 
undefined overload of Value::mkPath --- src/libexpr/include/nix/expr/value.hh | 1 - 1 file changed, 1 deletion(-) diff --git a/src/libexpr/include/nix/expr/value.hh b/src/libexpr/include/nix/expr/value.hh index 9d0cf1e54..55ab797c7 100644 --- a/src/libexpr/include/nix/expr/value.hh +++ b/src/libexpr/include/nix/expr/value.hh @@ -972,7 +972,6 @@ public: void mkStringMove(const char * s, const NixStringContext & context); void mkPath(const SourcePath & path); - void mkPath(std::string_view path); inline void mkPath(SourceAccessor * accessor, const char * path) noexcept { From a44dcbff13b7c70aaefd9e99517b30c0546f36d9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Sat, 6 Sep 2025 16:32:19 +0200 Subject: [PATCH 336/382] Remove unused function setChildSignalMask() --- .../unix/include/nix/util/signals-impl.hh | 9 --------- src/libutil/unix/signals.cc | 20 ------------------- 2 files changed, 29 deletions(-) diff --git a/src/libutil/unix/include/nix/util/signals-impl.hh b/src/libutil/unix/include/nix/util/signals-impl.hh index 1bcc90cdf..2456119be 100644 --- a/src/libutil/unix/include/nix/util/signals-impl.hh +++ b/src/libutil/unix/include/nix/util/signals-impl.hh @@ -42,13 +42,6 @@ extern thread_local std::function interruptCheck; void _interrupted(); -/** - * Sets the signal mask. Like saveSignalMask() but for a signal set that doesn't - * necessarily match the current thread's mask. - * See saveSignalMask() to set the saved mask to the current mask. - */ -void setChildSignalMask(sigset_t * sigs); - /** * Start a thread that handles various signals. Also block those signals * on the current thread (and thus any threads created by it). @@ -60,8 +53,6 @@ void startSignalHandlerThread(); /** * Saves the signal mask, which is the signal mask that nix will restore * before creating child processes. - * See setChildSignalMask() to set an arbitrary signal mask instead of the - * current mask. */ void saveSignalMask(); diff --git a/src/libutil/unix/signals.cc b/src/libutil/unix/signals.cc index 8a94cc2b1..d6efd6aa7 100644 --- a/src/libutil/unix/signals.cc +++ b/src/libutil/unix/signals.cc @@ -99,26 +99,6 @@ void unix::triggerInterrupt() static sigset_t savedSignalMask; static bool savedSignalMaskIsSet = false; -void unix::setChildSignalMask(sigset_t * sigs) -{ - assert(sigs); // C style function, but think of sigs as a reference - -#if (defined(_POSIX_C_SOURCE) && _POSIX_C_SOURCE >= 1) || (defined(_XOPEN_SOURCE) && _XOPEN_SOURCE) \ - || (defined(_POSIX_SOURCE) && _POSIX_SOURCE) - sigemptyset(&savedSignalMask); - // There's no "assign" or "copy" function, so we rely on (math) idempotence - // of the or operator: a or a = a. - sigorset(&savedSignalMask, sigs, sigs); -#else - // Without sigorset, our best bet is to assume that sigset_t is a type that - // can be assigned directly, such as is the case for a sigset_t defined as - // an integer type. - savedSignalMask = *sigs; -#endif - - savedSignalMaskIsSet = true; -} - void unix::saveSignalMask() { if (sigprocmask(SIG_BLOCK, nullptr, &savedSignalMask)) From f363d958a7e245a684aef3802449d4f67959761d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 5 Sep 2025 16:55:04 +0200 Subject: [PATCH 337/382] Fix hang in enterChroot() draining userNamespaceSync Calling `drainFD()` will hang if another process has the write side open, since then the child won't get an EOF. This can happen if we have multiple threads doing a build, since in that case another thread may fork a child process that inherits the write side of the first thread. 
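As a stand-alone illustration of that failure mode (a plain pipe demo, not the builder code), the reader below only sees EOF once every copy of the write side is closed, including the copy inherited by the forked child:

    #include <cstdio>
    #include <sys/types.h>
    #include <sys/wait.h>
    #include <unistd.h>

    int main() {
        int fds[2];
        pipe(fds);                    // fds[0]: read side, fds[1]: write side

        // Stands in for another thread forking a child that inherits the
        // write side and keeps it open without exec'ing.
        pid_t helper = fork();
        if (helper == 0) {
            sleep(5);                 // holds its inherited copy of fds[1]
            _exit(0);
        }

        close(fds[1]);                // we close *our* copy of the write side

        // Still blocks for ~5 seconds: EOF only arrives after the helper
        // exits and the last copy of the write side is gone.
        char buf[64];
        ssize_t n = read(fds[0], buf, sizeof buf);
        printf("read returned %zd after the helper exited\n", (ssize_t) n);

        waitpid(helper, nullptr, 0);
        return 0;
    }
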
We could set O_CLOEXEC on the write side (using pipe2()) but it won't help here since we don't always do an exec() in the child, e.g. in the case of builtin builders. (We need a "close-on-fork", not a "close-on-exec".) --- .../unix/build/linux-derivation-builder.cc | 23 +++++++++++++++---- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/src/libstore/unix/build/linux-derivation-builder.cc b/src/libstore/unix/build/linux-derivation-builder.cc index d474c001e..35730644b 100644 --- a/src/libstore/unix/build/linux-derivation-builder.cc +++ b/src/libstore/unix/build/linux-derivation-builder.cc @@ -362,9 +362,21 @@ struct ChrootLinuxDerivationBuilder : ChrootDerivationBuilder, LinuxDerivationBu userNamespaceSync.readSide = -1; - /* Close the write side to prevent runChild() from hanging - reading from this. */ - Finally cleanup([&]() { userNamespaceSync.writeSide = -1; }); + /* Make sure that we write *something* to the child in case of + an exception. Note that merely closing + `userNamespaceSync.writeSide` doesn't work in + multi-threaded Nix, since several child processes may have + inherited `writeSide` (and O_CLOEXEC doesn't help because + the children may not do an execve). */ + bool userNamespaceSyncDone = false; + Finally cleanup([&]() { + try { + if (!userNamespaceSyncDone) + writeFull(userNamespaceSync.writeSide.get(), "0\n"); + } catch (...) { + } + userNamespaceSync.writeSide = -1; + }); auto ss = tokenizeString>(readLine(sendPid.readSide.get())); assert(ss.size() == 1); @@ -419,14 +431,15 @@ struct ChrootLinuxDerivationBuilder : ChrootDerivationBuilder, LinuxDerivationBu writeFile(*cgroup + "/cgroup.procs", fmt("%d", (pid_t) pid)); /* Signal the builder that we've updated its user namespace. */ - writeFull(userNamespaceSync.writeSide.get(), "1"); + writeFull(userNamespaceSync.writeSide.get(), "1\n"); + userNamespaceSyncDone = true; } void enterChroot() override { userNamespaceSync.writeSide = -1; - if (drainFD(userNamespaceSync.readSide.get()) != "1") + if (readLine(userNamespaceSync.readSide.get()) != "1") throw Error("user namespace initialisation failed"); userNamespaceSync.readSide = -1; From a73cf447ac4611023f78f01e6a81ac94dc513f0a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 4 Sep 2025 11:54:59 +0200 Subject: [PATCH 338/382] Reduce false sharing between pathInfoCache and Store `perf c2c` shows a lot of cacheline conflicts between purely read-only Store methods (like `parseStorePath()`) and the Sync classes. So allocate pathInfoCache separately to avoid that. 
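As a rough illustration of the pattern (generic std types, not the actual Store / Sync / LRUCache classes), the lock-protected cache moves into its own heap allocation so its frequently written lock and cache state no longer share cache lines with the read-only members:

    #include <map>
    #include <memory>
    #include <mutex>
    #include <string>

    struct PathInfo { std::string narHash; };

    struct PathInfoCache {
        std::mutex mutex;                          // written on every lookup
        std::map<std::string, PathInfo> entries;
    };

    struct Store {
        // Read-only after construction; touched constantly by hot helpers
        // such as parseStorePath().
        const std::string storeDir = "/nix/store";

        // Behind a pointer: the mutable state lives in a separate allocation,
        // away from the cache lines holding the immutable fields above.
        std::shared_ptr<PathInfoCache> pathInfoCache = std::make_shared<PathInfoCache>();

        bool isValidPath(const std::string & hashPart) {
            std::lock_guard<std::mutex> lock(pathInfoCache->mutex);
            return pathInfoCache->entries.count(storeDir + "/" + hashPart) != 0;
        }
    };

    int main() {
        Store store;
        return store.isValidPath("abc") ? 1 : 0;
    }
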
--- src/libstore/binary-cache-store.cc | 7 +-- src/libstore/include/nix/store/store-api.hh | 11 ++-- src/libstore/local-store.cc | 13 ++--- src/libstore/remote-store.cc | 5 +- src/libstore/store-api.cc | 59 ++++++++------------- src/libutil/include/nix/util/ref.hh | 3 ++ src/libutil/include/nix/util/sync.hh | 2 + 7 files changed, 37 insertions(+), 63 deletions(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 0a44b0cf0..f4e06305a 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -125,11 +125,8 @@ void BinaryCacheStore::writeNarInfo(ref narInfo) upsertFile(narInfoFile, narInfo->to_string(*this), "text/x-nix-narinfo"); - { - auto state_(state.lock()); - state_->pathInfoCache.upsert( - std::string(narInfo->path.to_string()), PathInfoCacheValue{.value = std::shared_ptr(narInfo)}); - } + pathInfoCache->lock()->upsert( + std::string(narInfo->path.to_string()), PathInfoCacheValue{.value = std::shared_ptr(narInfo)}); if (diskCache) diskCache->upsertNarInfo( diff --git a/src/libstore/include/nix/store/store-api.hh b/src/libstore/include/nix/store/store-api.hh index 7d019ea21..dad5c9e8d 100644 --- a/src/libstore/include/nix/store/store-api.hh +++ b/src/libstore/include/nix/store/store-api.hh @@ -310,14 +310,11 @@ protected: } }; - struct State - { - LRUCache pathInfoCache; - }; - void invalidatePathInfoCacheFor(const StorePath & path); - SharedSync state; + // Note: this is a `ref` to avoid false sharing with immutable + // bits of `Store`. + ref>> pathInfoCache; std::shared_ptr diskCache; @@ -860,7 +857,7 @@ public: */ void clearPathInfoCache() { - state.lock()->pathInfoCache.clear(); + pathInfoCache->lock()->clear(); } /** diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 7872d4f93..1d3dd48b0 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -716,12 +716,8 @@ uint64_t LocalStore::addValidPath(State & state, const ValidPathInfo & info, boo } } - { - auto state_(Store::state.lock()); - state_->pathInfoCache.upsert( - std::string(info.path.to_string()), - PathInfoCacheValue{.value = std::make_shared(info)}); - } + pathInfoCache->lock()->upsert( + std::string(info.path.to_string()), PathInfoCacheValue{.value = std::make_shared(info)}); return id; } @@ -1023,10 +1019,7 @@ void LocalStore::invalidatePath(State & state, const StorePath & path) /* Note that the foreign key constraints on the Refs table take care of deleting the references entries for `path'. 
*/ - { - auto state_(Store::state.lock()); - state_->pathInfoCache.erase(std::string(path.to_string())); - } + pathInfoCache->lock()->erase(std::string(path.to_string())); } const PublicKeys & LocalStore::getPublicKeys() diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 5694fa466..8c0a815d8 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -764,10 +764,7 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results) results.bytesFreed = readLongLong(conn->from); readLongLong(conn->from); // obsolete - { - auto state_(Store::state.lock()); - state_->pathInfoCache.clear(); - } + pathInfoCache->lock()->clear(); } void RemoteStore::optimiseStore() diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index d96be5965..acc6da912 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -306,7 +306,7 @@ StringSet Store::Config::getDefaultSystemFeatures() Store::Store(const Store::Config & config) : StoreDirConfig{config} , config{config} - , state({(size_t) config.pathInfoCacheSize}) + , pathInfoCache(make_ref((size_t) config.pathInfoCacheSize)) { assertLibStoreInitialized(); } @@ -326,7 +326,7 @@ bool Store::PathInfoCacheValue::isKnownNow() void Store::invalidatePathInfoCacheFor(const StorePath & path) { - state.lock()->pathInfoCache.erase(path.to_string()); + pathInfoCache->lock()->erase(path.to_string()); } std::map> Store::queryStaticPartialDerivationOutputMap(const StorePath & path) @@ -448,13 +448,10 @@ void Store::querySubstitutablePathInfos(const StorePathCAMap & paths, Substituta bool Store::isValidPath(const StorePath & storePath) { - { - auto state_(state.lock()); - auto res = state_->pathInfoCache.get(storePath.to_string()); - if (res && res->isKnownNow()) { - stats.narInfoReadAverted++; - return res->didExist(); - } + auto res = pathInfoCache->lock()->get(storePath.to_string()); + if (res && res->isKnownNow()) { + stats.narInfoReadAverted++; + return res->didExist(); } if (diskCache) { @@ -462,8 +459,7 @@ bool Store::isValidPath(const StorePath & storePath) config.getReference().render(/*FIXME withParams=*/false), std::string(storePath.hashPart())); if (res.first != NarInfoDiskCache::oUnknown) { stats.narInfoReadAverted++; - auto state_(state.lock()); - state_->pathInfoCache.upsert( + pathInfoCache->lock()->upsert( storePath.to_string(), res.first == NarInfoDiskCache::oInvalid ? PathInfoCacheValue{} : PathInfoCacheValue{.value = res.second}); @@ -518,30 +514,25 @@ std::optional> Store::queryPathInfoFromClie { auto hashPart = std::string(storePath.hashPart()); - { - auto res = state.lock()->pathInfoCache.get(storePath.to_string()); - if (res && res->isKnownNow()) { - stats.narInfoReadAverted++; - if (res->didExist()) - return std::make_optional(res->value); - else - return std::make_optional(nullptr); - } + auto res = pathInfoCache->lock()->get(storePath.to_string()); + if (res && res->isKnownNow()) { + stats.narInfoReadAverted++; + if (res->didExist()) + return std::make_optional(res->value); + else + return std::make_optional(nullptr); } if (diskCache) { auto res = diskCache->lookupNarInfo(config.getReference().render(/*FIXME withParams=*/false), hashPart); if (res.first != NarInfoDiskCache::oUnknown) { stats.narInfoReadAverted++; - { - auto state_(state.lock()); - state_->pathInfoCache.upsert( - storePath.to_string(), - res.first == NarInfoDiskCache::oInvalid ? 
PathInfoCacheValue{} - : PathInfoCacheValue{.value = res.second}); - if (res.first == NarInfoDiskCache::oInvalid || !goodStorePath(storePath, res.second->path)) - return std::make_optional(nullptr); - } + pathInfoCache->lock()->upsert( + storePath.to_string(), + res.first == NarInfoDiskCache::oInvalid ? PathInfoCacheValue{} + : PathInfoCacheValue{.value = res.second}); + if (res.first == NarInfoDiskCache::oInvalid || !goodStorePath(storePath, res.second->path)) + return std::make_optional(nullptr); assert(res.second); return std::make_optional(res.second); } @@ -577,10 +568,7 @@ void Store::queryPathInfo(const StorePath & storePath, CallbackupsertNarInfo(config.getReference().render(/*FIXME withParams=*/false), hashPart, info); - { - auto state_(state.lock()); - state_->pathInfoCache.upsert(storePath.to_string(), PathInfoCacheValue{.value = info}); - } + pathInfoCache->lock()->upsert(storePath.to_string(), PathInfoCacheValue{.value = info}); if (!info || !goodStorePath(storePath, info->path)) { stats.narInfoMissing++; @@ -803,10 +791,7 @@ StorePathSet Store::exportReferences(const StorePathSet & storePaths, const Stor const Store::Stats & Store::getStats() { - { - auto state_(state.readLock()); - stats.pathInfoCacheSize = state_->pathInfoCache.size(); - } + stats.pathInfoCacheSize = pathInfoCache->readLock()->size(); return stats; } diff --git a/src/libutil/include/nix/util/ref.hh b/src/libutil/include/nix/util/ref.hh index fb27949c0..7cf5ef25e 100644 --- a/src/libutil/include/nix/util/ref.hh +++ b/src/libutil/include/nix/util/ref.hh @@ -18,6 +18,9 @@ private: std::shared_ptr p; public: + + using element_type = T; + explicit ref(const std::shared_ptr & p) : p(p) { diff --git a/src/libutil/include/nix/util/sync.hh b/src/libutil/include/nix/util/sync.hh index 262fc328b..3a41d1bd8 100644 --- a/src/libutil/include/nix/util/sync.hh +++ b/src/libutil/include/nix/util/sync.hh @@ -36,6 +36,8 @@ private: public: + using element_type = T; + SyncBase() {} SyncBase(const T & data) From e791ede495a47762a5b6150a056ec40a2f7c380f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 4 Sep 2025 12:08:25 +0200 Subject: [PATCH 339/382] LocalStore::State: Put behind a ref to reduce false sharing --- src/libstore/gc.cc | 6 +-- src/libstore/include/nix/store/local-store.hh | 2 +- src/libstore/local-store.cc | 54 ++++++++----------- 3 files changed, 25 insertions(+), 37 deletions(-) diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 0366fe0b0..385215fe0 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -931,7 +931,7 @@ void LocalStore::autoGC(bool sync) std::shared_future future; { - auto state(_state.lock()); + auto state(_state->lock()); if (state->gcRunning) { future = state->gcFuture; @@ -964,7 +964,7 @@ void LocalStore::autoGC(bool sync) /* Wake up any threads waiting for the auto-GC to finish. */ Finally wakeup([&]() { - auto state(_state.lock()); + auto state(_state->lock()); state->gcRunning = false; state->lastGCCheck = std::chrono::steady_clock::now(); promise.set_value(); @@ -979,7 +979,7 @@ void LocalStore::autoGC(bool sync) collectGarbage(options, results); - _state.lock()->availAfterGC = getAvail(); + _state->lock()->availAfterGC = getAvail(); } catch (...) 
{ // FIXME: we could propagate the exception to the diff --git a/src/libstore/include/nix/store/local-store.hh b/src/libstore/include/nix/store/local-store.hh index f7dfcb5ad..444d1b28f 100644 --- a/src/libstore/include/nix/store/local-store.hh +++ b/src/libstore/include/nix/store/local-store.hh @@ -174,7 +174,7 @@ private: std::unique_ptr publicKeys; }; - Sync _state; + ref> _state; public: diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 112d5b14c..058814f33 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -118,6 +118,7 @@ LocalStore::LocalStore(ref config) : Store{*config} , LocalFSStore{*config} , config{config} + , _state(make_ref>()) , dbDir(config->stateDir + "/db") , linksDir(config->realStoreDir + "/.links") , reservedPath(dbDir + "/reserved") @@ -125,7 +126,7 @@ LocalStore::LocalStore(ref config) , tempRootsDir(config->stateDir + "/temproots") , fnTempRoots(fmt("%s/%d", tempRootsDir, getpid())) { - auto state(_state.lock()); + auto state(_state->lock()); state->stmts = std::make_unique(); /* Create missing state directories if they don't already exist. */ @@ -433,7 +434,7 @@ LocalStore::~LocalStore() std::shared_future future; { - auto state(_state.lock()); + auto state(_state->lock()); if (state->gcRunning) future = state->gcFuture; } @@ -629,7 +630,7 @@ void LocalStore::registerDrvOutput(const Realisation & info) { experimentalFeatureSettings.require(Xp::CaDerivations); retrySQLite([&]() { - auto state(_state.lock()); + auto state(_state->lock()); if (auto oldR = queryRealisation_(*state, info.id)) { if (info.isCompatibleWith(*oldR)) { auto combinedSignatures = oldR->signatures; @@ -736,8 +737,7 @@ void LocalStore::queryPathInfoUncached( { try { callback(retrySQLite>([&]() { - auto state(_state.lock()); - return queryPathInfoInternal(*state, path); + return queryPathInfoInternal(*_state->lock(), path); })); } catch (...) 
{ @@ -819,10 +819,7 @@ bool LocalStore::isValidPath_(State & state, const StorePath & path) bool LocalStore::isValidPathUncached(const StorePath & path) { - return retrySQLite([&]() { - auto state(_state.lock()); - return isValidPath_(*state, path); - }); + return retrySQLite([&]() { return isValidPath_(*_state->lock(), path); }); } StorePathSet LocalStore::queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute) @@ -837,7 +834,7 @@ StorePathSet LocalStore::queryValidPaths(const StorePathSet & paths, SubstituteF StorePathSet LocalStore::queryAllValidPaths() { return retrySQLite([&]() { - auto state(_state.lock()); + auto state(_state->lock()); auto use(state->stmts->QueryValidPaths.use()); StorePathSet res; while (use.next()) @@ -856,16 +853,13 @@ void LocalStore::queryReferrers(State & state, const StorePath & path, StorePath void LocalStore::queryReferrers(const StorePath & path, StorePathSet & referrers) { - return retrySQLite([&]() { - auto state(_state.lock()); - queryReferrers(*state, path, referrers); - }); + return retrySQLite([&]() { queryReferrers(*_state->lock(), path, referrers); }); } StorePathSet LocalStore::queryValidDerivers(const StorePath & path) { return retrySQLite([&]() { - auto state(_state.lock()); + auto state(_state->lock()); auto useQueryValidDerivers(state->stmts->QueryValidDerivers.use()(printStorePath(path))); @@ -881,7 +875,7 @@ std::map> LocalStore::queryStaticPartialDerivationOutputMap(const StorePath & path) { return retrySQLite>>([&]() { - auto state(_state.lock()); + auto state(_state->lock()); std::map> outputs; uint64_t drvId; drvId = queryValidPathId(*state, path); @@ -901,7 +895,7 @@ std::optional LocalStore::queryPathFromHashPart(const std::string & h Path prefix = storeDir + "/" + hashPart; return retrySQLite>([&]() -> std::optional { - auto state(_state.lock()); + auto state(_state->lock()); auto useQueryPathFromHashPart(state->stmts->QueryPathFromHashPart.use()(prefix)); @@ -966,7 +960,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos) #endif return retrySQLite([&]() { - auto state(_state.lock()); + auto state(_state->lock()); SQLiteTxn txn(state->db); StorePathSet paths; @@ -1036,7 +1030,7 @@ void LocalStore::invalidatePath(State & state, const StorePath & path) const PublicKeys & LocalStore::getPublicKeys() { - auto state(_state.lock()); + auto state(_state->lock()); if (!state->publicKeys) state->publicKeys = std::make_unique(getDefaultPublicKeys()); return *state->publicKeys; @@ -1359,7 +1353,7 @@ std::pair LocalStore::createTempDirInStore() void LocalStore::invalidatePathChecked(const StorePath & path) { retrySQLite([&]() { - auto state(_state.lock()); + auto state(_state->lock()); SQLiteTxn txn(state->db); @@ -1459,10 +1453,8 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) update = true; } - if (update) { - auto state(_state.lock()); - updatePathInfo(*state, *info); - } + if (update) + updatePathInfo(*_state->lock(), *info); } } catch (Error & e) { @@ -1549,8 +1541,7 @@ void LocalStore::verifyPath( if (canInvalidate) { printInfo("path '%s' disappeared, removing from database...", pathS); - auto state(_state.lock()); - invalidatePath(*state, path); + invalidatePath(*_state->lock(), path); } else { printError("path '%s' disappeared, but it still has valid referrers!", pathS); if (repair) @@ -1582,14 +1573,13 @@ std::optional LocalStore::isTrustedClient() void LocalStore::vacuumDB() { - auto state(_state.lock()); - state->db.exec("vacuum"); + _state->lock()->db.exec("vacuum"); } void 
LocalStore::addSignatures(const StorePath & storePath, const StringSet & sigs) { retrySQLite([&]() { - auto state(_state.lock()); + auto state(_state->lock()); SQLiteTxn txn(state->db); @@ -1651,10 +1641,8 @@ void LocalStore::queryRealisationUncached( const DrvOutput & id, Callback> callback) noexcept { try { - auto maybeRealisation = retrySQLite>([&]() { - auto state(_state.lock()); - return queryRealisation_(*state, id); - }); + auto maybeRealisation = + retrySQLite>([&]() { return queryRealisation_(*_state->lock(), id); }); if (maybeRealisation) callback(std::make_shared(maybeRealisation.value())); else From 14c001d6133b5dadc16b086ebaf940eb9f6ffe32 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 26 Aug 2025 21:33:12 +0200 Subject: [PATCH 340/382] Add a test for `nix flake check` building checks --- tests/functional/flakes/check.sh | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/tests/functional/flakes/check.sh b/tests/functional/flakes/check.sh index 27e73444a..50a2b21c9 100755 --- a/tests/functional/flakes/check.sh +++ b/tests/functional/flakes/check.sh @@ -135,3 +135,35 @@ EOF checkRes=$(nix flake check --all-systems $flakeDir 2>&1 && fail "nix flake check --all-systems should have failed" || true) echo "$checkRes" | grepQuiet "formatter.system-1" + +# Test whether `nix flake check` builds checks. +cat > $flakeDir/flake.nix < $flakeDir/flake.nix < Date: Sun, 7 Sep 2025 15:22:20 +0200 Subject: [PATCH 341/382] Generalize recognized git url schemas (#13925) Use `parseUrlScheme` instead of manually parsing `url.scheme`. --- src/libfetchers/git.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index a7acc316e..f750d907d 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -163,8 +163,8 @@ struct GitInputScheme : InputScheme { std::optional inputFromURL(const Settings & settings, const ParsedURL & url, bool requireTree) const override { - if (url.scheme != "git" && url.scheme != "git+http" && url.scheme != "git+https" && url.scheme != "git+ssh" - && url.scheme != "git+file") + auto parsedScheme = parseUrlScheme(url.scheme); + if (parsedScheme.application != "git") return {}; auto url2(url); From 258d41bfb6da190c88614b33faa0261e8d585b9a Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Sun, 7 Sep 2025 16:21:22 +0200 Subject: [PATCH 342/382] Test that `dir` is propagated from registry entry --- tests/functional/flakes/flakes.sh | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/tests/functional/flakes/flakes.sh b/tests/functional/flakes/flakes.sh index 7fd9dc9b5..8fb7ce8e2 100755 --- a/tests/functional/flakes/flakes.sh +++ b/tests/functional/flakes/flakes.sh @@ -470,3 +470,20 @@ cat > "$flake3Dir/flake.nix" < "$subdirFlakeDir"/flake.nix < Date: Sun, 7 Sep 2025 15:27:14 +0200 Subject: [PATCH 343/382] Fix flake registry ignoring `dir` parameter This broke in e3042f10afb5f4e64ef9a5e08bef52b168cb4bf1. 
--- src/libfetchers/include/nix/fetchers/input-cache.hh | 1 + src/libfetchers/input-cache.cc | 6 ++++-- src/libflake/flake.cc | 5 +++-- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/libfetchers/include/nix/fetchers/input-cache.hh b/src/libfetchers/include/nix/fetchers/input-cache.hh index b2fc84245..ee2fa20c4 100644 --- a/src/libfetchers/include/nix/fetchers/input-cache.hh +++ b/src/libfetchers/include/nix/fetchers/input-cache.hh @@ -11,6 +11,7 @@ struct InputCache ref accessor; Input resolvedInput; Input lockedInput; + Attrs extraAttrs; }; CachedResult getAccessor(ref store, const Input & originalInput, UseRegistries useRegistries); diff --git a/src/libfetchers/input-cache.cc b/src/libfetchers/input-cache.cc index 1422c1d9a..c415b5417 100644 --- a/src/libfetchers/input-cache.cc +++ b/src/libfetchers/input-cache.cc @@ -8,6 +8,7 @@ namespace nix::fetchers { InputCache::CachedResult InputCache::getAccessor(ref store, const Input & originalInput, UseRegistries useRegistries) { + Attrs extraAttrs; auto fetched = lookup(originalInput); Input resolvedInput = originalInput; @@ -17,7 +18,8 @@ InputCache::getAccessor(ref store, const Input & originalInput, UseRegist fetched.emplace(CachedInput{.lockedInput = lockedInput, .accessor = accessor}); } else { if (useRegistries != UseRegistries::No) { - auto [res, extraAttrs] = lookupInRegistries(store, originalInput, useRegistries); + auto [res, extraAttrs_] = lookupInRegistries(store, originalInput, useRegistries); + extraAttrs = extraAttrs_; resolvedInput = std::move(res); fetched = lookup(resolvedInput); if (!fetched) { @@ -36,7 +38,7 @@ InputCache::getAccessor(ref store, const Input & originalInput, UseRegist debug("got tree '%s' from '%s'", fetched->accessor, fetched->lockedInput.to_string()); - return {fetched->accessor, resolvedInput, fetched->lockedInput}; + return {fetched->accessor, resolvedInput, fetched->lockedInput, extraAttrs}; } struct InputCacheImpl : InputCache diff --git a/src/libflake/flake.cc b/src/libflake/flake.cc index 56e455cb6..3acf589a5 100644 --- a/src/libflake/flake.cc +++ b/src/libflake/flake.cc @@ -341,8 +341,9 @@ static Flake getFlake( // Fetch a lazy tree first. auto cachedInput = state.inputCache->getAccessor(state.store, originalRef.input, useRegistries); - auto resolvedRef = FlakeRef(std::move(cachedInput.resolvedInput), originalRef.subdir); - auto lockedRef = FlakeRef(std::move(cachedInput.lockedInput), originalRef.subdir); + auto subdir = fetchers::maybeGetStrAttr(cachedInput.extraAttrs, "dir").value_or(originalRef.subdir); + auto resolvedRef = FlakeRef(std::move(cachedInput.resolvedInput), subdir); + auto lockedRef = FlakeRef(std::move(cachedInput.lockedInput), subdir); // Parse/eval flake.nix to get at the input.self attributes. 
auto flake = readFlake(state, originalRef, resolvedRef, lockedRef, {cachedInput.accessor}, lockRootAttrPath); From 9c832a08b07f8b87a689e877357ff4a4875cab5f Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Sun, 7 Sep 2025 17:06:29 +0200 Subject: [PATCH 344/382] fixup: cached case I couldn't come up with a test that failed before this, but my existing test still passes so :shrug: --- src/libfetchers/include/nix/fetchers/input-cache.hh | 1 + src/libfetchers/input-cache.cc | 9 ++++----- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/libfetchers/include/nix/fetchers/input-cache.hh b/src/libfetchers/include/nix/fetchers/input-cache.hh index ee2fa20c4..402412071 100644 --- a/src/libfetchers/include/nix/fetchers/input-cache.hh +++ b/src/libfetchers/include/nix/fetchers/input-cache.hh @@ -20,6 +20,7 @@ struct InputCache { Input lockedInput; ref accessor; + Attrs extraAttrs; }; virtual std::optional lookup(const Input & originalInput) const = 0; diff --git a/src/libfetchers/input-cache.cc b/src/libfetchers/input-cache.cc index c415b5417..c44f1a236 100644 --- a/src/libfetchers/input-cache.cc +++ b/src/libfetchers/input-cache.cc @@ -8,7 +8,6 @@ namespace nix::fetchers { InputCache::CachedResult InputCache::getAccessor(ref store, const Input & originalInput, UseRegistries useRegistries) { - Attrs extraAttrs; auto fetched = lookup(originalInput); Input resolvedInput = originalInput; @@ -18,13 +17,13 @@ InputCache::getAccessor(ref store, const Input & originalInput, UseRegist fetched.emplace(CachedInput{.lockedInput = lockedInput, .accessor = accessor}); } else { if (useRegistries != UseRegistries::No) { - auto [res, extraAttrs_] = lookupInRegistries(store, originalInput, useRegistries); - extraAttrs = extraAttrs_; + auto [res, extraAttrs] = lookupInRegistries(store, originalInput, useRegistries); resolvedInput = std::move(res); fetched = lookup(resolvedInput); if (!fetched) { auto [accessor, lockedInput] = resolvedInput.getAccessor(store); - fetched.emplace(CachedInput{.lockedInput = lockedInput, .accessor = accessor}); + fetched.emplace( + CachedInput{.lockedInput = lockedInput, .accessor = accessor, .extraAttrs = extraAttrs}); } upsert(resolvedInput, *fetched); } else { @@ -38,7 +37,7 @@ InputCache::getAccessor(ref store, const Input & originalInput, UseRegist debug("got tree '%s' from '%s'", fetched->accessor, fetched->lockedInput.to_string()); - return {fetched->accessor, resolvedInput, fetched->lockedInput, extraAttrs}; + return {fetched->accessor, resolvedInput, fetched->lockedInput, fetched->extraAttrs}; } struct InputCacheImpl : InputCache From 9302ec5e0e27984676a7598b1dc08d122d3e15db Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 8 Sep 2025 05:57:02 +0200 Subject: [PATCH 345/382] Add comment --- src/libstore/include/nix/store/local-store.hh | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/libstore/include/nix/store/local-store.hh b/src/libstore/include/nix/store/local-store.hh index 444d1b28f..1184be8ed 100644 --- a/src/libstore/include/nix/store/local-store.hh +++ b/src/libstore/include/nix/store/local-store.hh @@ -174,6 +174,10 @@ private: std::unique_ptr publicKeys; }; + /** + * Mutable state. It's behind a `ref` to reduce false sharing + * between immutable and mutable fields. 
+ */ ref> _state; public: From ed6ef7cdf4ffc82f20b9cca37015f8c8f64dff61 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Mon, 8 Sep 2025 08:08:51 +0200 Subject: [PATCH 346/382] Test that using --inputs-from with a flakeref that has a dir works Will not pass until the next commit. --- tests/functional/flakes/flakes.sh | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/tests/functional/flakes/flakes.sh b/tests/functional/flakes/flakes.sh index 8fb7ce8e2..7b5be112e 100755 --- a/tests/functional/flakes/flakes.sh +++ b/tests/functional/flakes/flakes.sh @@ -472,12 +472,12 @@ EOF [[ "$(nix flake metadata --json "$flake3Dir" | jq -r .locks.nodes.flake1.locked.rev)" = $prevFlake1Rev ]] baseDir=$TEST_ROOT/$RANDOM -subdirFlakeDir=$baseDir/foo -mkdir -p "$subdirFlakeDir" +subdirFlakeDir1=$baseDir/foo1 +mkdir -p "$subdirFlakeDir1" writeSimpleFlake "$baseDir" -cat > "$subdirFlakeDir"/flake.nix < "$subdirFlakeDir1"/flake.nix < "$subdirFlakeDir"/flake.nix < "$subdirFlakeDir2"/flake.nix < Date: Mon, 8 Sep 2025 08:24:26 +0200 Subject: [PATCH 347/382] Pass `dir` in extraAttrs when overriding the registry This is handled similarly in the handler for `--override-flake` in `MixEvalArgs`. --- src/libcmd/installables.cc | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 0e6a204a7..96ff06ad3 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -178,10 +178,16 @@ MixFlakeOptions::MixFlakeOptions() for (auto & [inputName, input] : flake.lockFile.root->inputs) { auto input2 = flake.lockFile.findInput({inputName}); // resolve 'follows' nodes if (auto input3 = std::dynamic_pointer_cast(input2)) { + fetchers::Attrs extraAttrs; + + if (!input3->lockedRef.subdir.empty()) { + extraAttrs["dir"] = input3->lockedRef.subdir; + } + overrideRegistry( fetchers::Input::fromAttrs(fetchSettings, {{"type", "indirect"}, {"id", inputName}}), input3->lockedRef.input, - {}); + extraAttrs); } } }}, From 7cc654afa996db8eb5e67df8972084d3f5e7bf87 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 9 Sep 2025 00:18:41 +0300 Subject: [PATCH 348/382] libstore: Reallow unbracketed IPv6 addresses in store references This implements a special back-compat shim to specifically allow unbracketed IPv6 addresses in store references. This is something that is relied upon in the wild and the old parsing logic accepted both ways (brackets were optional). This patch restores this behavior. As always, we didn't have any tests for this. Addresses #13937. 
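A rough sketch of the rewrite (std-only, with a simplified host check; the actual patch validates the host with boost::urls::parse_ipv6_address and assumes query parameters were already split off):

    #include <algorithm>
    #include <iostream>
    #include <optional>
    #include <string>

    // Returns the reference with a bare IPv6 host wrapped in brackets,
    // or nullopt if the back-compat shim does not apply.
    static std::optional<std::string> bracketBareIpv6(const std::string & ref) {
        auto schemeEnd = ref.find("://");
        if (schemeEnd == std::string::npos)
            return std::nullopt;
        std::string scheme = ref.substr(0, schemeEnd);
        std::string host = ref.substr(schemeEnd + 3);

        std::string userinfo;
        if (auto at = host.find('@'); at != std::string::npos) {
            userinfo = host.substr(0, at + 1);
            host = host.substr(at + 1);
        }

        // Simplified check: already bracketed, or clearly not an IPv6 literal.
        if (host.find('[') != std::string::npos
            || std::count(host.begin(), host.end(), ':') < 2)
            return std::nullopt;

        return scheme + "://" + userinfo + "[" + host + "]";
    }

    int main() {
        std::cout << bracketBareIpv6("ssh://::1").value_or("(unchanged)") << "\n";
        std::cout << bracketBareIpv6("ssh://user@fe80::1").value_or("(unchanged)") << "\n";
        // prints:
        //   ssh://[::1]
        //   ssh://user@[fe80::1]
        return 0;
    }
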
--- .../ssh_unbracketed_ipv6_1.txt | 1 + .../ssh_unbracketed_ipv6_2.txt | 1 + .../ssh_unbracketed_ipv6_3.txt | 1 + src/libstore-tests/store-reference.cc | 35 ++++++++++++ src/libstore/meson.build | 1 + src/libstore/store-reference.cc | 54 ++++++++++++++++++- 6 files changed, 91 insertions(+), 2 deletions(-) create mode 100644 src/libstore-tests/data/store-reference/ssh_unbracketed_ipv6_1.txt create mode 100644 src/libstore-tests/data/store-reference/ssh_unbracketed_ipv6_2.txt create mode 100644 src/libstore-tests/data/store-reference/ssh_unbracketed_ipv6_3.txt diff --git a/src/libstore-tests/data/store-reference/ssh_unbracketed_ipv6_1.txt b/src/libstore-tests/data/store-reference/ssh_unbracketed_ipv6_1.txt new file mode 100644 index 000000000..861b5bb35 --- /dev/null +++ b/src/libstore-tests/data/store-reference/ssh_unbracketed_ipv6_1.txt @@ -0,0 +1 @@ +ssh://::1 \ No newline at end of file diff --git a/src/libstore-tests/data/store-reference/ssh_unbracketed_ipv6_2.txt b/src/libstore-tests/data/store-reference/ssh_unbracketed_ipv6_2.txt new file mode 100644 index 000000000..952d5a55d --- /dev/null +++ b/src/libstore-tests/data/store-reference/ssh_unbracketed_ipv6_2.txt @@ -0,0 +1 @@ +ssh://userinfo@fea5:23e1:3916:fc24:cb52:2837:2ecb:ea8e \ No newline at end of file diff --git a/src/libstore-tests/data/store-reference/ssh_unbracketed_ipv6_3.txt b/src/libstore-tests/data/store-reference/ssh_unbracketed_ipv6_3.txt new file mode 100644 index 000000000..d1f17adac --- /dev/null +++ b/src/libstore-tests/data/store-reference/ssh_unbracketed_ipv6_3.txt @@ -0,0 +1 @@ +ssh://userinfo@fea5:23e1:3916:fc24:cb52:2837:2ecb:ea8e?a=b&c=d \ No newline at end of file diff --git a/src/libstore-tests/store-reference.cc b/src/libstore-tests/store-reference.cc index d9f040ab6..7b42b45a2 100644 --- a/src/libstore-tests/store-reference.cc +++ b/src/libstore-tests/store-reference.cc @@ -148,4 +148,39 @@ URI_TEST( .params = {}, })) +static StoreReference sshLoopbackIPv6{ + .variant = + StoreReference::Specified{ + .scheme = "ssh", + .authority = "[::1]", + }, +}; + +URI_TEST_READ(ssh_unbracketed_ipv6_1, sshLoopbackIPv6) + +static StoreReference sshIPv6AuthorityWithUserinfo{ + .variant = + StoreReference::Specified{ + .scheme = "ssh", + .authority = "userinfo@[fea5:23e1:3916:fc24:cb52:2837:2ecb:ea8e]", + }, +}; + +URI_TEST_READ(ssh_unbracketed_ipv6_2, sshIPv6AuthorityWithUserinfo) + +static StoreReference sshIPv6AuthorityWithUserinfoAndParams{ + .variant = + StoreReference::Specified{ + .scheme = "ssh", + .authority = "userinfo@[fea5:23e1:3916:fc24:cb52:2837:2ecb:ea8e]", + }, + .params = + { + {"a", "b"}, + {"c", "d"}, + }, +}; + +URI_TEST_READ(ssh_unbracketed_ipv6_3, sshIPv6AuthorityWithUserinfoAndParams) + } // namespace nix diff --git a/src/libstore/meson.build b/src/libstore/meson.build index 253152772..7aeacbab7 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -105,6 +105,7 @@ boost = dependency( 'container', # Shouldn't list, because can header-only, and Meson currently looks for libs #'regex', + 'url', ], include_type : 'system', ) diff --git a/src/libstore/store-reference.cc b/src/libstore/store-reference.cc index 2c54e497e..96ee829d0 100644 --- a/src/libstore/store-reference.cc +++ b/src/libstore/store-reference.cc @@ -1,11 +1,12 @@ -#include - #include "nix/util/error.hh" +#include "nix/util/split.hh" #include "nix/util/url.hh" #include "nix/store/store-reference.hh" #include "nix/util/file-system.hh" #include "nix/util/util.hh" +#include + namespace nix { static bool 
isNonUriPath(const std::string & spec) @@ -43,6 +44,29 @@ std::string StoreReference::render(bool withParams) const return res; } +namespace { + +struct SchemeAndAuthorityWithPath +{ + std::string_view scheme; + std::string_view authority; +}; + +} // namespace + +/** + * Return the 'scheme' and remove the '://' or ':' separator. + */ +static std::optional splitSchemePrefixTo(std::string_view string) +{ + auto scheme = splitPrefixTo(string, ':'); + if (!scheme) + return std::nullopt; + + splitPrefix(string, "//"); + return SchemeAndAuthorityWithPath{.scheme = *scheme, .authority = string}; +} + StoreReference StoreReference::parse(const std::string & uri, const StoreReference::Params & extraParams) { auto params = extraParams; @@ -90,6 +114,32 @@ StoreReference StoreReference::parse(const std::string & uri, const StoreReferen }, .params = std::move(params), }; + } else if (auto schemeAndAuthority = splitSchemePrefixTo(baseURI)) { + /* Back-compatibility shim to accept unbracketed IPv6 addresses after the scheme. + * Old versions of nix allowed that. Note that this is ambiguous and does not allow + * specifying the port number. For that the address must be bracketed, otherwise it's + * greedily assumed to be the part of the host address. */ + auto authorityString = schemeAndAuthority->authority; + auto userinfo = splitPrefixTo(authorityString, '@'); + auto maybeIpv6 = boost::urls::parse_ipv6_address(authorityString); + if (maybeIpv6) { + std::string fixedAuthority; + if (userinfo) { + fixedAuthority += *userinfo; + fixedAuthority += '@'; + } + fixedAuthority += '['; + fixedAuthority += authorityString; + fixedAuthority += ']'; + return { + .variant = + Specified{ + .scheme = std::string(schemeAndAuthority->scheme), + .authority = fixedAuthority, + }, + .params = std::move(params), + }; + } } } From 7128abd217a4c6166e21b0622d04ebdf14afc751 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Sep 2025 22:00:58 +0000 Subject: [PATCH 349/382] build(deps): bump actions/labeler from 5 to 6 Bumps [actions/labeler](https://github.com/actions/labeler) from 5 to 6. - [Release notes](https://github.com/actions/labeler/releases) - [Commits](https://github.com/actions/labeler/compare/v5...v6) --- updated-dependencies: - dependency-name: actions/labeler dependency-version: '6' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/labels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index 23a5d9e51..16038cb21 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -18,7 +18,7 @@ jobs: runs-on: ubuntu-24.04 if: github.repository_owner == 'NixOS' steps: - - uses: actions/labeler@v5 + - uses: actions/labeler@v6 with: repo-token: ${{ secrets.GITHUB_TOKEN }} sync-labels: false From 137a55122c8d2044ad6d5cc701865e65eec6b3b0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 9 Sep 2025 13:33:32 +0200 Subject: [PATCH 350/382] Remove support for daemon protocol version < 18 Version 18 was introduced in November 2016 (4b8f1b0ec066a5b994747b1afd050f5f62d857f6). 
--- src/libstore/daemon.cc | 49 ++++------ src/libstore/remote-store.cc | 173 +++++++++++------------------------ 2 files changed, 74 insertions(+), 148 deletions(-) diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 4f28a1e0d..87bfe5187 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -572,21 +572,19 @@ static void performOp( case WorkerProto::Op::BuildPaths: { auto drvs = WorkerProto::Serialise::read(*store, rconn); BuildMode mode = bmNormal; - if (GET_PROTOCOL_MINOR(conn.protoVersion) >= 15) { - mode = WorkerProto::Serialise::read(*store, rconn); + mode = WorkerProto::Serialise::read(*store, rconn); - /* Repairing is not atomic, so disallowed for "untrusted" - clients. + /* Repairing is not atomic, so disallowed for "untrusted" + clients. - FIXME: layer violation in this message: the daemon code (i.e. - this file) knows whether a client/connection is trusted, but it - does not how how the client was authenticated. The mechanism - need not be getting the UID of the other end of a Unix Domain - Socket. - */ - if (mode == bmRepair && !trusted) - throw Error("repairing is not allowed because you are not in 'trusted-users'"); - } + FIXME: layer violation in this message: the daemon code (i.e. + this file) knows whether a client/connection is trusted, but it + does not how how the client was authenticated. The mechanism + need not be getting the UID of the other end of a Unix Domain + Socket. + */ + if (mode == bmRepair && !trusted) + throw Error("repairing is not allowed because you are not in 'trusted-users'"); logger->startWork(); store->buildPaths(drvs, mode); logger->stopWork(); @@ -805,13 +803,11 @@ static void performOp( clientSettings.buildCores = readInt(conn.from); clientSettings.useSubstitutes = readInt(conn.from); - if (GET_PROTOCOL_MINOR(conn.protoVersion) >= 12) { - unsigned int n = readInt(conn.from); - for (unsigned int i = 0; i < n; i++) { - auto name = readString(conn.from); - auto value = readString(conn.from); - clientSettings.overrides.emplace(name, value); - } + unsigned int n = readInt(conn.from); + for (unsigned int i = 0; i < n; i++) { + auto name = readString(conn.from); + auto value = readString(conn.from); + clientSettings.overrides.emplace(name, value); } logger->startWork(); @@ -876,19 +872,12 @@ static void performOp( auto path = store->parseStorePath(readString(conn.from)); std::shared_ptr info; logger->startWork(); - try { - info = store->queryPathInfo(path); - } catch (InvalidPath &) { - if (GET_PROTOCOL_MINOR(conn.protoVersion) < 17) - throw; - } + info = store->queryPathInfo(path); logger->stopWork(); if (info) { - if (GET_PROTOCOL_MINOR(conn.protoVersion) >= 17) - conn.to << 1; + conn.to << 1; WorkerProto::write(*store, wconn, static_cast(*info)); } else { - assert(GET_PROTOCOL_MINOR(conn.protoVersion) >= 17); conn.to << 0; } break; @@ -1063,7 +1052,7 @@ void processConnection(ref store, FdSource && from, FdSink && to, Trusted auto [protoVersion, features] = WorkerProto::BasicServerConnection::handshake(to, from, PROTOCOL_VERSION, WorkerProto::allFeatures); - if (protoVersion < 0x10a) + if (protoVersion < 256 + 18) throw Error("the Nix client version is too old"); WorkerProto::BasicServerConnection conn; diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 5694fa466..8f11af91f 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -73,6 +73,8 @@ void RemoteStore::initConnection(Connection & conn) try { auto [protoVersion, features] = 
WorkerProto::BasicClientConnection::handshake(conn.to, tee, PROTOCOL_VERSION, WorkerProto::allFeatures); + if (protoVersion < 256 + 18) + throw Error("the Nix daemon version is too old"); conn.protoVersion = protoVersion; conn.features = features; } catch (SerialisationError & e) { @@ -109,24 +111,22 @@ void RemoteStore::setOptions(Connection & conn) << 0 /* obsolete print build trace */ << settings.buildCores << settings.useSubstitutes; - if (GET_PROTOCOL_MINOR(conn.protoVersion) >= 12) { - std::map overrides; - settings.getSettings(overrides, true); // libstore settings - fileTransferSettings.getSettings(overrides, true); - overrides.erase(settings.keepFailed.name); - overrides.erase(settings.keepGoing.name); - overrides.erase(settings.tryFallback.name); - overrides.erase(settings.maxBuildJobs.name); - overrides.erase(settings.maxSilentTime.name); - overrides.erase(settings.buildCores.name); - overrides.erase(settings.useSubstitutes.name); - overrides.erase(loggerSettings.showTrace.name); - overrides.erase(experimentalFeatureSettings.experimentalFeatures.name); - overrides.erase("plugin-files"); - conn.to << overrides.size(); - for (auto & i : overrides) - conn.to << i.first << i.second.value; - } + std::map overrides; + settings.getSettings(overrides, true); // libstore settings + fileTransferSettings.getSettings(overrides, true); + overrides.erase(settings.keepFailed.name); + overrides.erase(settings.keepGoing.name); + overrides.erase(settings.tryFallback.name); + overrides.erase(settings.maxBuildJobs.name); + overrides.erase(settings.maxSilentTime.name); + overrides.erase(settings.buildCores.name); + overrides.erase(settings.useSubstitutes.name); + overrides.erase(loggerSettings.showTrace.name); + overrides.erase(experimentalFeatureSettings.experimentalFeatures.name); + overrides.erase("plugin-files"); + conn.to << overrides.size(); + for (auto & i : overrides) + conn.to << i.first << i.second.value; auto ex = conn.processStderrReturn(); if (ex) @@ -167,15 +167,7 @@ bool RemoteStore::isValidPathUncached(const StorePath & path) StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute) { auto conn(getConnection()); - if (GET_PROTOCOL_MINOR(conn->protoVersion) < 12) { - StorePathSet res; - for (auto & i : paths) - if (isValidPath(i)) - res.insert(i); - return res; - } else { - return conn->queryValidPaths(*this, &conn.daemonException, paths, maybeSubstitute); - } + return conn->queryValidPaths(*this, &conn.daemonException, paths, maybeSubstitute); } StorePathSet RemoteStore::queryAllValidPaths() @@ -189,21 +181,10 @@ StorePathSet RemoteStore::queryAllValidPaths() StorePathSet RemoteStore::querySubstitutablePaths(const StorePathSet & paths) { auto conn(getConnection()); - if (GET_PROTOCOL_MINOR(conn->protoVersion) < 12) { - StorePathSet res; - for (auto & i : paths) { - conn->to << WorkerProto::Op::HasSubstitutes << printStorePath(i); - conn.processStderr(); - if (readInt(conn->from)) - res.insert(i); - } - return res; - } else { - conn->to << WorkerProto::Op::QuerySubstitutablePaths; - WorkerProto::write(*this, *conn, paths); - conn.processStderr(); - return WorkerProto::Serialise::read(*this, *conn); - } + conn->to << WorkerProto::Op::QuerySubstitutablePaths; + WorkerProto::write(*this, *conn, paths); + conn.processStderr(); + return WorkerProto::Serialise::read(*this, *conn); } void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, SubstitutablePathInfos & infos) @@ -213,45 +194,24 @@ void 
RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S auto conn(getConnection()); - if (GET_PROTOCOL_MINOR(conn->protoVersion) < 12) { - - for (auto & i : pathsMap) { - SubstitutablePathInfo info; - conn->to << WorkerProto::Op::QuerySubstitutablePathInfo << printStorePath(i.first); - conn.processStderr(); - unsigned int reply = readInt(conn->from); - if (reply == 0) - continue; - auto deriver = readString(conn->from); - if (deriver != "") - info.deriver = parseStorePath(deriver); - info.references = WorkerProto::Serialise::read(*this, *conn); - info.downloadSize = readLongLong(conn->from); - info.narSize = readLongLong(conn->from); - infos.insert_or_assign(i.first, std::move(info)); - } - - } else { - - conn->to << WorkerProto::Op::QuerySubstitutablePathInfos; - if (GET_PROTOCOL_MINOR(conn->protoVersion) < 22) { - StorePathSet paths; - for (auto & path : pathsMap) - paths.insert(path.first); - WorkerProto::write(*this, *conn, paths); - } else - WorkerProto::write(*this, *conn, pathsMap); - conn.processStderr(); - size_t count = readNum(conn->from); - for (size_t n = 0; n < count; n++) { - SubstitutablePathInfo & info(infos[parseStorePath(readString(conn->from))]); - auto deriver = readString(conn->from); - if (deriver != "") - info.deriver = parseStorePath(deriver); - info.references = WorkerProto::Serialise::read(*this, *conn); - info.downloadSize = readLongLong(conn->from); - info.narSize = readLongLong(conn->from); - } + conn->to << WorkerProto::Op::QuerySubstitutablePathInfos; + if (GET_PROTOCOL_MINOR(conn->protoVersion) < 22) { + StorePathSet paths; + for (auto & path : pathsMap) + paths.insert(path.first); + WorkerProto::write(*this, *conn, paths); + } else + WorkerProto::write(*this, *conn, pathsMap); + conn.processStderr(); + size_t count = readNum(conn->from); + for (size_t n = 0; n < count; n++) { + SubstitutablePathInfo & info(infos[parseStorePath(readString(conn->from))]); + auto deriver = readString(conn->from); + if (deriver != "") + info.deriver = parseStorePath(deriver); + info.references = WorkerProto::Serialise::read(*this, *conn); + info.downloadSize = readLongLong(conn->from); + info.narSize = readLongLong(conn->from); } } @@ -466,36 +426,20 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, Repair { auto conn(getConnection()); - if (GET_PROTOCOL_MINOR(conn->protoVersion) < 18) { - auto source2 = sinkToSource([&](Sink & sink) { - sink << 1 // == path follows - ; - copyNAR(source, sink); - sink << exportMagic << printStorePath(info.path); - WorkerProto::write(*this, *conn, info.references); - sink << (info.deriver ? printStorePath(*info.deriver) : "") << 0 // == no legacy signature - << 0 // == no path follows - ; - }); - conn->importPaths(*this, &conn.daemonException, *source2); - } + conn->to << WorkerProto::Op::AddToStoreNar << printStorePath(info.path) + << (info.deriver ? printStorePath(*info.deriver) : "") + << info.narHash.to_string(HashFormat::Base16, false); + WorkerProto::write(*this, *conn, info.references); + conn->to << info.registrationTime << info.narSize << info.ultimate << info.sigs << renderContentAddress(info.ca) + << repair << !checkSigs; - else { - conn->to << WorkerProto::Op::AddToStoreNar << printStorePath(info.path) - << (info.deriver ? 
printStorePath(*info.deriver) : "") - << info.narHash.to_string(HashFormat::Base16, false); - WorkerProto::write(*this, *conn, info.references); - conn->to << info.registrationTime << info.narSize << info.ultimate << info.sigs << renderContentAddress(info.ca) - << repair << !checkSigs; - - if (GET_PROTOCOL_MINOR(conn->protoVersion) >= 23) { - conn.withFramedSink([&](Sink & sink) { copyNAR(source, sink); }); - } else if (GET_PROTOCOL_MINOR(conn->protoVersion) >= 21) { - conn.processStderr(0, &source); - } else { - copyNAR(source, conn->to); - conn.processStderr(0, nullptr); - } + if (GET_PROTOCOL_MINOR(conn->protoVersion) >= 23) { + conn.withFramedSink([&](Sink & sink) { copyNAR(source, sink); }); + } else if (GET_PROTOCOL_MINOR(conn->protoVersion) >= 21) { + conn.processStderr(0, &source); + } else { + copyNAR(source, conn->to); + conn.processStderr(0, nullptr); } } @@ -618,15 +562,8 @@ void RemoteStore::buildPaths( auto conn(getConnection()); conn->to << WorkerProto::Op::BuildPaths; - assert(GET_PROTOCOL_MINOR(conn->protoVersion) >= 13); WorkerProto::write(*this, *conn, drvPaths); - if (GET_PROTOCOL_MINOR(conn->protoVersion) >= 15) - conn->to << buildMode; - else - /* Old daemons did not take a 'buildMode' parameter, so we - need to validate it here on the client side. */ - if (buildMode != bmNormal) - throw Error("repairing or checking is not supported when building through the Nix daemon"); + conn->to << buildMode; conn.processStderr(); readInt(conn->from); } From 4fb61bc5afabe671b6a7b5d615f2572390fa5bd0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 9 Sep 2025 13:36:01 +0200 Subject: [PATCH 351/382] Remove WorkerProto::Op::ExportPath This was obsoleted in May 2016 (538a64e8c314f23ba0c5d76201f1c20e71884a21). --- src/libstore/daemon.cc | 11 ----------- src/libstore/include/nix/store/worker-protocol.hh | 1 - 2 files changed, 12 deletions(-) diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 87bfe5187..ebe0c2ab4 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -546,17 +546,6 @@ static void performOp( break; } - case WorkerProto::Op::ExportPath: { - auto path = store->parseStorePath(readString(conn.from)); - readInt(conn.from); // obsolete - logger->startWork(); - TunnelSink sink(conn.to); - store->exportPath(path, sink); - logger->stopWork(); - conn.to << 1; - break; - } - case WorkerProto::Op::ImportPaths: { logger->startWork(); TunnelSource source(conn.from, conn.to); diff --git a/src/libstore/include/nix/store/worker-protocol.hh b/src/libstore/include/nix/store/worker-protocol.hh index c7f8d5891..3920089fa 100644 --- a/src/libstore/include/nix/store/worker-protocol.hh +++ b/src/libstore/include/nix/store/worker-protocol.hh @@ -152,7 +152,6 @@ enum struct WorkerProto::Op : uint64_t { AddIndirectRoot = 12, SyncWithGC = 13, FindRoots = 14, - ExportPath = 16, // obsolete QueryDeriver = 18, // obsolete SetOptions = 19, CollectGarbage = 20, From 86d19956f25db21919ac0afabca01cd7399ff238 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 9 Sep 2025 13:44:07 +0200 Subject: [PATCH 352/382] Remove WorkerProto::Op::ImportPaths This was obsoleted in May 2016 (538a64e8c314f23ba0c5d76201f1c20e71884a21). 
--- src/libstore/daemon.cc | 12 ------------ .../include/nix/store/worker-protocol-connection.hh | 2 -- src/libstore/include/nix/store/worker-protocol.hh | 1 - src/libstore/worker-protocol-connection.cc | 8 -------- 4 files changed, 23 deletions(-) diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index ebe0c2ab4..2bd0698a0 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -546,18 +546,6 @@ static void performOp( break; } - case WorkerProto::Op::ImportPaths: { - logger->startWork(); - TunnelSource source(conn.from, conn.to); - auto paths = store->importPaths(source, trusted ? NoCheckSigs : CheckSigs); - logger->stopWork(); - Strings paths2; - for (auto & i : paths) - paths2.push_back(store->printStorePath(i)); - conn.to << paths2; - break; - } - case WorkerProto::Op::BuildPaths: { auto drvs = WorkerProto::Serialise::read(*store, rconn); BuildMode mode = bmNormal; diff --git a/src/libstore/include/nix/store/worker-protocol-connection.hh b/src/libstore/include/nix/store/worker-protocol-connection.hh index 73dd50719..31436395f 100644 --- a/src/libstore/include/nix/store/worker-protocol-connection.hh +++ b/src/libstore/include/nix/store/worker-protocol-connection.hh @@ -130,8 +130,6 @@ struct WorkerProto::BasicClientConnection : WorkerProto::BasicConnection bool * daemonException, const StorePath & path, std::function fun); - - void importPaths(const StoreDirConfig & store, bool * daemonException, Source & source); }; struct WorkerProto::BasicServerConnection : WorkerProto::BasicConnection diff --git a/src/libstore/include/nix/store/worker-protocol.hh b/src/libstore/include/nix/store/worker-protocol.hh index 3920089fa..29d4828c2 100644 --- a/src/libstore/include/nix/store/worker-protocol.hh +++ b/src/libstore/include/nix/store/worker-protocol.hh @@ -161,7 +161,6 @@ enum struct WorkerProto::Op : uint64_t { QueryFailedPaths = 24, ClearFailedPaths = 25, QueryPathInfo = 26, - ImportPaths = 27, // obsolete QueryDerivationOutputNames = 28, // obsolete QueryPathFromHashPart = 29, QuerySubstitutablePathInfos = 30, diff --git a/src/libstore/worker-protocol-connection.cc b/src/libstore/worker-protocol-connection.cc index 987d0c8dd..8a3766290 100644 --- a/src/libstore/worker-protocol-connection.cc +++ b/src/libstore/worker-protocol-connection.cc @@ -313,12 +313,4 @@ void WorkerProto::BasicClientConnection::narFromPath( fun(from); } -void WorkerProto::BasicClientConnection::importPaths( - const StoreDirConfig & store, bool * daemonException, Source & source) -{ - to << WorkerProto::Op::ImportPaths; - processStderr(daemonException, 0, &source); - auto importedPaths = WorkerProto::Serialise::read(store, *this); - assert(importedPaths.size() <= importedPaths.size()); -} } // namespace nix From 4524235af46b7c57008101afdbe00fd2d3cbfbbd Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 9 Sep 2025 22:18:52 +0300 Subject: [PATCH 353/382] libexpr: Overalign Value to 16 bytes This is necessary to make use of 128 bit atomics on x86_64 [1], since MOVAPD, MOVAPS, and MOVDQA need memory operands to be 16-byte aligned. We are not losing anything here, because Value is already 16-byte wide and Boehm allocates memory in granules that are 16 bytes by default on 64 bit systems [2]. 
[1]: https://patchwork.sourceware.org/project/gcc/patch/YhxkfzGEEQ9KHbBC@tucnak/ [2]: https://github.com/bdwgc/bdwgc/blob/54ac18ccbc5a833dd7edaff94a10ab9b65044d61/include/gc/gc_tiny_fl.h#L31-L33 --- src/libexpr/eval-gc.cc | 12 ++++++++++++ src/libexpr/include/nix/expr/value.hh | 5 +++-- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/src/libexpr/eval-gc.cc b/src/libexpr/eval-gc.cc index b17336a90..28aed7c37 100644 --- a/src/libexpr/eval-gc.cc +++ b/src/libexpr/eval-gc.cc @@ -16,6 +16,7 @@ # endif # include +# include // For GC_GRANULE_BYTES # include # include @@ -23,6 +24,17 @@ #endif +/* + * Ensure that Boehm satisfies our alignment requirements. This is the default configuration [^] + * and this assertion should never break for any platform. Let's assert it just in case. + * + * This alignment is particularly useful to be able to use aligned + * load/store instructions for loading/writing Values. + * + * [^]: https://github.com/bdwgc/bdwgc/blob/54ac18ccbc5a833dd7edaff94a10ab9b65044d61/include/gc/gc_tiny_fl.h#L31-L33 + */ +static_assert(sizeof(void *) * 2 == GC_GRANULE_BYTES, "Boehm GC must use GC_GRANULE_WORDS = 2"); + namespace nix { #if NIX_USE_BOEHMGC diff --git a/src/libexpr/include/nix/expr/value.hh b/src/libexpr/include/nix/expr/value.hh index 55ab797c7..228b23a7a 100644 --- a/src/libexpr/include/nix/expr/value.hh +++ b/src/libexpr/include/nix/expr/value.hh @@ -369,7 +369,7 @@ namespace detail { /* Whether to use a specialization of ValueStorage that does bitpacking into alignment niches. */ template -inline constexpr bool useBitPackedValueStorage = (ptrSize == 8) && (__STDCPP_DEFAULT_NEW_ALIGNMENT__ >= 8); +inline constexpr bool useBitPackedValueStorage = (ptrSize == 8) && (__STDCPP_DEFAULT_NEW_ALIGNMENT__ >= 16); } // namespace detail @@ -378,7 +378,8 @@ inline constexpr bool useBitPackedValueStorage = (ptrSize == 8) && (__STDCPP_DEF * Packs discriminator bits into the pointer alignment niches. */ template -class ValueStorage>> : public detail::ValueBase +class alignas(16) ValueStorage>> + : public detail::ValueBase { /* Needs a dependent type name in order for member functions (and * potentially ill-formed bit casts) to be SFINAE'd out. From 2ed2c79721177662a45b473b174d7dc35c867a66 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Wed, 10 Sep 2025 00:21:57 +0300 Subject: [PATCH 354/382] libexpr: Fix Value::mkList for empty lists This code used to save the pointer to a small list allocated on the stack to the Value, which is unintended. 
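To make the failure mode concrete, a deliberately simplified, hypothetical sketch of the bug class follows (generic stand-in types, not the real ListBuilder/Value): storing a pointer into a builder's inline, stack-resident buffer dangles once the builder goes out of scope, while the empty case needs no backing storage at all.

    // Hypothetical illustration only -- generic types, not libexpr code.
    #include <cstddef>

    struct Builder {
        std::size_t size = 0;
        int inlineElems[2] = {};
        int * elems = inlineElems;     // small builds point into the builder itself
    };

    struct List {
        std::size_t size;
        const int * elems;             // must never point into a dead Builder
    };

    List makeEmptyBroken() {
        Builder b;                     // lives on this stack frame
        return {b.size, b.elems};      // bug: elems still points at b.inlineElems
    }

    List makeEmptyFixed() {
        return {0, nullptr};           // an empty list needs no element storage
    }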
--- src/libexpr/include/nix/expr/value.hh | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/src/libexpr/include/nix/expr/value.hh b/src/libexpr/include/nix/expr/value.hh index 228b23a7a..82db1a775 100644 --- a/src/libexpr/include/nix/expr/value.hh +++ b/src/libexpr/include/nix/expr/value.hh @@ -993,12 +993,20 @@ public: void mkList(const ListBuilder & builder) noexcept { - if (builder.size == 1) + switch (builder.size) { + case 0: + setStorage(List{.size = 0, .elems = nullptr}); + break; + case 1: setStorage(std::array{builder.inlineElems[0], nullptr}); - else if (builder.size == 2) + break; + case 2: setStorage(std::array{builder.inlineElems[0], builder.inlineElems[1]}); - else + break; + default: setStorage(List{.size = builder.size, .elems = builder.elems}); + break; + } } inline void mkThunk(Env * e, Expr * ex) noexcept From d1d3ed62410d56f6a6b316dfe47147a4d3b13820 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 10 Sep 2025 10:20:37 +0200 Subject: [PATCH 355/382] Add release note --- doc/manual/rl-next/dropped-compat.md | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 doc/manual/rl-next/dropped-compat.md diff --git a/doc/manual/rl-next/dropped-compat.md b/doc/manual/rl-next/dropped-compat.md new file mode 100644 index 000000000..d6cc7704a --- /dev/null +++ b/doc/manual/rl-next/dropped-compat.md @@ -0,0 +1,6 @@ +--- +synopsis: "Removed support for daemons and clients older than Nix 2.0" +prs: [13951] +--- + +We have dropped support in the daemon worker protocol for daemons and clients that don't speak at least version 18 of the protocol. This first Nix release that supports this version is Nix 2.0, released in February 2018. From 5013b38df42353707cbd2b08a3db3a1eb925ae9c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 9 Sep 2025 14:16:56 +0200 Subject: [PATCH 356/382] Drop unused LegacySSHStore::addMultipleToStoreLegacy() --- .../include/nix/store/legacy-ssh-store.hh | 6 ------ src/libstore/legacy-ssh-store.cc | 16 ---------------- 2 files changed, 22 deletions(-) diff --git a/src/libstore/include/nix/store/legacy-ssh-store.hh b/src/libstore/include/nix/store/legacy-ssh-store.hh index 91e021433..ac31506d0 100644 --- a/src/libstore/include/nix/store/legacy-ssh-store.hh +++ b/src/libstore/include/nix/store/legacy-ssh-store.hh @@ -179,12 +179,6 @@ public: */ StorePathSet queryValidPaths(const StorePathSet & paths, bool lock, SubstituteFlag maybeSubstitute = NoSubstitute); - /** - * Just exists because this is exactly what Hydra was doing, and we - * don't yet want an algorithmic change. - */ - void addMultipleToStoreLegacy(Store & srcStore, const StorePathSet & paths); - void connect() override; unsigned int getProtocol() override; diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 0e9ee35bf..d42dca74a 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -302,22 +302,6 @@ StorePathSet LegacySSHStore::queryValidPaths(const StorePathSet & paths, bool lo return conn->queryValidPaths(*this, lock, paths, maybeSubstitute); } -void LegacySSHStore::addMultipleToStoreLegacy(Store & srcStore, const StorePathSet & paths) -{ - auto conn(connections->get()); - conn->to << ServeProto::Command::ImportPaths; - try { - srcStore.exportPaths(paths, conn->to); - } catch (...) 
{ - conn->good = false; - throw; - } - conn->to.flush(); - - if (readInt(conn->from) != 1) - throw Error("remote machine failed to import closure"); -} - void LegacySSHStore::connect() { auto conn(connections->get()); From fa048e4383f5d0f5621007e6798172fcfa441e8c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 9 Sep 2025 14:56:06 +0200 Subject: [PATCH 357/382] Remove support for serve protocol < 5 This was introduced in August 2018 (2825e05d21ecabc8b8524836baf0b9b05da993c6). --- .../serve-protocol/handshake-to-client.bin | Bin 16 -> 16 bytes src/libstore-tests/serve-protocol.cc | 4 +- .../nix/store/serve-protocol-connection.hh | 2 - .../include/nix/store/serve-protocol.hh | 1 - src/libstore/legacy-ssh-store.cc | 50 +++++------------- src/libstore/serve-protocol-connection.cc | 12 +---- src/nix/nix-store/nix-store.cc | 8 --- 7 files changed, 17 insertions(+), 60 deletions(-) diff --git a/src/libstore-tests/data/serve-protocol/handshake-to-client.bin b/src/libstore-tests/data/serve-protocol/handshake-to-client.bin index 15ba4b5e3d96e388637107542f6eb9f7e94ac708..465daa532c4bf88e1811456aed254613b752446c 100644 GIT binary patch literal 16 VcmX^8E+~Y7fq{XQ2}Ck5001MX0%rgK literal 16 ScmX^8E+~Wn1eibs0|NjffC6Fw diff --git a/src/libstore-tests/serve-protocol.cc b/src/libstore-tests/serve-protocol.cc index 01d6058cb..4cd7f101b 100644 --- a/src/libstore-tests/serve-protocol.cc +++ b/src/libstore-tests/serve-protocol.cc @@ -20,9 +20,9 @@ struct ServeProtoTest : VersionedProtoTest { /** * For serializers that don't care about the minimum version, we - * used the oldest one: 1.0. + * used the oldest one: 2.5. */ - ServeProto::Version defaultVersion = 2 << 8 | 0; + ServeProto::Version defaultVersion = 2 << 8 | 5; }; VERSIONED_CHARACTERIZATION_TEST( diff --git a/src/libstore/include/nix/store/serve-protocol-connection.hh b/src/libstore/include/nix/store/serve-protocol-connection.hh index fa50132c8..873277db9 100644 --- a/src/libstore/include/nix/store/serve-protocol-connection.hh +++ b/src/libstore/include/nix/store/serve-protocol-connection.hh @@ -82,8 +82,6 @@ struct ServeProto::BasicClientConnection BuildResult getBuildDerivationResponse(const StoreDirConfig & store); void narFromPath(const StoreDirConfig & store, const StorePath & path, std::function fun); - - void importPaths(const StoreDirConfig & store, std::function fun); }; struct ServeProto::BasicServerConnection diff --git a/src/libstore/include/nix/store/serve-protocol.hh b/src/libstore/include/nix/store/serve-protocol.hh index c8f3560d1..92e0b9a25 100644 --- a/src/libstore/include/nix/store/serve-protocol.hh +++ b/src/libstore/include/nix/store/serve-protocol.hh @@ -108,7 +108,6 @@ enum struct ServeProto::Command : uint64_t { QueryValidPaths = 1, QueryPathInfos = 2, DumpStorePath = 3, - ImportPaths = 4, ExportPaths = 5, BuildPaths = 6, QueryClosure = 7, diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index d42dca74a..f935de206 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -105,9 +105,6 @@ std::map LegacySSHStore::queryPathInfosUncached { auto conn(connections->get()); - /* No longer support missing NAR hash */ - assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4); - debug( "querying remote host '%s' for info on '%s'", config->authority.host, @@ -152,40 +149,21 @@ void LegacySSHStore::addToStore(const ValidPathInfo & info, Source & source, Rep auto conn(connections->get()); - if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 5) { - - conn->to << 
ServeProto::Command::AddToStoreNar << printStorePath(info.path) - << (info.deriver ? printStorePath(*info.deriver) : "") - << info.narHash.to_string(HashFormat::Base16, false); - ServeProto::write(*this, *conn, info.references); - conn->to << info.registrationTime << info.narSize << info.ultimate << info.sigs - << renderContentAddress(info.ca); - try { - copyNAR(source, conn->to); - } catch (...) { - conn->good = false; - throw; - } - conn->to.flush(); - - if (readInt(conn->from) != 1) - throw Error( - "failed to add path '%s' to remote host '%s'", printStorePath(info.path), config->authority.host); - - } else { - - conn->importPaths(*this, [&](Sink & sink) { - try { - copyNAR(source, sink); - } catch (...) { - conn->good = false; - throw; - } - sink << exportMagic << printStorePath(info.path); - ServeProto::write(*this, *conn, info.references); - sink << (info.deriver ? printStorePath(*info.deriver) : "") << 0 << 0; - }); + conn->to << ServeProto::Command::AddToStoreNar << printStorePath(info.path) + << (info.deriver ? printStorePath(*info.deriver) : "") + << info.narHash.to_string(HashFormat::Base16, false); + ServeProto::write(*this, *conn, info.references); + conn->to << info.registrationTime << info.narSize << info.ultimate << info.sigs << renderContentAddress(info.ca); + try { + copyNAR(source, conn->to); + } catch (...) { + conn->good = false; + throw; } + conn->to.flush(); + + if (readInt(conn->from) != 1) + throw Error("failed to add path '%s' to remote host '%s'", printStorePath(info.path), config->authority.host); } void LegacySSHStore::narFromPath(const StorePath & path, Sink & sink) diff --git a/src/libstore/serve-protocol-connection.cc b/src/libstore/serve-protocol-connection.cc index 908994f4e..a90b104a6 100644 --- a/src/libstore/serve-protocol-connection.cc +++ b/src/libstore/serve-protocol-connection.cc @@ -15,7 +15,7 @@ ServeProto::Version ServeProto::BasicClientConnection::handshake( if (magic != SERVE_MAGIC_2) throw Error("'nix-store --serve' protocol mismatch from '%s'", host); auto remoteVersion = readInt(from); - if (GET_PROTOCOL_MAJOR(remoteVersion) != 0x200) + if (GET_PROTOCOL_MAJOR(remoteVersion) != 0x200 || GET_PROTOCOL_MINOR(remoteVersion) < 5) throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host); return std::min(remoteVersion, localVersion); } @@ -93,14 +93,4 @@ void ServeProto::BasicClientConnection::narFromPath( fun(from); } -void ServeProto::BasicClientConnection::importPaths(const StoreDirConfig & store, std::function fun) -{ - to << ServeProto::Command::ImportPaths; - fun(to); - to.flush(); - - if (readInt(from) != 1) - throw Error("remote machine failed to import closure"); -} - } // namespace nix diff --git a/src/nix/nix-store/nix-store.cc b/src/nix/nix-store/nix-store.cc index 4191ea0d6..31b288817 100644 --- a/src/nix/nix-store/nix-store.cc +++ b/src/nix/nix-store/nix-store.cc @@ -985,14 +985,6 @@ static void opServe(Strings opFlags, Strings opArgs) store->narFromPath(store->parseStorePath(readString(in)), out); break; - case ServeProto::Command::ImportPaths: { - if (!writeAllowed) - throw Error("importing paths is not allowed"); - store->importPaths(in, NoCheckSigs); // FIXME: should we skip sig checking? 
- out << 1; // indicate success - break; - } - case ServeProto::Command::ExportPaths: { readInt(in); // obsolete store->exportPaths(ServeProto::Serialise::read(*store, rconn), out); From 9df99e0658bc4429abb463496db07d1adf7b6941 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 9 Sep 2025 15:37:12 +0200 Subject: [PATCH 358/382] Remove ServeProto::Command::ExportPaths This seems to have been unused since the build-remote.pl removal in February 2017 (27dc76c1a5dbe654465245ff5f6bc22e2c8902da). --- src/libstore/include/nix/store/serve-protocol.hh | 1 - src/nix/nix-store/nix-store.cc | 6 ------ 2 files changed, 7 deletions(-) diff --git a/src/libstore/include/nix/store/serve-protocol.hh b/src/libstore/include/nix/store/serve-protocol.hh index 92e0b9a25..4c2043f17 100644 --- a/src/libstore/include/nix/store/serve-protocol.hh +++ b/src/libstore/include/nix/store/serve-protocol.hh @@ -108,7 +108,6 @@ enum struct ServeProto::Command : uint64_t { QueryValidPaths = 1, QueryPathInfos = 2, DumpStorePath = 3, - ExportPaths = 5, BuildPaths = 6, QueryClosure = 7, BuildDerivation = 8, diff --git a/src/nix/nix-store/nix-store.cc b/src/nix/nix-store/nix-store.cc index 31b288817..3ab9b7583 100644 --- a/src/nix/nix-store/nix-store.cc +++ b/src/nix/nix-store/nix-store.cc @@ -985,12 +985,6 @@ static void opServe(Strings opFlags, Strings opArgs) store->narFromPath(store->parseStorePath(readString(in)), out); break; - case ServeProto::Command::ExportPaths: { - readInt(in); // obsolete - store->exportPaths(ServeProto::Serialise::read(*store, rconn), out); - break; - } - case ServeProto::Command::BuildPaths: { if (!writeAllowed) From fe5b6695345bd9bfab2b34af33f87f0259c9ae28 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 9 Sep 2025 16:06:47 +0200 Subject: [PATCH 359/382] Move exportPaths() / importPaths() out of the Store class --- src/libstore/export-import.cc | 51 ++++++++++--------- .../include/nix/store/export-import.hh | 24 +++++++++ src/libstore/include/nix/store/meson.build | 1 + src/libstore/include/nix/store/store-api.hh | 20 -------- src/nix/nix-store/nix-store.cc | 5 +- src/perl/lib/Nix/Store.xs | 5 +- 6 files changed, 57 insertions(+), 49 deletions(-) create mode 100644 src/libstore/include/nix/store/export-import.hh diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index 13444deb2..a343b5837 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -1,3 +1,4 @@ +#include "nix/store/export-import.hh" #include "nix/util/serialise.hh" #include "nix/store/store-api.hh" #include "nix/util/archive.hh" @@ -8,27 +9,14 @@ namespace nix { -void Store::exportPaths(const StorePathSet & paths, Sink & sink) +static void exportPath(Store & store, const StorePath & path, Sink & sink) { - auto sorted = topoSortPaths(paths); - std::reverse(sorted.begin(), sorted.end()); - - for (auto & path : sorted) { - sink << 1; - exportPath(path, sink); - } - - sink << 0; -} - -void Store::exportPath(const StorePath & path, Sink & sink) -{ - auto info = queryPathInfo(path); + auto info = store.queryPathInfo(path); HashSink hashSink(HashAlgorithm::SHA256); TeeSink teeSink(sink, hashSink); - narFromPath(path, teeSink); + store.narFromPath(path, teeSink); /* Refuse to export paths that have changed. This prevents filesystem corruption from spreading to other machines. 
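The call pattern after this change, condensed from the diff below (no API beyond what the patch itself introduces):

    // Free functions declared in the new nix/store/export-import.hh:
    //   void exportPaths(Store & store, const StorePathSet & paths, Sink & sink);
    //   StorePaths importPaths(Store & store, Source & source,
    //                          CheckSigsFlag checkSigs = CheckSigs);
    //
    // Call sites switch from member calls to free functions, e.g.:
    //   store->exportPaths(paths, sink)           becomes  exportPaths(*store, paths, sink)
    //   store->importPaths(source, NoCheckSigs)   becomes  importPaths(*store, source, NoCheckSigs)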
@@ -37,16 +25,29 @@ void Store::exportPath(const StorePath & path, Sink & sink) if (hash != info->narHash && info->narHash != Hash(info->narHash.algo)) throw Error( "hash of path '%s' has changed from '%s' to '%s'!", - printStorePath(path), + store.printStorePath(path), info->narHash.to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true)); - teeSink << exportMagic << printStorePath(path); - CommonProto::write(*this, CommonProto::WriteConn{.to = teeSink}, info->references); - teeSink << (info->deriver ? printStorePath(*info->deriver) : "") << 0; + teeSink << exportMagic << store.printStorePath(path); + CommonProto::write(store, CommonProto::WriteConn{.to = teeSink}, info->references); + teeSink << (info->deriver ? store.printStorePath(*info->deriver) : "") << 0; } -StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) +void exportPaths(Store & store, const StorePathSet & paths, Sink & sink) +{ + auto sorted = store.topoSortPaths(paths); + std::reverse(sorted.begin(), sorted.end()); + + for (auto & path : sorted) { + sink << 1; + exportPath(store, path, sink); + } + + sink << 0; +} + +StorePaths importPaths(Store & store, Source & source, CheckSigsFlag checkSigs) { StorePaths res; while (true) { @@ -66,17 +67,17 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) if (magic != exportMagic) throw Error("Nix archive cannot be imported; wrong format"); - auto path = parseStorePath(readString(source)); + auto path = store.parseStorePath(readString(source)); // Activity act(*logger, lvlInfo, "importing path '%s'", info.path); - auto references = CommonProto::Serialise::read(*this, CommonProto::ReadConn{.from = source}); + auto references = CommonProto::Serialise::read(store, CommonProto::ReadConn{.from = source}); auto deriver = readString(source); auto narHash = hashString(HashAlgorithm::SHA256, saved.s); ValidPathInfo info{path, narHash}; if (deriver != "") - info.deriver = parseStorePath(deriver); + info.deriver = store.parseStorePath(deriver); info.references = references; info.narSize = saved.s.size(); @@ -86,7 +87,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) // Can't use underlying source, which would have been exhausted auto source = StringSource(saved.s); - addToStore(info, source, NoRepair, checkSigs); + store.addToStore(info, source, NoRepair, checkSigs); res.push_back(info.path); } diff --git a/src/libstore/include/nix/store/export-import.hh b/src/libstore/include/nix/store/export-import.hh new file mode 100644 index 000000000..15092202f --- /dev/null +++ b/src/libstore/include/nix/store/export-import.hh @@ -0,0 +1,24 @@ +#pragma once + +#include "nix/store/store-api.hh" + +namespace nix { + +/** + * Magic header of exportPath() output (obsolete). + */ +const uint32_t exportMagic = 0x4558494e; + +/** + * Export multiple paths in the format expected by `nix-store + * --import`. The paths will be sorted topologically. + */ +void exportPaths(Store & store, const StorePathSet & paths, Sink & sink); + +/** + * Import a sequence of NAR dumps created by `exportPaths()` into the + * Nix store. 
+ */ +StorePaths importPaths(Store & store, Source & source, CheckSigsFlag checkSigs = CheckSigs); + +} // namespace nix diff --git a/src/libstore/include/nix/store/meson.build b/src/libstore/include/nix/store/meson.build index 776c7521d..60af5ff53 100644 --- a/src/libstore/include/nix/store/meson.build +++ b/src/libstore/include/nix/store/meson.build @@ -34,6 +34,7 @@ headers = [ config_pub_h ] + files( 'derived-path-map.hh', 'derived-path.hh', 'downstream-placeholder.hh', + 'export-import.hh', 'filetransfer.hh', 'gc-store.hh', 'globals.hh', diff --git a/src/libstore/include/nix/store/store-api.hh b/src/libstore/include/nix/store/store-api.hh index dad5c9e8d..2519002b3 100644 --- a/src/libstore/include/nix/store/store-api.hh +++ b/src/libstore/include/nix/store/store-api.hh @@ -48,11 +48,6 @@ enum CheckSigsFlag : bool { NoCheckSigs = false, CheckSigs = true }; enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true }; -/** - * Magic header of exportPath() output (obsolete). - */ -const uint32_t exportMagic = 0x4558494e; - enum BuildMode : uint8_t { bmNormal, bmRepair, bmCheck }; enum TrustedFlag : bool { NotTrusted = false, Trusted = true }; @@ -804,21 +799,6 @@ public: */ StorePaths topoSortPaths(const StorePathSet & paths); - /** - * Export multiple paths in the format expected by ‘nix-store - * --import’. - */ - void exportPaths(const StorePathSet & paths, Sink & sink); - - void exportPath(const StorePath & path, Sink & sink); - - /** - * Import a sequence of NAR dumps created by exportPaths() into the - * Nix store. Optionally, the contents of the NARs are preloaded - * into the specified FS accessor to speed up subsequent access. - */ - StorePaths importPaths(Source & source, CheckSigsFlag checkSigs = CheckSigs); - struct Stats { std::atomic narInfoRead{0}; diff --git a/src/nix/nix-store/nix-store.cc b/src/nix/nix-store/nix-store.cc index 3ab9b7583..5f85e06f0 100644 --- a/src/nix/nix-store/nix-store.cc +++ b/src/nix/nix-store/nix-store.cc @@ -14,6 +14,7 @@ #include "nix/util/posix-source-accessor.hh" #include "nix/store/globals.hh" #include "nix/store/path-with-outputs.hh" +#include "nix/store/export-import.hh" #include "man-pages.hh" @@ -774,7 +775,7 @@ static void opExport(Strings opFlags, Strings opArgs) paths.insert(store->followLinksToStorePath(i)); FdSink sink(getStandardOutput()); - store->exportPaths(paths, sink); + exportPaths(*store, paths, sink); sink.flush(); } @@ -787,7 +788,7 @@ static void opImport(Strings opFlags, Strings opArgs) throw UsageError("no arguments expected"); FdSource source(STDIN_FILENO); - auto paths = store->importPaths(source, NoCheckSigs); + auto paths = importPaths(*store, source, NoCheckSigs); for (auto & i : paths) cout << fmt("%s\n", store->printStorePath(i)) << std::flush; diff --git a/src/perl/lib/Nix/Store.xs b/src/perl/lib/Nix/Store.xs index edcb6d72a..7aa918ba0 100644 --- a/src/perl/lib/Nix/Store.xs +++ b/src/perl/lib/Nix/Store.xs @@ -11,6 +11,7 @@ #include "nix/store/globals.hh" #include "nix/store/store-open.hh" #include "nix/util/posix-source-accessor.hh" +#include "nix/store/export-import.hh" #include #include @@ -233,7 +234,7 @@ StoreWrapper::exportPaths(int fd, ...) 
StorePathSet paths; for (int n = 2; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n)))); FdSink sink(fd); - THIS->store->exportPaths(paths, sink); + exportPaths(*THIS->store, paths, sink); } catch (Error & e) { croak("%s", e.what()); } @@ -244,7 +245,7 @@ StoreWrapper::importPaths(int fd, int dontCheckSigs) PPCODE: try { FdSource source(fd); - THIS->store->importPaths(source, dontCheckSigs ? NoCheckSigs : CheckSigs); + importPaths(*THIS->store, source, dontCheckSigs ? NoCheckSigs : CheckSigs); } catch (Error & e) { croak("%s", e.what()); } From 37eec84bc1fab6723024152b34ce0f5aa3b32f0f Mon Sep 17 00:00:00 2001 From: Marie Ramlow Date: Wed, 27 Aug 2025 21:28:42 +0200 Subject: [PATCH 360/382] meson: link to libatomic on powerpc-linux Like 32-bit Arm, 32-bit PowerPC also needs linking against libatomic because it doesn't support some atomic instructions in hardware. --- nix-meson-build-support/libatomic/meson.build | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nix-meson-build-support/libatomic/meson.build b/nix-meson-build-support/libatomic/meson.build index d16d23817..1c014bee7 100644 --- a/nix-meson-build-support/libatomic/meson.build +++ b/nix-meson-build-support/libatomic/meson.build @@ -3,6 +3,6 @@ # This is needed for std::atomic on some platforms # We did not manage to test this reliably on all platforms, so we hardcode # it for now. -if host_machine.cpu_family() == 'arm' +if host_machine.cpu_family() in [ 'arm', 'ppc' ] deps_other += cxx.find_library('atomic') endif From 4f8c50fb77facc9cc1e574130fdca3ea502ab518 Mon Sep 17 00:00:00 2001 From: Philipp Otterbein Date: Tue, 5 Aug 2025 00:55:32 +0200 Subject: [PATCH 361/382] libexpr: replace std::unordered_* types by faster boost hash maps --- src/libexpr/eval.cc | 24 +++++++-------- src/libexpr/include/nix/expr/eval.hh | 24 ++++++++------- src/libexpr/include/nix/expr/parser-state.hh | 2 +- src/libexpr/include/nix/expr/value.hh | 3 +- src/libexpr/parser.y | 2 +- src/libexpr/primops.cc | 32 ++++++++------------ 6 files changed, 42 insertions(+), 45 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index df4e52e5d..69d7ba380 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1090,7 +1090,9 @@ void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial) void EvalState::resetFileCache() { fileEvalCache.clear(); + fileEvalCache.rehash(0); fileParseCache.clear(); + fileParseCache.rehash(0); inputCache->clear(); } @@ -2375,10 +2377,9 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat if (nix::isDerivation(path.path.abs())) error("file names are not allowed to end in '%1%'", drvExtension).debugThrow(); - auto dstPathCached = get(*srcToStore.lock(), path); - - auto dstPath = dstPathCached ? 
*dstPathCached : [&]() { - auto dstPath = fetchToStore( + std::optional dstPath; + if (!srcToStore.cvisit(path, [&dstPath](const auto & kv) { dstPath.emplace(kv.second); })) { + dstPath.emplace(fetchToStore( fetchSettings, *store, path.resolveSymlinks(SymlinkResolution::Ancestors), @@ -2386,15 +2387,14 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat path.baseName(), ContentAddressMethod::Raw::NixArchive, nullptr, - repair); - allowPath(dstPath); - srcToStore.lock()->try_emplace(path, dstPath); - printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath)); - return dstPath; - }(); + repair)); + allowPath(*dstPath); + srcToStore.try_emplace(path, *dstPath); + printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(*dstPath)); + } - context.insert(NixStringContextElem::Opaque{.path = dstPath}); - return dstPath; + context.insert(NixStringContextElem::Opaque{.path = *dstPath}); + return *dstPath; } SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx) diff --git a/src/libexpr/include/nix/expr/eval.hh b/src/libexpr/include/nix/expr/eval.hh index 5015a009b..75ed12664 100644 --- a/src/libexpr/include/nix/expr/eval.hh +++ b/src/libexpr/include/nix/expr/eval.hh @@ -20,6 +20,8 @@ // For `NIX_USE_BOEHMGC`, and if that's set, `GC_THREADS` #include "nix/expr/config.hh" +#include +#include #include #include #include @@ -162,7 +164,7 @@ typedef std:: map, traceable_allocator>> ValMap; -typedef std::unordered_map DocCommentMap; +typedef boost::unordered_flat_map> DocCommentMap; struct Env { @@ -395,7 +397,7 @@ public: bool inDebugger = false; int trylevel; std::list debugTraces; - std::map> exprEnvs; + boost::unordered_flat_map> exprEnvs; const std::shared_ptr getStaticEnv(const Expr & expr) const { @@ -438,12 +440,12 @@ private: /* Cache for calls to addToStore(); maps source paths to the store paths. */ - Sync> srcToStore; + boost::concurrent_flat_map> srcToStore; /** * A cache from path names to parse trees. */ - typedef std::unordered_map< + typedef boost::unordered_flat_map< SourcePath, Expr *, std::hash, @@ -455,7 +457,7 @@ private: /** * A cache from path names to values. */ - typedef std::unordered_map< + typedef boost::unordered_flat_map< SourcePath, Value, std::hash, @@ -468,11 +470,11 @@ private: * Associate source positions of certain AST nodes with their preceding doc comment, if they have one. * Grouped by file. */ - std::unordered_map positionToDocComment; + boost::unordered_flat_map> positionToDocComment; LookupPath lookupPath; - std::map> lookupPathResolved; + boost::unordered_flat_map> lookupPathResolved; /** * Cache used by prim_match(). @@ -746,7 +748,7 @@ public: /** * Internal primops not exposed to the user. */ - std::unordered_map< + boost::unordered_flat_map< std::string, Value *, std::hash, @@ -1017,10 +1019,10 @@ private: bool countCalls; - typedef std::map PrimOpCalls; + typedef boost::unordered_flat_map PrimOpCalls; PrimOpCalls primOpCalls; - typedef std::map FunctionCalls; + typedef boost::unordered_flat_map FunctionCalls; FunctionCalls functionCalls; /** Evaluation/call profiler. 
*/ @@ -1028,7 +1030,7 @@ private: void incrFunctionCall(ExprLambda * fun); - typedef std::map AttrSelects; + typedef boost::unordered_flat_map> AttrSelects; AttrSelects attrSelects; friend struct ExprOpUpdate; diff --git a/src/libexpr/include/nix/expr/parser-state.hh b/src/libexpr/include/nix/expr/parser-state.hh index 836cc9861..e689678de 100644 --- a/src/libexpr/include/nix/expr/parser-state.hh +++ b/src/libexpr/include/nix/expr/parser-state.hh @@ -71,7 +71,7 @@ struct LexerState /** * @brief Maps some positions to a DocComment, where the comment is relevant to the location. */ - std::unordered_map & positionToDocComment; + DocCommentMap & positionToDocComment; PosTable & positions; PosTable::Origin origin; diff --git a/src/libexpr/include/nix/expr/value.hh b/src/libexpr/include/nix/expr/value.hh index 82db1a775..d3aeac157 100644 --- a/src/libexpr/include/nix/expr/value.hh +++ b/src/libexpr/include/nix/expr/value.hh @@ -12,6 +12,7 @@ #include "nix/expr/print-options.hh" #include "nix/util/checked-arithmetic.hh" +#include #include namespace nix { @@ -1166,7 +1167,7 @@ void Value::mkBlackhole() } typedef std::vector> ValueVector; -typedef std::unordered_map< +typedef boost::unordered_flat_map< Symbol, Value *, std::hash, diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 35fe929d9..89da001ef 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -57,7 +57,7 @@ namespace nix { -typedef std::unordered_map DocCommentMap; +typedef boost::unordered_flat_map> DocCommentMap; Expr * parseExprFromBuf( char * text, diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 9ba417c32..c107c6bc2 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -18,6 +18,8 @@ #include "nix/util/sort.hh" #include +#include +#include #include #include @@ -1750,7 +1752,7 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName read them later. */ { auto h = hashDerivationModulo(*state.store, drv, false); - drvHashes.lock()->insert_or_assign(drvPath, h); + drvHashes.insert_or_assign(drvPath, std::move(h)); } auto result = state.buildBindings(1 + drv.outputs.size()); @@ -4027,7 +4029,7 @@ static void prim_groupBy(EvalState & state, const PosIdx pos, Value ** args, Val auto name = state.forceStringNoCtx( res, pos, "while evaluating the return value of the grouping function passed to builtins.groupBy"); auto sym = state.symbols.create(name); - auto vector = attrs.try_emplace(sym, ValueVector()).first; + auto vector = attrs.try_emplace(sym, {}).first; vector->second.push_back(vElem); } @@ -4562,27 +4564,19 @@ static RegisterPrimOp primop_convertHash({ struct RegexCache { - struct State - { - std::unordered_map> cache; - }; - - Sync state_; + boost::concurrent_flat_map> cache; std::regex get(std::string_view re) { - auto state(state_.lock()); - auto it = state->cache.find(re); - if (it != state->cache.end()) - return it->second; + std::regex regex; /* No std::regex constructor overload from std::string_view, but can be constructed from a pointer + size or an iterator range. 
*/ - return state->cache - .emplace( - std::piecewise_construct, - std::forward_as_tuple(re), - std::forward_as_tuple(/*s=*/re.data(), /*count=*/re.size(), std::regex::extended)) - .first->second; + cache.try_emplace_and_cvisit(re, + /*s=*/re.data(), /*count=*/re.size(), std::regex::extended, + [®ex](const auto & kv) { regex = kv.second; }, + [®ex](const auto & kv) { regex = kv.second; } + ); + return regex; } }; @@ -4826,7 +4820,7 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value ** ar from.emplace_back(state.forceString( *elem, pos, "while evaluating one of the strings to replace passed to builtins.replaceStrings")); - std::unordered_map cache; + boost::unordered_flat_map cache; auto to = args[1]->listView(); NixStringContext context; From 9f2b6a1b94141dc50deb6e1976beb6cf0872b33c Mon Sep 17 00:00:00 2001 From: Philipp Otterbein Date: Sat, 6 Sep 2025 14:21:48 +0200 Subject: [PATCH 362/382] replace more std::unordered_* types by faster boost hash maps --- src/libexpr-c/nix_api_expr.cc | 28 +++++++------------ src/libexpr/primops.cc | 10 ++++--- src/libexpr/print.cc | 5 ++-- src/libfetchers/filtering-source-accessor.cc | 8 ++++-- src/libfetchers/git-utils.cc | 11 ++++---- .../nix/fetchers/filtering-source-accessor.hh | 4 +-- src/libflake/lockfile.cc | 5 ++-- src/libstore/derivations.cc | 14 ++++------ src/libstore/gc.cc | 17 +++++------ src/libstore/include/nix/store/derivations.hh | 16 +++++++++-- src/libstore/include/nix/store/gc-store.hh | 6 ++-- src/libstore/include/nix/store/local-store.hh | 4 +-- src/libstore/misc.cc | 6 ++-- src/libutil/include/nix/util/canon-path.hh | 4 ++- src/libutil/linux/cgroup.cc | 4 +-- src/libutil/posix-source-accessor.cc | 2 +- src/nix/env.cc | 2 +- 17 files changed, 75 insertions(+), 71 deletions(-) diff --git a/src/libexpr-c/nix_api_expr.cc b/src/libexpr-c/nix_api_expr.cc index a028202ae..46e08b5f7 100644 --- a/src/libexpr-c/nix_api_expr.cc +++ b/src/libexpr-c/nix_api_expr.cc @@ -16,7 +16,7 @@ #include "nix_api_util_internal.h" #if NIX_USE_BOEHMGC -# include +# include #endif /** @@ -207,28 +207,20 @@ void nix_state_free(EvalState * state) } #if NIX_USE_BOEHMGC -std::unordered_map< +boost::concurrent_flat_map< const void *, unsigned int, std::hash, std::equal_to, traceable_allocator>> - nix_refcounts; - -std::mutex nix_refcount_lock; + nix_refcounts{}; nix_err nix_gc_incref(nix_c_context * context, const void * p) { if (context) context->last_err_code = NIX_OK; try { - std::scoped_lock lock(nix_refcount_lock); - auto f = nix_refcounts.find(p); - if (f != nix_refcounts.end()) { - f->second++; - } else { - nix_refcounts[p] = 1; - } + nix_refcounts.insert_or_visit({p, 1}, [](auto & kv) { kv.second++; }); } NIXC_CATCH_ERRS } @@ -239,12 +231,12 @@ nix_err nix_gc_decref(nix_c_context * context, const void * p) if (context) context->last_err_code = NIX_OK; try { - std::scoped_lock lock(nix_refcount_lock); - auto f = nix_refcounts.find(p); - if (f != nix_refcounts.end()) { - if (--f->second == 0) - nix_refcounts.erase(f); - } else + bool fail = true; + nix_refcounts.erase_if(p, [&](auto & kv) { + fail = false; + return !--kv.second; + }); + if (fail) throw std::runtime_error("nix_gc_decref: object was not referenced"); } NIXC_CATCH_ERRS diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index c107c6bc2..f2520bcda 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -4571,11 +4571,13 @@ struct RegexCache std::regex regex; /* No std::regex constructor overload from std::string_view, but can be constructed from a 
pointer + size or an iterator range. */ - cache.try_emplace_and_cvisit(re, - /*s=*/re.data(), /*count=*/re.size(), std::regex::extended, + cache.try_emplace_and_cvisit( + re, + /*s=*/re.data(), + /*count=*/re.size(), + std::regex::extended, [®ex](const auto & kv) { regex = kv.second; }, - [®ex](const auto & kv) { regex = kv.second; } - ); + [®ex](const auto & kv) { regex = kv.second; }); return regex; } }; diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 5338e365e..071addc1a 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -1,5 +1,4 @@ #include -#include #include #include "nix/expr/print.hh" @@ -10,6 +9,8 @@ #include "nix/util/english.hh" #include "nix/expr/eval.hh" +#include + namespace nix { void printElided( @@ -81,7 +82,7 @@ std::ostream & printLiteralBool(std::ostream & str, bool boolean) // For example `or' doesn't need to be quoted. bool isReservedKeyword(const std::string_view str) { - static const std::unordered_set reservedKeywords = { + static const boost::unordered_flat_set reservedKeywords = { "if", "then", "else", "assert", "with", "let", "in", "rec", "inherit"}; return reservedKeywords.contains(str); } diff --git a/src/libfetchers/filtering-source-accessor.cc b/src/libfetchers/filtering-source-accessor.cc index 17f224ad2..d0991ae23 100644 --- a/src/libfetchers/filtering-source-accessor.cc +++ b/src/libfetchers/filtering-source-accessor.cc @@ -1,5 +1,7 @@ #include "nix/fetchers/filtering-source-accessor.hh" +#include + namespace nix { std::optional FilteringSourceAccessor::getPhysicalPath(const CanonPath & path) @@ -57,12 +59,12 @@ void FilteringSourceAccessor::checkAccess(const CanonPath & path) struct AllowListSourceAccessorImpl : AllowListSourceAccessor { std::set allowedPrefixes; - std::unordered_set allowedPaths; + boost::unordered_flat_set> allowedPaths; AllowListSourceAccessorImpl( ref next, std::set && allowedPrefixes, - std::unordered_set && allowedPaths, + boost::unordered_flat_set> && allowedPaths, MakeNotAllowedError && makeNotAllowedError) : AllowListSourceAccessor(SourcePath(next), std::move(makeNotAllowedError)) , allowedPrefixes(std::move(allowedPrefixes)) @@ -84,7 +86,7 @@ struct AllowListSourceAccessorImpl : AllowListSourceAccessor ref AllowListSourceAccessor::create( ref next, std::set && allowedPrefixes, - std::unordered_set && allowedPaths, + boost::unordered_flat_set> && allowedPaths, MakeNotAllowedError && makeNotAllowedError) { return make_ref( diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 1861838ed..4ed94a4ed 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -30,8 +30,9 @@ #include #include +#include +#include #include -#include #include #include #include @@ -315,7 +316,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this uint64_t getRevCount(const Hash & rev) override { - std::unordered_set done; + boost::unordered_flat_set> done; std::queue todo; todo.push(peelObject(lookupObject(*this, hashToOID(rev)).get(), GIT_OBJECT_COMMIT)); @@ -569,7 +570,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this void verifyCommit(const Hash & rev, const std::vector & publicKeys) override { // Map of SSH key types to their internal OpenSSH representations - static const std::unordered_map keyTypeMap = { + static const boost::unordered_flat_map keyTypeMap = { {"ssh-dsa", "ssh-dsa"}, {"ssh-ecdsa", "ssh-ecdsa"}, {"ssh-ecdsa-sk", "sk-ecdsa-sha2-nistp256@openssh.com"}, @@ -816,7 +817,7 @@ struct GitSourceAccessor : SourceAccessor return 
toHash(*git_tree_entry_id(entry)); } - std::unordered_map lookupCache; + boost::unordered_flat_map> lookupCache; /* Recursively look up 'path' relative to the root. */ git_tree_entry * lookup(State & state, const CanonPath & path) @@ -1253,7 +1254,7 @@ GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllow makeFSSourceAccessor(path), std::set{wd.files}, // Always allow access to the root, but not its children. - std::unordered_set{CanonPath::root}, + boost::unordered_flat_set>{CanonPath::root}, std::move(makeNotAllowedError)) .cast(); if (exportIgnore) diff --git a/src/libfetchers/include/nix/fetchers/filtering-source-accessor.hh b/src/libfetchers/include/nix/fetchers/filtering-source-accessor.hh index 70e837ff4..1d4028be5 100644 --- a/src/libfetchers/include/nix/fetchers/filtering-source-accessor.hh +++ b/src/libfetchers/include/nix/fetchers/filtering-source-accessor.hh @@ -2,7 +2,7 @@ #include "nix/util/source-path.hh" -#include +#include namespace nix { @@ -72,7 +72,7 @@ struct AllowListSourceAccessor : public FilteringSourceAccessor static ref create( ref next, std::set && allowedPrefixes, - std::unordered_set && allowedPaths, + boost::unordered_flat_set> && allowedPaths, MakeNotAllowedError && makeNotAllowedError); using FilteringSourceAccessor::FilteringSourceAccessor; diff --git a/src/libflake/lockfile.cc b/src/libflake/lockfile.cc index 94e7f11f1..f381a57e6 100644 --- a/src/libflake/lockfile.cc +++ b/src/libflake/lockfile.cc @@ -1,5 +1,3 @@ -#include - #include "nix/fetchers/fetch-settings.hh" #include "nix/flake/settings.hh" #include "nix/flake/lockfile.hh" @@ -9,6 +7,7 @@ #include #include +#include #include #include @@ -162,7 +161,7 @@ std::pair LockFile::toJSON() const { nlohmann::json nodes; KeyMap nodeKeys; - std::unordered_set keys; + boost::unordered_flat_set keys; std::function node)> dumpNode; diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index a1831efc6..84889ceac 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -11,6 +11,7 @@ #include "nix/util/json-utils.hh" #include +#include #include namespace nix { @@ -834,7 +835,7 @@ DerivationType BasicDerivation::type() const throw Error("can't mix derivation output types"); } -Sync drvHashes; +DrvHashes drvHashes; /* pathDerivationModulo and hashDerivationModulo are mutually recursive */ @@ -844,16 +845,13 @@ Sync drvHashes; */ static const DrvHash pathDerivationModulo(Store & store, const StorePath & drvPath) { - { - auto hashes = drvHashes.lock(); - auto h = hashes->find(drvPath); - if (h != hashes->end()) { - return h->second; - } + std::optional hash; + if (drvHashes.cvisit(drvPath, [&hash](const auto & kv) { hash.emplace(kv.second); })) { + return *hash; } auto h = hashDerivationModulo(store, store.readInvalidDerivation(drvPath), false); // Cache it - drvHashes.lock()->insert_or_assign(drvPath, h); + drvHashes.insert_or_assign(drvPath, h); return h; } diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 385215fe0..dd33f5f84 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -1,6 +1,7 @@ #include "nix/store/derivations.hh" #include "nix/store/globals.hh" #include "nix/store/local-store.hh" +#include "nix/store/path.hh" #include "nix/util/finally.hh" #include "nix/util/unix-domain-socket.hh" #include "nix/util/signals.hh" @@ -13,14 +14,10 @@ # include "nix/util/processes.hh" #endif +#include +#include #include - -#include #include -#include -#include - -#include #include #include #include @@ -314,7 +311,7 @@ Roots 
LocalStore::findRoots(bool censor) /** * Key is a mere string because cannot has path with macOS's libc++ */ -typedef std::unordered_map> UncheckedRoots; +typedef boost::unordered_flat_map> UncheckedRoots; static void readProcLink(const std::filesystem::path & file, UncheckedRoots & roots) { @@ -463,13 +460,13 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) bool gcKeepOutputs = settings.gcKeepOutputs; bool gcKeepDerivations = settings.gcKeepDerivations; - std::unordered_set roots, dead, alive; + boost::unordered_flat_set> roots, dead, alive; struct Shared { // The temp roots only store the hash part to make it easier to // ignore suffixes like '.lock', '.chroot' and '.check'. - std::unordered_set tempRoots; + boost::unordered_flat_set tempRoots; // Hash part of the store path currently being deleted, if // any. @@ -672,7 +669,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) } }; - std::unordered_map referrersCache; + boost::unordered_flat_map> referrersCache; /* Helper function that visits all paths reachable from `start` via the referrers edges and optionally derivers and derivation diff --git a/src/libstore/include/nix/store/derivations.hh b/src/libstore/include/nix/store/derivations.hh index 18479b425..08bb7183f 100644 --- a/src/libstore/include/nix/store/derivations.hh +++ b/src/libstore/include/nix/store/derivations.hh @@ -11,7 +11,7 @@ #include "nix/util/sync.hh" #include "nix/util/variant-wrapper.hh" -#include +#include #include namespace nix { @@ -507,13 +507,23 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut */ std::map staticOutputHashes(Store & store, const Derivation & drv); +struct DrvHashFct +{ + using is_avalanching = std::true_type; + + std::size_t operator()(const StorePath & path) const noexcept + { + return std::hash{}(path.to_string()); + } +}; + /** * Memoisation of hashDerivationModulo(). */ -typedef std::map DrvHashes; +typedef boost::concurrent_flat_map DrvHashes; // FIXME: global, though at least thread-safe. 
-extern Sync drvHashes; +extern DrvHashes drvHashes; struct Source; struct Sink; diff --git a/src/libstore/include/nix/store/gc-store.hh b/src/libstore/include/nix/store/gc-store.hh index 9f2255025..fba9d6079 100644 --- a/src/libstore/include/nix/store/gc-store.hh +++ b/src/libstore/include/nix/store/gc-store.hh @@ -1,13 +1,13 @@ #pragma once ///@file -#include - #include "nix/store/store-api.hh" +#include +#include namespace nix { -typedef std::unordered_map> Roots; +typedef boost::unordered_flat_map, std::hash> Roots; struct GCOptions { diff --git a/src/libstore/include/nix/store/local-store.hh b/src/libstore/include/nix/store/local-store.hh index 1184be8ed..b871aaee2 100644 --- a/src/libstore/include/nix/store/local-store.hh +++ b/src/libstore/include/nix/store/local-store.hh @@ -11,7 +11,7 @@ #include #include #include -#include +#include namespace nix { @@ -442,7 +442,7 @@ private: std::pair createTempDirInStore(); - typedef std::unordered_set InodeHash; + typedef boost::unordered_flat_set InodeHash; InodeHash loadInodeHash(); Strings readDirectoryIgnoringInodes(const Path & path, const InodeHash & inodeHash); diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index 8de41fe19..c5e1747c1 100644 --- a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -1,5 +1,3 @@ -#include - #include "nix/store/derivations.hh" #include "nix/store/parsed-derivations.hh" #include "nix/store/derivation-options.hh" @@ -13,6 +11,8 @@ #include "nix/store/filetransfer.hh" #include "nix/util/strings.hh" +#include + namespace nix { void Store::computeFSClosure( @@ -106,7 +106,7 @@ MissingPaths Store::queryMissing(const std::vector & targets) struct State { - std::unordered_set done; + boost::unordered_flat_set done; MissingPaths res; }; diff --git a/src/libutil/include/nix/util/canon-path.hh b/src/libutil/include/nix/util/canon-path.hh index cb8b4325d..334c9e332 100644 --- a/src/libutil/include/nix/util/canon-path.hh +++ b/src/libutil/include/nix/util/canon-path.hh @@ -258,7 +258,7 @@ public: */ std::string makeRelative(const CanonPath & path) const; - friend class std::hash; + friend struct std::hash; }; std::ostream & operator<<(std::ostream & stream, const CanonPath & path); @@ -268,6 +268,8 @@ std::ostream & operator<<(std::ostream & stream, const CanonPath & path); template<> struct std::hash { + using is_avalanching = std::true_type; + std::size_t operator()(const nix::CanonPath & s) const noexcept { return std::hash{}(s.path); diff --git a/src/libutil/linux/cgroup.cc b/src/libutil/linux/cgroup.cc index 20d19ae7d..9e78ac6d2 100644 --- a/src/libutil/linux/cgroup.cc +++ b/src/libutil/linux/cgroup.cc @@ -4,10 +4,10 @@ #include "nix/util/file-system.hh" #include "nix/util/finally.hh" +#include #include #include #include -#include #include #include @@ -76,7 +76,7 @@ static CgroupStats destroyCgroup(const std::filesystem::path & cgroup, bool retu int round = 1; - std::unordered_set pidsShown; + boost::unordered_flat_set pidsShown; while (true) { auto pids = tokenizeString>(readFile(procsFile)); diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index b932f6ab5..877c63331 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -104,7 +104,7 @@ std::optional PosixSourceAccessor::cachedLstat(const CanonPath & pa if (cache.size() >= 16384) cache.clear(); - cache.emplace(absPath, st); + cache.emplace(std::move(absPath), st); return st; } diff --git a/src/nix/env.cc b/src/nix/env.cc index d91ee72d7..c8fb5bee0 100644 --- 
a/src/nix/env.cc +++ b/src/nix/env.cc @@ -71,7 +71,7 @@ struct CmdShell : InstallablesCommand, MixEnvironment auto outPaths = Installable::toStorePaths(getEvalStore(), store, Realise::Outputs, OperateOn::Output, installables); - std::unordered_set done; + boost::unordered_flat_set> done; std::queue todo; for (auto & path : outPaths) todo.push(path); From 9dbc2cae4f70f2243fedc618fe90b132a67d6441 Mon Sep 17 00:00:00 2001 From: Philipp Otterbein Date: Sun, 7 Sep 2025 14:16:32 +0200 Subject: [PATCH 363/382] hashmaps with string keys: add transparent lookups --- src/libexpr/include/nix/expr/eval.hh | 9 +++++---- src/libstore/gc.cc | 19 ++++++++++++------- src/libstore/include/nix/store/gc-store.hh | 6 +++++- 3 files changed, 22 insertions(+), 12 deletions(-) diff --git a/src/libexpr/include/nix/expr/eval.hh b/src/libexpr/include/nix/expr/eval.hh index 75ed12664..f1cead47b 100644 --- a/src/libexpr/include/nix/expr/eval.hh +++ b/src/libexpr/include/nix/expr/eval.hh @@ -474,7 +474,8 @@ private: LookupPath lookupPath; - boost::unordered_flat_map> lookupPathResolved; + boost::unordered_flat_map, StringViewHash, std::equal_to<>> + lookupPathResolved; /** * Cache used by prim_match(). @@ -751,8 +752,8 @@ public: boost::unordered_flat_map< std::string, Value *, - std::hash, - std::equal_to, + StringViewHash, + std::equal_to<>, traceable_allocator>> internalPrimOps; @@ -1019,7 +1020,7 @@ private: bool countCalls; - typedef boost::unordered_flat_map PrimOpCalls; + typedef boost::unordered_flat_map> PrimOpCalls; PrimOpCalls primOpCalls; typedef boost::unordered_flat_map FunctionCalls; diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index dd33f5f84..fdbc670df 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -311,7 +311,12 @@ Roots LocalStore::findRoots(bool censor) /** * Key is a mere string because cannot has path with macOS's libc++ */ -typedef boost::unordered_flat_map> UncheckedRoots; +typedef boost::unordered_flat_map< + std::string, + boost::unordered_flat_set>, + StringViewHash, + std::equal_to<>> + UncheckedRoots; static void readProcLink(const std::filesystem::path & file, UncheckedRoots & roots) { @@ -325,7 +330,7 @@ static void readProcLink(const std::filesystem::path & file, UncheckedRoots & ro throw; } if (buf.is_absolute()) - roots[buf.string()].emplace(file.string()); + roots[buf].emplace(file.string()); } static std::string quoteRegexChars(const std::string & raw) @@ -466,7 +471,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) { // The temp roots only store the hash part to make it easier to // ignore suffixes like '.lock', '.chroot' and '.check'. - boost::unordered_flat_set tempRoots; + boost::unordered_flat_set> tempRoots; // Hash part of the store path currently being deleted, if // any. 
@@ -575,9 +580,9 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) auto storePath = maybeParseStorePath(path); if (storePath) { debug("got new GC root '%s'", path); - auto hashPart = std::string(storePath->hashPart()); + auto hashPart = storePath->hashPart(); auto shared(_shared.lock()); - shared->tempRoots.insert(hashPart); + shared->tempRoots.emplace(hashPart); /* If this path is currently being deleted, then we have to wait until deletion is finished to ensure that @@ -629,7 +634,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) Roots tempRoots; findTempRoots(tempRoots, true); for (auto & root : tempRoots) { - _shared.lock()->tempRoots.insert(std::string(root.first.hashPart())); + _shared.lock()->tempRoots.emplace(root.first.hashPart()); roots.insert(root.first); } @@ -736,7 +741,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) return; { - auto hashPart = std::string(path->hashPart()); + auto hashPart = path->hashPart(); auto shared(_shared.lock()); if (shared->tempRoots.count(hashPart)) { debug("cannot delete '%s' because it's a temporary root", printStorePath(*path)); diff --git a/src/libstore/include/nix/store/gc-store.hh b/src/libstore/include/nix/store/gc-store.hh index fba9d6079..5a4a6db14 100644 --- a/src/libstore/include/nix/store/gc-store.hh +++ b/src/libstore/include/nix/store/gc-store.hh @@ -7,7 +7,11 @@ namespace nix { -typedef boost::unordered_flat_map, std::hash> Roots; +typedef boost::unordered_flat_map< + StorePath, + boost::unordered_flat_set>, + std::hash> + Roots; struct GCOptions { From 4df1a3ca7661ee4aa1f0c626c577f60a487d30f3 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 11 Sep 2025 01:51:48 +0300 Subject: [PATCH 364/382] libexpr: Make emptyBindings a global constant This object is always constant and will never get modified. Having it as a global (constant) static is much easier and unclutters the EvalState. Same idea as in https://git.lix.systems/lix-project/lix/commit/f017f9ddd336e32a5ed1ee835f1c6c7e73a052ae. Co-authored-by: eldritch horrors --- src/libexpr/attr-path.cc | 4 ++-- src/libexpr/attr-set.cc | 4 +++- src/libexpr/eval.cc | 1 - src/libexpr/include/nix/expr/attr-set.hh | 6 ++++++ src/libexpr/include/nix/expr/eval.hh | 2 -- src/libexpr/primops.cc | 4 ++-- src/nix/flake.cc | 2 +- src/nix/nix-env/user-env.cc | 2 +- src/nix/upgrade-nix.cc | 2 +- 9 files changed, 16 insertions(+), 11 deletions(-) diff --git a/src/libexpr/attr-path.cc b/src/libexpr/attr-path.cc index b02b08db4..58705bfa1 100644 --- a/src/libexpr/attr-path.cc +++ b/src/libexpr/attr-path.cc @@ -110,8 +110,8 @@ std::pair findPackageFilename(EvalState & state, Value & v { Value * v2; try { - auto dummyArgs = state.allocBindings(0); - v2 = findAlongAttrPath(state, "meta.position", *dummyArgs, v).first; + auto & dummyArgs = Bindings::emptyBindings; + v2 = findAlongAttrPath(state, "meta.position", dummyArgs, v).first; } catch (Error &) { throw NoPositionInfo("package '%s' has no source location information", what); } diff --git a/src/libexpr/attr-set.cc b/src/libexpr/attr-set.cc index eb44b0dd9..48d4c4d4a 100644 --- a/src/libexpr/attr-set.cc +++ b/src/libexpr/attr-set.cc @@ -5,13 +5,15 @@ namespace nix { +Bindings Bindings::emptyBindings; + /* Allocate a new array of attributes for an attribute set with a specific capacity. The space is implicitly reserved after the Bindings structure. 
*/ Bindings * EvalState::allocBindings(size_t capacity) { if (capacity == 0) - return &emptyBindings; + return &Bindings::emptyBindings; if (capacity > std::numeric_limits::max()) throw Error("attribute set of size %d is too big", capacity); nrAttrsets++; diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index df4e52e5d..b586c3409 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -202,7 +202,6 @@ EvalState::EvalState( , settings{settings} , symbols(StaticEvalSymbols::staticSymbolTable()) , repair(NoRepair) - , emptyBindings(Bindings()) , storeFS(makeMountedSourceAccessor({ {CanonPath::root, makeEmptySourceAccessor()}, /* In the pure eval case, we can simply require diff --git a/src/libexpr/include/nix/expr/attr-set.hh b/src/libexpr/include/nix/expr/attr-set.hh index b5e927a7e..4ab54c8eb 100644 --- a/src/libexpr/include/nix/expr/attr-set.hh +++ b/src/libexpr/include/nix/expr/attr-set.hh @@ -54,6 +54,12 @@ public: typedef uint32_t size_t; PosIdx pos; + /** + * An instance of bindings objects with 0 attributes. + * This object must never be modified. + */ + static Bindings emptyBindings; + private: size_t size_ = 0; Attr attrs[0]; diff --git a/src/libexpr/include/nix/expr/eval.hh b/src/libexpr/include/nix/expr/eval.hh index 5015a009b..0b91645ea 100644 --- a/src/libexpr/include/nix/expr/eval.hh +++ b/src/libexpr/include/nix/expr/eval.hh @@ -313,8 +313,6 @@ public: */ RepairFlag repair; - Bindings emptyBindings; - /** * Empty list constant. */ diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 9ba417c32..2a3eec672 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -3326,14 +3326,14 @@ static void prim_functionArgs(EvalState & state, const PosIdx pos, Value ** args { state.forceValue(*args[0], pos); if (args[0]->isPrimOpApp() || args[0]->isPrimOp()) { - v.mkAttrs(&state.emptyBindings); + v.mkAttrs(&Bindings::emptyBindings); return; } if (!args[0]->isLambda()) state.error("'functionArgs' requires a function").atPos(pos).debugThrow(); if (!args[0]->lambda().fun->hasFormals()) { - v.mkAttrs(&state.emptyBindings); + v.mkAttrs(&Bindings::emptyBindings); return; } diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 8d6387c9d..3b1e2f5e4 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -522,7 +522,7 @@ struct CmdFlakeCheck : FlakeCommand auto checkNixOSConfiguration = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { Activity act(*logger, lvlInfo, actUnknown, fmt("checking NixOS configuration '%s'", attrPath)); - Bindings & bindings(*state->allocBindings(0)); + Bindings & bindings = Bindings::emptyBindings; auto vToplevel = findAlongAttrPath(*state, "config.system.build.toplevel", bindings, v).first; state->forceValue(*vToplevel, pos); if (!state->isDerivation(*vToplevel)) diff --git a/src/nix/nix-env/user-env.cc b/src/nix/nix-env/user-env.cc index 4ed93135d..552172825 100644 --- a/src/nix/nix-env/user-env.cc +++ b/src/nix/nix-env/user-env.cc @@ -24,7 +24,7 @@ PackageInfos queryInstalled(EvalState & state, const Path & userEnv) if (pathExists(manifestFile)) { Value v; state.evalFile(state.rootPath(CanonPath(manifestFile)).resolveSymlinks(), v); - Bindings & bindings(*state.allocBindings(0)); + Bindings & bindings = Bindings::emptyBindings; getDerivations(state, v, "", bindings, elems, false); } return elems; diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc index 48235a27f..f26613bf8 100644 --- a/src/nix/upgrade-nix.cc +++ b/src/nix/upgrade-nix.cc @@ -162,7 +162,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand 
auto state = std::make_unique(LookupPath{}, store, fetchSettings, evalSettings); auto v = state->allocValue(); state->eval(state->parseExprFromString(res.data, state->rootPath(CanonPath("/no-such-path"))), *v); - Bindings & bindings(*state->allocBindings(0)); + Bindings & bindings = Bindings::emptyBindings; auto v2 = findAlongAttrPath(*state, settings.thisSystem, bindings, *v).first; return store->parseStorePath( From 462b9ac49c14c4751c2f56b79297124427fb71f8 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 8 Sep 2025 01:20:31 +0300 Subject: [PATCH 365/382] libexpr: Make Value::isa and Value::getStorage private methods This was always intended to be the case, but accidentally left in the public interface. --- src/libexpr/include/nix/expr/value.hh | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libexpr/include/nix/expr/value.hh b/src/libexpr/include/nix/expr/value.hh index 82db1a775..0b10b78b5 100644 --- a/src/libexpr/include/nix/expr/value.hh +++ b/src/libexpr/include/nix/expr/value.hh @@ -833,6 +833,7 @@ struct Value : public ValueStorage { friend std::string showType(const Value & v); +private: template bool isa() const noexcept { From 5db4b0699ce880e8a4a2e836dd536834718da7a3 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 11 Sep 2025 01:53:41 +0300 Subject: [PATCH 366/382] libexpr: Make constant Values global constants, move out of EvalState These constant Values have no business being in the EvalState in the first place. The ultimate goal is to get rid of the ugly `getBuiltins`, and its reliance (in `createBaseEnv`) on these global constants is getting in the way. Same idea as in https://git.lix.systems/lix-project/lix/commit/f017f9ddd336e32a5ed1ee835f1c6c7e73a052ae. Co-authored-by: eldritch horrors --- src/libexpr/eval.cc | 8 ++------ src/libexpr/include/nix/expr/eval.hh | 26 ------------------------ src/libexpr/include/nix/expr/value.hh | 28 ++++++++++++++++++++++++++ src/libexpr/meson.build | 1 + src/libexpr/primops.cc | 12 +++++------ src/libexpr/value.cc | 29 +++++++++++++++++++++++++++ src/nix/nix-env/nix-env.cc | 2 +- 7 files changed, 67 insertions(+), 39 deletions(-) create mode 100644 src/libexpr/value.cc diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index b586c3409..3a53ecf79 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -284,10 +284,6 @@ EvalState::EvalState( static_assert(sizeof(Env) <= 16, "environment must be <= 16 bytes"); - vEmptyList.mkList(buildList(0)); - vNull.mkNull(); - vTrue.mkBool(true); - vFalse.mkBool(false); vStringRegular.mkStringNoCopy("regular"); vStringDirectory.mkStringNoCopy("directory"); vStringSymlink.mkStringNoCopy("symlink"); @@ -894,7 +890,7 @@ ListBuilder::ListBuilder(EvalState & state, size_t size) Value * EvalState::getBool(bool b) { - return b ? &vTrue : &vFalse; + return b ? &Value::vTrue : &Value::vFalse; } unsigned long nrThunks = 0; @@ -1300,7 +1296,7 @@ void ExprList::eval(EvalState & state, Env & env, Value & v) Value * ExprList::maybeThunk(EvalState & state, Env & env) { if (elems.empty()) { - return &state.vEmptyList; + return &Value::vEmptyList; } return Expr::maybeThunk(state, env); } diff --git a/src/libexpr/include/nix/expr/eval.hh b/src/libexpr/include/nix/expr/eval.hh index 0b91645ea..430e334b8 100644 --- a/src/libexpr/include/nix/expr/eval.hh +++ b/src/libexpr/include/nix/expr/eval.hh @@ -313,32 +313,6 @@ public: */ RepairFlag repair; - /** - * Empty list constant. - */ - Value vEmptyList; - - /** - * `null` constant. - * - * This is _not_ a singleton.
Pointer equality is _not_ sufficient. - */ - Value vNull; - - /** - * `true` constant. - * - * This is _not_ a singleton. Pointer equality is _not_ sufficient. - */ - Value vTrue; - - /** - * `true` constant. - * - * This is _not_ a singleton. Pointer equality is _not_ sufficient. - */ - Value vFalse; - /** `"regular"` */ Value vStringRegular; /** `"directory"` */ diff --git a/src/libexpr/include/nix/expr/value.hh b/src/libexpr/include/nix/expr/value.hh index 0b10b78b5..c74588a31 100644 --- a/src/libexpr/include/nix/expr/value.hh +++ b/src/libexpr/include/nix/expr/value.hh @@ -833,6 +833,34 @@ struct Value : public ValueStorage { friend std::string showType(const Value & v); + /** + * Empty list constant. + * + * This is _not_ a singleton. Pointer equality is _not_ sufficient. + */ + static Value vEmptyList; + + /** + * `null` constant. + * + * This is _not_ a singleton. Pointer equality is _not_ sufficient. + */ + static Value vNull; + + /** + * `true` constant. + * + * This is _not_ a singleton. Pointer equality is _not_ sufficient. + */ + static Value vTrue; + + /** + * `true` constant. + * + * This is _not_ a singleton. Pointer equality is _not_ sufficient. + */ + static Value vFalse; + private: template bool isa() const noexcept diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index 00fb82e3c..40d3f390b 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -163,6 +163,7 @@ sources = files( 'search-path.cc', 'value-to-json.cc', 'value-to-xml.cc', + 'value.cc', 'value/context.cc', ) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 2a3eec672..f099e060e 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1075,11 +1075,11 @@ static void prim_tryEval(EvalState & state, const PosIdx pos, Value ** args, Val try { state.forceValue(*args[0], pos); attrs.insert(state.s.value, args[0]); - attrs.insert(state.symbols.create("success"), &state.vTrue); + attrs.insert(state.symbols.create("success"), &Value::vTrue); } catch (AssertionError & e) { // `value = false;` is unfortunate but removing it is a breaking change. - attrs.insert(state.s.value, &state.vFalse); - attrs.insert(state.symbols.create("success"), &state.vFalse); + attrs.insert(state.s.value, &Value::vFalse); + attrs.insert(state.symbols.create("success"), &Value::vFalse); } // restore the debugRepl pointer if we saved it earlier. 
@@ -4613,7 +4613,7 @@ void prim_match(EvalState & state, const PosIdx pos, Value ** args, Value & v) auto list = state.buildList(match.size() - 1); for (const auto & [i, v2] : enumerate(list)) if (!match[i + 1].matched) - v2 = &state.vNull; + v2 = &Value::vNull; else v2 = mkString(state, match[i + 1]); v.mkList(list); @@ -4705,7 +4705,7 @@ void prim_split(EvalState & state, const PosIdx pos, Value ** args, Value & v) auto list2 = state.buildList(slen); for (const auto & [si, v2] : enumerate(list2)) { if (!match[si + 1].matched) - v2 = &state.vNull; + v2 = &Value::vNull; else v2 = mkString(state, match[si + 1]); } @@ -5059,7 +5059,7 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) addConstant( "null", - &vNull, + &Value::vNull, { .type = nNull, .doc = R"( diff --git a/src/libexpr/value.cc b/src/libexpr/value.cc new file mode 100644 index 000000000..07d036b0d --- /dev/null +++ b/src/libexpr/value.cc @@ -0,0 +1,29 @@ +#include "nix/expr/value.hh" + +namespace nix { + +Value Value::vEmptyList = []() { + Value res; + res.setStorage(List{.size = 0, .elems = nullptr}); + return res; +}(); + +Value Value::vNull = []() { + Value res; + res.mkNull(); + return res; +}(); + +Value Value::vTrue = []() { + Value res; + res.mkBool(true); + return res; +}(); + +Value Value::vFalse = []() { + Value res; + res.mkBool(false); + return res; +}(); + +} // namespace nix diff --git a/src/nix/nix-env/nix-env.cc b/src/nix/nix-env/nix-env.cc index f165c069c..01c8ccf4b 100644 --- a/src/nix/nix-env/nix-env.cc +++ b/src/nix/nix-env/nix-env.cc @@ -158,7 +158,7 @@ static void loadSourceExpr(EvalState & state, const SourcePath & path, Value & v directory). */ else if (st.type == SourceAccessor::tDirectory) { auto attrs = state.buildBindings(maxAttrs); - attrs.insert(state.symbols.create("_combineChannels"), &state.vEmptyList); + attrs.insert(state.symbols.create("_combineChannels"), &Value::vEmptyList); StringSet seen; getAllExprs(state, path, seen, attrs); v.mkAttrs(attrs); From c0b35c71cdb0470596f9e88d05063aa8faed6e10 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 12 Sep 2025 04:00:51 +0300 Subject: [PATCH 367/382] libexpr: Fix build without Boehm This should have been placed under the ifdef. --- src/libexpr/eval-gc.cc | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/libexpr/eval-gc.cc b/src/libexpr/eval-gc.cc index 28aed7c37..0d25f38f6 100644 --- a/src/libexpr/eval-gc.cc +++ b/src/libexpr/eval-gc.cc @@ -24,6 +24,10 @@ #endif +namespace nix { + +#if NIX_USE_BOEHMGC + /* * Ensure that Boehm satisfies our alignment requirements. This is the default configuration [^] * and this assertion should never break for any platform. Let's assert it just in case. @@ -35,9 +39,6 @@ */ static_assert(sizeof(void *) * 2 == GC_GRANULE_BYTES, "Boehm GC must use GC_GRANULE_WORDS = 2"); -namespace nix { - -#if NIX_USE_BOEHMGC /* Called when the Boehm GC runs out of memory. 
*/ static void * oomHandler(size_t requested) { From bdbc739d6e87f2abf2ded4d38bb0e161f457eb68 Mon Sep 17 00:00:00 2001 From: Jens Petersen Date: Fri, 12 Sep 2025 11:15:29 +0800 Subject: [PATCH 368/382] meson: add soversion to libraries (#13960) --- src/libcmd/meson.build | 1 + src/libexpr-c/meson.build | 1 + src/libexpr-test-support/meson.build | 1 + src/libexpr/meson.build | 1 + src/libfetchers-c/meson.build | 1 + src/libfetchers/meson.build | 1 + src/libflake-c/meson.build | 1 + src/libflake/meson.build | 1 + src/libmain-c/meson.build | 1 + src/libmain/meson.build | 1 + src/libstore-c/meson.build | 1 + src/libstore-test-support/meson.build | 1 + src/libstore/meson.build | 1 + src/libutil-c/meson.build | 1 + src/libutil-test-support/meson.build | 1 + src/libutil/meson.build | 1 + 16 files changed, 16 insertions(+) diff --git a/src/libcmd/meson.build b/src/libcmd/meson.build index 24e075246..6478fb226 100644 --- a/src/libcmd/meson.build +++ b/src/libcmd/meson.build @@ -95,6 +95,7 @@ this_library = library( 'nixcmd', sources, config_priv_h, + soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libexpr-c/meson.build b/src/libexpr-c/meson.build index 7c014d61d..01e60680b 100644 --- a/src/libexpr-c/meson.build +++ b/src/libexpr-c/meson.build @@ -50,6 +50,7 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nixexprc', sources, + soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libexpr-test-support/meson.build b/src/libexpr-test-support/meson.build index d762eb85e..1bc173ee4 100644 --- a/src/libexpr-test-support/meson.build +++ b/src/libexpr-test-support/meson.build @@ -44,6 +44,7 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nix-expr-test-support', sources, + soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, # TODO: Remove `-lrapidcheck` when https://github.com/emil-e/rapidcheck/pull/326 diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index 40d3f390b..409f4fac8 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -181,6 +181,7 @@ this_library = library( parser_tab, lexer_tab, generated_headers, + soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libfetchers-c/meson.build b/src/libfetchers-c/meson.build index 8542744b4..81b63780b 100644 --- a/src/libfetchers-c/meson.build +++ b/src/libfetchers-c/meson.build @@ -53,6 +53,7 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nixfetchersc', sources, + soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libfetchers/meson.build b/src/libfetchers/meson.build index 922a2c491..7c5ce1bc9 100644 --- a/src/libfetchers/meson.build +++ b/src/libfetchers/meson.build @@ -61,6 +61,7 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nixfetchers', sources, + soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libflake-c/meson.build b/src/libflake-c/meson.build index 933e06d90..e72694c2e 100644 --- a/src/libflake-c/meson.build +++ 
b/src/libflake-c/meson.build @@ -53,6 +53,7 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nixflakec', sources, + soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libflake/meson.build b/src/libflake/meson.build index 191d8f068..cb5f128a4 100644 --- a/src/libflake/meson.build +++ b/src/libflake/meson.build @@ -58,6 +58,7 @@ this_library = library( 'nixflake', sources, generated_headers, + soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libmain-c/meson.build b/src/libmain-c/meson.build index 9e26ad8ad..20b77aef2 100644 --- a/src/libmain-c/meson.build +++ b/src/libmain-c/meson.build @@ -45,6 +45,7 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nixmainc', sources, + soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libmain/meson.build b/src/libmain/meson.build index 4a90d2d83..e70967462 100644 --- a/src/libmain/meson.build +++ b/src/libmain/meson.build @@ -77,6 +77,7 @@ this_library = library( 'nixmain', sources, config_priv_h, + soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libstore-c/meson.build b/src/libstore-c/meson.build index f8eaef803..a48885780 100644 --- a/src/libstore-c/meson.build +++ b/src/libstore-c/meson.build @@ -46,6 +46,7 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nixstorec', sources, + soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libstore-test-support/meson.build b/src/libstore-test-support/meson.build index b2977941f..3a3ffe36e 100644 --- a/src/libstore-test-support/meson.build +++ b/src/libstore-test-support/meson.build @@ -44,6 +44,7 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nix-store-test-support', sources, + soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, # TODO: Remove `-lrapidcheck` when https://github.com/emil-e/rapidcheck/pull/326 diff --git a/src/libstore/meson.build b/src/libstore/meson.build index 7aeacbab7..77517bdfe 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -363,6 +363,7 @@ this_library = library( generated_headers, sources, config_priv_h, + soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libutil-c/meson.build b/src/libutil-c/meson.build index 8131c517c..9e1a43e80 100644 --- a/src/libutil-c/meson.build +++ b/src/libutil-c/meson.build @@ -53,6 +53,7 @@ this_library = library( 'nixutilc', sources, config_priv_h, + soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libutil-test-support/meson.build b/src/libutil-test-support/meson.build index 910f1d881..9ad139edb 100644 --- a/src/libutil-test-support/meson.build +++ b/src/libutil-test-support/meson.build @@ -41,6 +41,7 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nix-util-test-support', sources, + soversion : 0, 
dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, # TODO: Remove `-lrapidcheck` when https://github.com/emil-e/rapidcheck/pull/326 diff --git a/src/libutil/meson.build b/src/libutil/meson.build index cdffc892a..131f71034 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -197,6 +197,7 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nixutil', sources, + soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, From 0db2b8c8fe3d944a289a12fee3b3d8ecbeec5240 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 12 Sep 2025 20:43:34 +0300 Subject: [PATCH 369/382] Revert "meson: add soversion to libraries (#13960)" This reverts commit bdbc739d6e87f2abf2ded4d38bb0e161f457eb68. Such a change needs more thought put into it. By versioning shared libraries we'd make a false impression that libraries themselves are actually versioned and have some sort of stable ABI, which is not the case. This will be useful when C bindings become stable, but as long as they are experimental it does not make sense to set SONAME. Also this change should not have been backported, since it's severely breaking. --- src/libcmd/meson.build | 1 - src/libexpr-c/meson.build | 1 - src/libexpr-test-support/meson.build | 1 - src/libexpr/meson.build | 1 - src/libfetchers-c/meson.build | 1 - src/libfetchers/meson.build | 1 - src/libflake-c/meson.build | 1 - src/libflake/meson.build | 1 - src/libmain-c/meson.build | 1 - src/libmain/meson.build | 1 - src/libstore-c/meson.build | 1 - src/libstore-test-support/meson.build | 1 - src/libstore/meson.build | 1 - src/libutil-c/meson.build | 1 - src/libutil-test-support/meson.build | 1 - src/libutil/meson.build | 1 - 16 files changed, 16 deletions(-) diff --git a/src/libcmd/meson.build b/src/libcmd/meson.build index 6478fb226..24e075246 100644 --- a/src/libcmd/meson.build +++ b/src/libcmd/meson.build @@ -95,7 +95,6 @@ this_library = library( 'nixcmd', sources, config_priv_h, - soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libexpr-c/meson.build b/src/libexpr-c/meson.build index 01e60680b..7c014d61d 100644 --- a/src/libexpr-c/meson.build +++ b/src/libexpr-c/meson.build @@ -50,7 +50,6 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nixexprc', sources, - soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libexpr-test-support/meson.build b/src/libexpr-test-support/meson.build index 1bc173ee4..d762eb85e 100644 --- a/src/libexpr-test-support/meson.build +++ b/src/libexpr-test-support/meson.build @@ -44,7 +44,6 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nix-expr-test-support', sources, - soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, # TODO: Remove `-lrapidcheck` when https://github.com/emil-e/rapidcheck/pull/326 diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index 409f4fac8..40d3f390b 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -181,7 +181,6 @@ this_library = library( parser_tab, lexer_tab, generated_headers, - soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : 
linker_export_flags, diff --git a/src/libfetchers-c/meson.build b/src/libfetchers-c/meson.build index 81b63780b..8542744b4 100644 --- a/src/libfetchers-c/meson.build +++ b/src/libfetchers-c/meson.build @@ -53,7 +53,6 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nixfetchersc', sources, - soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libfetchers/meson.build b/src/libfetchers/meson.build index 7c5ce1bc9..922a2c491 100644 --- a/src/libfetchers/meson.build +++ b/src/libfetchers/meson.build @@ -61,7 +61,6 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nixfetchers', sources, - soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libflake-c/meson.build b/src/libflake-c/meson.build index e72694c2e..933e06d90 100644 --- a/src/libflake-c/meson.build +++ b/src/libflake-c/meson.build @@ -53,7 +53,6 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nixflakec', sources, - soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libflake/meson.build b/src/libflake/meson.build index cb5f128a4..191d8f068 100644 --- a/src/libflake/meson.build +++ b/src/libflake/meson.build @@ -58,7 +58,6 @@ this_library = library( 'nixflake', sources, generated_headers, - soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libmain-c/meson.build b/src/libmain-c/meson.build index 20b77aef2..9e26ad8ad 100644 --- a/src/libmain-c/meson.build +++ b/src/libmain-c/meson.build @@ -45,7 +45,6 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nixmainc', sources, - soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libmain/meson.build b/src/libmain/meson.build index e70967462..4a90d2d83 100644 --- a/src/libmain/meson.build +++ b/src/libmain/meson.build @@ -77,7 +77,6 @@ this_library = library( 'nixmain', sources, config_priv_h, - soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libstore-c/meson.build b/src/libstore-c/meson.build index a48885780..f8eaef803 100644 --- a/src/libstore-c/meson.build +++ b/src/libstore-c/meson.build @@ -46,7 +46,6 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nixstorec', sources, - soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libstore-test-support/meson.build b/src/libstore-test-support/meson.build index 3a3ffe36e..b2977941f 100644 --- a/src/libstore-test-support/meson.build +++ b/src/libstore-test-support/meson.build @@ -44,7 +44,6 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nix-store-test-support', sources, - soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, # TODO: Remove `-lrapidcheck` when https://github.com/emil-e/rapidcheck/pull/326 diff --git a/src/libstore/meson.build b/src/libstore/meson.build index 77517bdfe..7aeacbab7 100644 
--- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -363,7 +363,6 @@ this_library = library( generated_headers, sources, config_priv_h, - soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libutil-c/meson.build b/src/libutil-c/meson.build index 9e1a43e80..8131c517c 100644 --- a/src/libutil-c/meson.build +++ b/src/libutil-c/meson.build @@ -53,7 +53,6 @@ this_library = library( 'nixutilc', sources, config_priv_h, - soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, diff --git a/src/libutil-test-support/meson.build b/src/libutil-test-support/meson.build index 9ad139edb..910f1d881 100644 --- a/src/libutil-test-support/meson.build +++ b/src/libutil-test-support/meson.build @@ -41,7 +41,6 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nix-util-test-support', sources, - soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, # TODO: Remove `-lrapidcheck` when https://github.com/emil-e/rapidcheck/pull/326 diff --git a/src/libutil/meson.build b/src/libutil/meson.build index 131f71034..cdffc892a 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -197,7 +197,6 @@ subdir('nix-meson-build-support/windows-version') this_library = library( 'nixutil', sources, - soversion : 0, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, From f4c38278ca6634ea2a99c17cc191932238d0ee7b Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 12 Sep 2025 23:44:52 +0300 Subject: [PATCH 370/382] libexpr: Remove vString* Values from EvalState EvalState is too big and cluttered. These strings can be private constant statics. --- src/libexpr/eval.cc | 5 ---- src/libexpr/include/nix/expr/eval.hh | 9 ------ src/libexpr/primops.cc | 44 +++++++++++++++++++++++----- 3 files changed, 36 insertions(+), 22 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 4fe9e9e3a..f855dc67e 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -284,11 +284,6 @@ EvalState::EvalState( static_assert(sizeof(Env) <= 16, "environment must be <= 16 bytes"); - vStringRegular.mkStringNoCopy("regular"); - vStringDirectory.mkStringNoCopy("directory"); - vStringSymlink.mkStringNoCopy("symlink"); - vStringUnknown.mkStringNoCopy("unknown"); - /* Construct the Nix expression search path. */ assert(lookupPath.elements.empty()); if (!settings.pureEval) { diff --git a/src/libexpr/include/nix/expr/eval.hh b/src/libexpr/include/nix/expr/eval.hh index 9e0638de8..3639eab15 100644 --- a/src/libexpr/include/nix/expr/eval.hh +++ b/src/libexpr/include/nix/expr/eval.hh @@ -315,15 +315,6 @@ public: */ RepairFlag repair; - /** `"regular"` */ - Value vStringRegular; - /** `"directory"` */ - Value vStringDirectory; - /** `"symlink"` */ - Value vStringSymlink; - /** `"unknown"` */ - Value vStringUnknown; - /** * The accessor corresponding to `store`. 
*/ diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 47909af3b..0d5eb23ae 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -2243,19 +2243,45 @@ static RegisterPrimOp primop_hashFile({ .fun = prim_hashFile, }); -static Value * fileTypeToString(EvalState & state, SourceAccessor::Type type) +static const Value & fileTypeToString(EvalState & state, SourceAccessor::Type type) { - return type == SourceAccessor::Type::tRegular ? &state.vStringRegular - : type == SourceAccessor::Type::tDirectory ? &state.vStringDirectory - : type == SourceAccessor::Type::tSymlink ? &state.vStringSymlink - : &state.vStringUnknown; + struct Constants + { + Value regular; + Value directory; + Value symlink; + Value unknown; + }; + + static const Constants stringValues = []() { + Constants res; + res.regular.mkStringNoCopy("regular"); + res.directory.mkStringNoCopy("directory"); + res.symlink.mkStringNoCopy("symlink"); + res.unknown.mkStringNoCopy("unknown"); + return res; + }(); + +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wswitch-enum" + using enum SourceAccessor::Type; + switch (type) { + case tRegular: + return stringValues.regular; + case tDirectory: + return stringValues.directory; + case tSymlink: + return stringValues.symlink; + default: + return stringValues.unknown; + } } static void prim_readFileType(EvalState & state, const PosIdx pos, Value ** args, Value & v) { auto path = realisePath(state, pos, *args[0], std::nullopt); /* Retrieve the directory entry type and stringize it. */ - v = *fileTypeToString(state, path.lstat().type); + v = fileTypeToString(state, path.lstat().type); } static RegisterPrimOp primop_readFileType({ @@ -2299,7 +2325,9 @@ static void prim_readDir(EvalState & state, const PosIdx pos, Value ** args, Val } else { // This branch of the conditional is much more likely. // Here we just stringize the directory entry type. - attrs.insert(state.symbols.create(name), fileTypeToString(state, *type)); + // N.B. const_cast here is ok, because these values will never be modified, since + // only thunks are mutable - other types do not change once constructed. + attrs.insert(state.symbols.create(name), const_cast(&fileTypeToString(state, *type))); } } @@ -2674,7 +2702,7 @@ bool EvalState::callPathFilter(Value * filterFun, const SourcePath & path, PosId arg1.mkString(path.path.abs()); // assert that type is not "unknown" - Value * args[]{&arg1, fileTypeToString(*this, st.type)}; + Value * args[]{&arg1, const_cast(&fileTypeToString(*this, st.type))}; Value res; callFunction(*filterFun, args, res, pos); From aef431fbd1b41dde113683579cba1cc43ad8f2bb Mon Sep 17 00:00:00 2001 From: Philip Wilk Date: Fri, 12 Sep 2025 22:29:34 +0100 Subject: [PATCH 371/382] bugfix/3514: do not throw on substituter errors if other substituters are still enabled (#13301) ## Motivation Nix currently hard fails if a substituter is inaccessible, even when there are other substituters available, unless `fallback = true`. This breaks nix build, run, shell et al entirely. This would modify the default behaviour so that nix would actually use the other available substituters and not hard error. Here is an example, before and after the patch, using dotenv where I have manually stopped my own cache to trigger this issue. The initial error is really frustrating because there are other caches available.
![image](https://github.com/user-attachments/assets/b4aec474-52d1-497d-b4e8-6f5737d6acc7) ![image](https://github.com/user-attachments/assets/ee91fcd4-4a1a-4c33-bf88-3aee67ad3cc9) ## Context https://github.com/NixOS/nix/issues/3514#issuecomment-2905056198 is the earliest issue I could find, but there are many duplicates. There is an initial PR at https://github.com/NixOS/nix/pull/7188, but this appears to have been abandoned - over 2 years with no activity, then a review with no comments in January. There was a subsequent PR at https://github.com/NixOS/nix/pull/8983 but this was closed without merge - over a year without activity. I have visualised the current and proposed flows. I believe my logic flows line up with what is suggested in https://github.com/NixOS/nix/pull/7188#issuecomment-1375652870 but correct me if I am wrong. Current behaviour: ![current](https://github.com/user-attachments/assets/d9501b34-274c-4eb3-88c3-9021a482e364) Proposed behaviour: ![proposed](https://github.com/user-attachments/assets/8236e4f4-21ef-45d7-87e1-6c8d416e8c1c) [Charts in lucid](https://lucid.app/lucidchart/1b51b08d-6c4f-40e0-bf54-480df322cccf/view) Possible issues to think about: - I could not figure out where the curl error is created... I can't figure out how to swallow it and turn it into a warn or better yet, a debug log. - Unfortunately, in contrast with the previous point, I'm not sure how verbose we want the warnings/traces to be - personally I think that warning that a substituter has been disabled (when it happens), and that the next one is being used, is sufficient, but this is personal preference. --- src/libstore/build/substitution-goal.cc | 25 +++++++++++++++---------- src/libstore/store-api.cc | 25 ++++++++++++++++--------- 2 files changed, 31 insertions(+), 19 deletions(-) diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index ab95ea4a2..d219834f2 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -55,9 +55,14 @@ Goal::Co PathSubstitutionGoal::init() auto subs = settings.useSubstitutes ? getDefaultSubstituters() : std::list>(); bool substituterFailed = false; + std::optional lastStoresException = std::nullopt; for (const auto & sub : subs) { trace("trying next substituter"); + if (lastStoresException.has_value()) { + logError(lastStoresException->info()); + lastStoresException.reset(); + } cleanup(); @@ -80,19 +85,13 @@ Goal::Co PathSubstitutionGoal::init() try { // FIXME: make async info = sub->queryPathInfo(subPath ? *subPath : storePath); - } catch (InvalidPath &) { + } catch (InvalidPath & e) { continue; } catch (SubstituterDisabled & e) { - if (settings.tryFallback) - continue; - else - throw e; + continue; } catch (Error & e) { - if (settings.tryFallback) { - logError(e.info()); - continue; - } else - throw e; + lastStoresException = std::make_optional(std::move(e)); + continue; } if (info->path != storePath) { @@ -156,6 +155,12 @@ Goal::Co PathSubstitutionGoal::init() worker.failedSubstitutions++; worker.updateProgress(); } + if (lastStoresException.has_value()) { + if (!settings.tryFallback) { + throw *lastStoresException; + } else + logError(lastStoresException->info()); + } /* Hack: don't indicate failure if there were no substituters.
In that case the calling derivation should just do a diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 275b8c84b..ada57b358 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -1,3 +1,4 @@ +#include "nix/util/logging.hh" #include "nix/util/signature/local-keys.hh" #include "nix/util/source-accessor.hh" #include "nix/store/globals.hh" @@ -392,11 +393,14 @@ void Store::querySubstitutablePathInfos(const StorePathCAMap & paths, Substituta { if (!settings.useSubstitutes) return; - for (auto & sub : getDefaultSubstituters()) { - for (auto & path : paths) { - if (infos.count(path.first)) - // Choose first succeeding substituter. - continue; + + for (auto & path : paths) { + std::optional lastStoresException = std::nullopt; + for (auto & sub : getDefaultSubstituters()) { + if (lastStoresException.has_value()) { + logError(lastStoresException->info()); + lastStoresException.reset(); + } auto subPath(path.first); @@ -437,12 +441,15 @@ void Store::querySubstitutablePathInfos(const StorePathCAMap & paths, Substituta } catch (InvalidPath &) { } catch (SubstituterDisabled &) { } catch (Error & e) { - if (settings.tryFallback) - logError(e.info()); - else - throw; + lastStoresException = std::make_optional(std::move(e)); } } + if (lastStoresException.has_value()) { + if (!settings.tryFallback) { + throw *lastStoresException; + } else + logError(lastStoresException->info()); + } } } From c2427063199574b44e07a032fc44aa7e3e710559 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 12 Sep 2025 08:19:37 -0400 Subject: [PATCH 372/382] Move `json_avoids_null` to its own header This is because we need it in declarations where we should not be including the full `nlohmann/json.hpp`. Already can clean up by moving the experimental feature "instance". Also, make the `std::map` instance better by allowing for other comparison functions. --- .../include/nix/util/experimental-features.hh | 8 +++ src/libutil/include/nix/util/json-non-null.hh | 55 +++++++++++++++++++ src/libutil/include/nix/util/json-utils.hh | 51 +---------------- src/libutil/include/nix/util/meson.build | 1 + 4 files changed, 65 insertions(+), 50 deletions(-) create mode 100644 src/libutil/include/nix/util/json-non-null.hh diff --git a/src/libutil/include/nix/util/experimental-features.hh b/src/libutil/include/nix/util/experimental-features.hh index 1eabc3461..0a8f15863 100644 --- a/src/libutil/include/nix/util/experimental-features.hh +++ b/src/libutil/include/nix/util/experimental-features.hh @@ -3,6 +3,7 @@ #include "nix/util/error.hh" #include "nix/util/types.hh" +#include "nix/util/json-non-null.hh" #include @@ -89,6 +90,13 @@ public: MissingExperimentalFeature(ExperimentalFeature missingFeature); }; +/** + * `ExperimentalFeature` is always rendered as a string. + */ +template<> +struct json_avoids_null : std::true_type +{}; + /** * Semi-magic conversion to and from json. * See the nlohmann/json readme for more details. diff --git a/src/libutil/include/nix/util/json-non-null.hh b/src/libutil/include/nix/util/json-non-null.hh new file mode 100644 index 000000000..6bacce58f --- /dev/null +++ b/src/libutil/include/nix/util/json-non-null.hh @@ -0,0 +1,55 @@ +#pragma once +///@file + +#include +#include +#include +#include +#include + +namespace nix { + +/** + * For `adl_serializer>` below, we need to track what + * types are not already using `null`. Only for them can we use `null` + * to represent `std::nullopt`. 
+ */ +template +struct json_avoids_null; + +/** + * Handle numbers in default impl + */ +template +struct json_avoids_null : std::bool_constant::value> +{}; + +template<> +struct json_avoids_null : std::false_type +{}; + +template<> +struct json_avoids_null : std::true_type +{}; + +template<> +struct json_avoids_null : std::true_type +{}; + +template +struct json_avoids_null> : std::true_type +{}; + +template +struct json_avoids_null> : std::true_type +{}; + +template +struct json_avoids_null> : std::true_type +{}; + +template +struct json_avoids_null> : std::true_type +{}; + +} // namespace nix diff --git a/src/libutil/include/nix/util/json-utils.hh b/src/libutil/include/nix/util/json-utils.hh index 20c50f957..4b5fb4b21 100644 --- a/src/libutil/include/nix/util/json-utils.hh +++ b/src/libutil/include/nix/util/json-utils.hh @@ -6,6 +6,7 @@ #include "nix/util/error.hh" #include "nix/util/types.hh" +#include "nix/util/json-non-null.hh" namespace nix { @@ -59,56 +60,6 @@ Strings getStringList(const nlohmann::json & value); StringMap getStringMap(const nlohmann::json & value); StringSet getStringSet(const nlohmann::json & value); -/** - * For `adl_serializer>` below, we need to track what - * types are not already using `null`. Only for them can we use `null` - * to represent `std::nullopt`. - */ -template -struct json_avoids_null; - -/** - * Handle numbers in default impl - */ -template -struct json_avoids_null : std::bool_constant::value> -{}; - -template<> -struct json_avoids_null : std::false_type -{}; - -template<> -struct json_avoids_null : std::true_type -{}; - -template<> -struct json_avoids_null : std::true_type -{}; - -template -struct json_avoids_null> : std::true_type -{}; - -template -struct json_avoids_null> : std::true_type -{}; - -template -struct json_avoids_null> : std::true_type -{}; - -template -struct json_avoids_null> : std::true_type -{}; - -/** - * `ExperimentalFeature` is always rendered as a string. - */ -template<> -struct json_avoids_null : std::true_type -{}; - } // namespace nix namespace nlohmann { diff --git a/src/libutil/include/nix/util/meson.build b/src/libutil/include/nix/util/meson.build index bdf114259..07a4f1d11 100644 --- a/src/libutil/include/nix/util/meson.build +++ b/src/libutil/include/nix/util/meson.build @@ -42,6 +42,7 @@ headers = files( 'hash.hh', 'hilite.hh', 'json-impls.hh', + 'json-non-null.hh', 'json-utils.hh', 'logging.hh', 'lru-cache.hh', From c6d06ce486ad6b8e5d9e4a923ab750128e54e2db Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 12 Sep 2025 08:11:53 -0400 Subject: [PATCH 373/382] Fix hash error message Wrong number of arguments was causing a format assertion. 
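To illustrate the failure mode with a minimal standalone sketch (not code from this patch; it only assumes that the fmt()/BadHash machinery is layered on boost::format): a format string with more placeholders than bound arguments does not render a partial message, it fails at output time.

    #include <boost/format.hpp>
    #include <iostream>

    int main()
    {
        // Four placeholders but only two bound arguments, mirroring the old
        // BadHash call: rendering throws boost::io::too_few_args instead of
        // producing the intended error text.
        boost::format f("invalid %s hash '%s' %d %d");
        f % "base16" % "deadbeef";
        try {
            std::cout << f.str() << "\n";
        } catch (const boost::io::too_few_args & e) {
            std::cerr << "format assertion: " << e.what() << "\n";
        }
        return 0;
    }

The fix below feeds all four placeholders (encoding name, hash string, actual length, expected length), so the message renders as intended.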
--- src/libutil/hash.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index e469957a0..220181ed6 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -135,7 +135,8 @@ static Hash parseLowLevel(std::string_view rest, HashAlgorithm algo, DecodeNameP e.addTrace({}, "While decoding hash '%s'", rest); } if (d.size() != res.hashSize) - throw BadHash("invalid %s hash '%s' %d %d", pair.encodingName, rest); + throw BadHash( + "invalid %s hash '%s', length %d != expected length %d", pair.encodingName, rest, d.size(), res.hashSize); assert(res.hashSize); memcpy(res.hash, d.data(), res.hashSize); From 095ac66d4c22d0dcc928bbaa5d35bd1652f7c75a Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 12 Sep 2025 08:13:45 -0400 Subject: [PATCH 374/382] Introduce `Hash::parseExplicitFormatUnprefixed` --- src/libutil-tests/hash.cc | 54 ++++++++++++++++++++++++++-- src/libutil/hash.cc | 32 +++++++++++++---- src/libutil/include/nix/util/hash.hh | 9 +++++ 3 files changed, 87 insertions(+), 8 deletions(-) diff --git a/src/libutil-tests/hash.cc b/src/libutil-tests/hash.cc index f9d425d92..15e639180 100644 --- a/src/libutil-tests/hash.cc +++ b/src/libutil-tests/hash.cc @@ -1,13 +1,17 @@ #include #include +#include #include "nix/util/hash.hh" +#include "nix/util/tests/characterization.hh" namespace nix { -class BLAKE3HashTest : public virtual ::testing::Test +class HashTest : public CharacterizationTest { + std::filesystem::path unitTestData = getUnitTestData() / "hash"; + public: /** @@ -16,8 +20,14 @@ public: */ ExperimentalFeatureSettings mockXpSettings; -private: + std::filesystem::path goldenMaster(std::string_view testStem) const override + { + return unitTestData / testStem; + } +}; +class BLAKE3HashTest : public HashTest +{ void SetUp() override { mockXpSettings.set("experimental-features", "blake3-hashes"); @@ -137,6 +147,46 @@ TEST(hashString, testKnownSHA512Hashes2) "c7d329eeb6dd26545e96e55b874be909"); } +/* ---------------------------------------------------------------------------- + * parsing hashes + * --------------------------------------------------------------------------*/ + +TEST(hashParseExplicitFormatUnprefixed, testKnownSHA256Hashes1_correct) +{ + // values taken from: https://tools.ietf.org/html/rfc4634 + auto s = "abc"; + + auto hash = hashString(HashAlgorithm::SHA256, s); + ASSERT_EQ( + hash, + Hash::parseExplicitFormatUnprefixed( + "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", + HashAlgorithm::SHA256, + HashFormat::Base16)); +} + +TEST(hashParseExplicitFormatUnprefixed, testKnownSHA256Hashes1_wrongAlgo) +{ + // values taken from: https://tools.ietf.org/html/rfc4634 + ASSERT_THROW( + Hash::parseExplicitFormatUnprefixed( + "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", + HashAlgorithm::SHA1, + HashFormat::Base16), + BadHash); +} + +TEST(hashParseExplicitFormatUnprefixed, testKnownSHA256Hashes1_wrongBase) +{ + // values taken from: https://tools.ietf.org/html/rfc4634 + ASSERT_THROW( + Hash::parseExplicitFormatUnprefixed( + "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", + HashAlgorithm::SHA256, + HashFormat::Nix32), + BadHash); +} + /* ---------------------------------------------------------------------------- * parseHashFormat, parseHashFormatOpt, printHashFormat * --------------------------------------------------------------------------*/ diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 220181ed6..6715b8112 100644 --- a/src/libutil/hash.cc 
+++ b/src/libutil/hash.cc @@ -99,22 +99,37 @@ struct DecodeNamePair } // namespace +static DecodeNamePair baseExplicit(HashFormat format) +{ + switch (format) { + case HashFormat::Base16: + return {base16::decode, "base16"}; + case HashFormat::Nix32: + return {BaseNix32::decode, "nix32"}; + case HashFormat::Base64: + return {base64::decode, "Base64"}; + case HashFormat::SRI: + assert(false); + } +} + /** * Given the expected size of the message once decoded it, figure out * which encoding we are using by looking at the size of the encoded * message. */ -static DecodeNamePair baseFromSize(std::string_view rest, HashAlgorithm algo) +static HashFormat baseFromSize(std::string_view rest, HashAlgorithm algo) { auto hashSize = regularHashSize(algo); + if (rest.size() == base16::encodedLength(hashSize)) - return {base16::decode, "base16"}; + return HashFormat::Base16; if (rest.size() == BaseNix32::encodedLength(hashSize)) - return {BaseNix32::decode, "nix32"}; + return HashFormat::Nix32; if (rest.size() == base64::encodedLength(hashSize)) - return {base64::decode, "Base64"}; + return HashFormat::Base64; throw BadHash("hash '%s' has wrong length for hash algorithm '%s'", rest, printHashAlgo(algo)); } @@ -190,7 +205,7 @@ static Hash parseAnyHelper(std::string_view rest, auto resolveAlgo) } else { /* Otherwise, decide via the length of the hash (for the given algorithm) what base encoding it is. */ - return baseFromSize(rest, algo); + return baseExplicit(baseFromSize(rest, algo)); } }(); @@ -225,7 +240,12 @@ Hash Hash::parseAny(std::string_view original, std::optional optA Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo) { - return parseLowLevel(s, algo, baseFromSize(s, algo)); + return parseExplicitFormatUnprefixed(s, algo, baseFromSize(s, algo)); +} + +Hash Hash::parseExplicitFormatUnprefixed(std::string_view s, HashAlgorithm algo, HashFormat format) +{ + return parseLowLevel(s, algo, baseExplicit(format)); } Hash Hash::random(HashAlgorithm algo) diff --git a/src/libutil/include/nix/util/hash.hh b/src/libutil/include/nix/util/hash.hh index f4d137bd0..571b6acca 100644 --- a/src/libutil/include/nix/util/hash.hh +++ b/src/libutil/include/nix/util/hash.hh @@ -90,6 +90,15 @@ struct Hash */ static Hash parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo); + /** + * Like `parseNonSRIUnprefixed`, but the hash format has been + * explicitly given. + * + * @param explicitFormat cannot be SRI, but must be one of the + * "bases". + */ + static Hash parseExplicitFormatUnprefixed(std::string_view s, HashAlgorithm algo, HashFormat explicitFormat); + static Hash parseSRI(std::string_view original); public: From 20b532eab0f05e58e2080d5d62411d990daffb78 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sat, 13 Sep 2025 01:07:42 +0300 Subject: [PATCH 375/382] packaging: Drop legacy apple sdk pattern This has been dropped on unstable, and nix no longer compiled with an overridden nixpkgs input. On 25.05 these overrides already do nothing. Tested with: nix build .#packages.x86_64-darwin.nix-cli -L --override-input nixpkgs https://releases.nixos.org/nixos/unstable/nixos-25.11pre859555.ab0f3607a6c7/nixexprs.tar.xz The default deployment target on 25.05 is 11.3, so the 10.13 sdk override doesn't have to be updated at all, as evident from the fact that we didn't observe any issues with it.
--- packaging/dependencies.nix | 19 ------------------- src/libstore/package.nix | 2 -- 2 files changed, 21 deletions(-) diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix index 16dd34d0e..981c1aa48 100644 --- a/packaging/dependencies.nix +++ b/packaging/dependencies.nix @@ -10,27 +10,8 @@ stdenv, }: -let - prevStdenv = stdenv; -in - let inherit (pkgs) lib; - - stdenv = if prevStdenv.isDarwin && prevStdenv.isx86_64 then darwinStdenv else prevStdenv; - - # Fix the following error with the default x86_64-darwin SDK: - # - # error: aligned allocation function of type 'void *(std::size_t, std::align_val_t)' is only available on macOS 10.13 or newer - # - # Despite the use of the 10.13 deployment target here, the aligned - # allocation function Clang uses with this setting actually works - # all the way back to 10.6. - # NOTE: this is not just a version constraint, but a request to make Darwin - # provide this version level of support. Removing this minimum version - # request will regress the above error. - darwinStdenv = pkgs.overrideSDK prevStdenv { darwinMinVersion = "10.13"; }; - in scope: { inherit stdenv; diff --git a/src/libstore/package.nix b/src/libstore/package.nix index 47805547b..d890d2256 100644 --- a/src/libstore/package.nix +++ b/src/libstore/package.nix @@ -64,8 +64,6 @@ mkMesonLibrary (finalAttrs: { sqlite ] ++ lib.optional stdenv.hostPlatform.isLinux libseccomp - # There have been issues building these dependencies - ++ lib.optional stdenv.hostPlatform.isDarwin darwin.apple_sdk.libs.sandbox ++ lib.optional withAWS aws-sdk-cpp; propagatedBuildInputs = [ From a0b633dd2b7323a3c710bb7995a787b1a093d536 Mon Sep 17 00:00:00 2001 From: Glen Huang Date: Fri, 22 Aug 2025 16:13:51 +0800 Subject: [PATCH 376/382] doc: Rephrase store-object.md --- doc/manual/source/store/store-object.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/manual/source/store/store-object.md b/doc/manual/source/store/store-object.md index 10c2384fa..71ec772fb 100644 --- a/doc/manual/source/store/store-object.md +++ b/doc/manual/source/store/store-object.md @@ -20,7 +20,8 @@ The graph of references excluding self-references thus forms a [directed acyclic [directed acyclic graph]: @docroot@/glossary.md#gloss-directed-acyclic-graph -We can take the [transitive closure] of the references graph, which any pair of store objects have an edge not if there is a single reference from the first to the second, but a path of one or more references from the first to the second. +We can take the [transitive closure] of the references graph, in which any pair of store objects have an edge if a *path* of one or more references exists from the first to the second object. +(A single reference always forms a path which is one reference long, but longer paths may connect objects which have no direct reference between them.) The *requisites* of a store object are all store objects reachable by paths of references which start with given store object's references. 
[transitive closure]: https://en.wikipedia.org/wiki/Transitive_closure From 298ea97c12809e91fe89b485a6c0a24624fcb24f Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sat, 13 Sep 2025 09:19:07 +0300 Subject: [PATCH 377/382] libutil: Fix missing return warning ../hash.cc: In function 'nix::{anonymous}::DecodeNamePair nix::baseExplicit(HashFormat)': ../hash.cc:114:1: warning: control reaches end of non-void function [-Wreturn-type] 114 | } | ^ --- src/libutil/hash.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 6715b8112..b67dc7807 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -109,8 +109,9 @@ static DecodeNamePair baseExplicit(HashFormat format) case HashFormat::Base64: return {base64::decode, "Base64"}; case HashFormat::SRI: - assert(false); + break; } + unreachable(); } /** From 74be28820c9a6f0fbc44a5258ee25343f01ae563 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 13 Sep 2025 08:39:37 -0400 Subject: [PATCH 378/382] `ValidPathInfo`, `NarInfo`, turn funky constructor into static method This is more flexible, and needed for me to be able to reshuffle the inheritance bureaucracy to make the JSON instances more precise. --- src/libfetchers/tarball.cc | 5 ++--- src/libstore-tests/nar-info.cc | 5 ++--- src/libstore-tests/path-info.cc | 5 ++--- src/libstore-tests/serve-protocol.cc | 5 ++--- src/libstore-tests/worker-protocol.cc | 5 ++--- src/libstore/binary-cache-store.cc | 10 ++++----- src/libstore/include/nix/store/nar-info.hh | 11 +++++----- src/libstore/include/nix/store/path-info.hh | 4 ++-- src/libstore/local-store.cc | 2 +- src/libstore/make-content-addressed.cc | 5 ++--- src/libstore/path-info.cc | 22 +++++++++++-------- src/libstore/store-api.cc | 5 ++--- src/libstore/unix/build/derivation-builder.cc | 5 ++--- src/nix/profile.cc | 5 ++--- 14 files changed, 44 insertions(+), 50 deletions(-) diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index 8a8039b6b..b55837c9e 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -74,7 +74,7 @@ DownloadFileResult downloadFile( StringSink sink; dumpString(res.data, sink); auto hash = hashString(HashAlgorithm::SHA256, res.data); - ValidPathInfo info{ + auto info = ValidPathInfo::makeFromCA( *store, name, FixedOutputInfo{ @@ -82,8 +82,7 @@ DownloadFileResult downloadFile( .hash = hash, .references = {}, }, - hashString(HashAlgorithm::SHA256, sink.s), - }; + hashString(HashAlgorithm::SHA256, sink.s)); info.narSize = sink.s.size(); auto source = StringSource{sink.s}; store->addToStore(info, source, NoRepair, NoCheckSigs); diff --git a/src/libstore-tests/nar-info.cc b/src/libstore-tests/nar-info.cc index a73df1190..751c5e305 100644 --- a/src/libstore-tests/nar-info.cc +++ b/src/libstore-tests/nar-info.cc @@ -23,7 +23,7 @@ class NarInfoTest : public CharacterizationTest, public LibStoreTest static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) { - NarInfo info = ValidPathInfo{ + auto info = NarInfo::makeFromCA( store, "foo", FixedOutputInfo{ @@ -41,8 +41,7 @@ static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) .self = true, }, }, - Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), - }; + Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=")); info.narSize = 34878; if (includeImpureInfo) { info.deriver = StorePath{ diff --git a/src/libstore-tests/path-info.cc b/src/libstore-tests/path-info.cc index de5c95150..63310c1c3 100644 --- 
a/src/libstore-tests/path-info.cc +++ b/src/libstore-tests/path-info.cc @@ -29,7 +29,7 @@ static UnkeyedValidPathInfo makeEmpty() static ValidPathInfo makeFullKeyed(const Store & store, bool includeImpureInfo) { - ValidPathInfo info = ValidPathInfo{ + auto info = ValidPathInfo::makeFromCA( store, "foo", FixedOutputInfo{ @@ -47,8 +47,7 @@ static ValidPathInfo makeFullKeyed(const Store & store, bool includeImpureInfo) .self = true, }, }, - Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), - }; + Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=")); info.narSize = 34878; if (includeImpureInfo) { info.deriver = StorePath{ diff --git a/src/libstore-tests/serve-protocol.cc b/src/libstore-tests/serve-protocol.cc index 4cd7f101b..b513e1365 100644 --- a/src/libstore-tests/serve-protocol.cc +++ b/src/libstore-tests/serve-protocol.cc @@ -274,7 +274,7 @@ VERSIONED_CHARACTERIZATION_TEST( info; }), ({ - ValidPathInfo info{ + auto info = ValidPathInfo::makeFromCA( store, "foo", FixedOutputInfo{ @@ -291,8 +291,7 @@ VERSIONED_CHARACTERIZATION_TEST( .self = true, }, }, - Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), - }; + Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=")); info.deriver = StorePath{ "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", }; diff --git a/src/libstore-tests/worker-protocol.cc b/src/libstore-tests/worker-protocol.cc index a761c96dd..823d8d85a 100644 --- a/src/libstore-tests/worker-protocol.cc +++ b/src/libstore-tests/worker-protocol.cc @@ -515,7 +515,7 @@ VERSIONED_CHARACTERIZATION_TEST( info; }), ({ - ValidPathInfo info{ + auto info = ValidPathInfo::makeFromCA( store, "foo", FixedOutputInfo{ @@ -532,8 +532,7 @@ VERSIONED_CHARACTERIZATION_TEST( .self = true, }, }, - Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), - }; + Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=")); info.registrationTime = 23423; info.narSize = 34878; info; diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index f4e06305a..e08a1449b 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -366,7 +366,7 @@ StorePath BinaryCacheStore::addToStoreFromDump( repair, CheckSigs, [&](HashResult nar) { - ValidPathInfo info{ + auto info = ValidPathInfo::makeFromCA( *this, name, ContentAddressWithReferences::fromParts( @@ -378,8 +378,7 @@ StorePath BinaryCacheStore::addToStoreFromDump( // without modulus .self = false, }), - nar.hash, - }; + nar.hash); info.narSize = nar.numBytesDigested; return info; }) @@ -484,7 +483,7 @@ StorePath BinaryCacheStore::addToStore( repair, CheckSigs, [&](HashResult nar) { - ValidPathInfo info{ + auto info = ValidPathInfo::makeFromCA( *this, name, ContentAddressWithReferences::fromParts( @@ -496,8 +495,7 @@ StorePath BinaryCacheStore::addToStore( // without modulus .self = false, }), - nar.hash, - }; + nar.hash); info.narSize = nar.numBytesDigested; return info; }) diff --git a/src/libstore/include/nix/store/nar-info.hh b/src/libstore/include/nix/store/nar-info.hh index 39d75b0a9..1684837c6 100644 --- a/src/libstore/include/nix/store/nar-info.hh +++ b/src/libstore/include/nix/store/nar-info.hh @@ -18,19 +18,20 @@ struct NarInfo : ValidPathInfo NarInfo() = delete; - NarInfo(const StoreDirConfig & store, std::string name, ContentAddressWithReferences ca, Hash narHash) - : ValidPathInfo(store, std::move(name), std::move(ca), narHash) + NarInfo(ValidPathInfo info) + : ValidPathInfo{std::move(info)} { } 
NarInfo(StorePath path, Hash narHash) - : ValidPathInfo(std::move(path), narHash) + : NarInfo{ValidPathInfo{std::move(path), UnkeyedValidPathInfo(narHash)}} { } - NarInfo(const ValidPathInfo & info) - : ValidPathInfo(info) + static NarInfo + makeFromCA(const StoreDirConfig & store, std::string_view name, ContentAddressWithReferences ca, Hash narHash) { + return ValidPathInfo::makeFromCA(store, std::move(name), std::move(ca), narHash); } NarInfo(const StoreDirConfig & store, const std::string & s, const std::string & whence); diff --git a/src/libstore/include/nix/store/path-info.hh b/src/libstore/include/nix/store/path-info.hh index 9f341198c..cbc5abdb4 100644 --- a/src/libstore/include/nix/store/path-info.hh +++ b/src/libstore/include/nix/store/path-info.hh @@ -179,8 +179,8 @@ struct ValidPathInfo : UnkeyedValidPathInfo : UnkeyedValidPathInfo(info) , path(path) {}; - ValidPathInfo( - const StoreDirConfig & store, std::string_view name, ContentAddressWithReferences && ca, Hash narHash); + static ValidPathInfo + makeFromCA(const StoreDirConfig & store, std::string_view name, ContentAddressWithReferences && ca, Hash narHash); }; static_assert(std::is_move_assignable_v); diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index f848ddc70..4cadf5282 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1311,7 +1311,7 @@ StorePath LocalStore::addToStoreFromDump( syncParent(realPath); } - ValidPathInfo info{*this, name, std::move(desc), narHash.hash}; + auto info = ValidPathInfo::makeFromCA(*this, name, std::move(desc), narHash.hash); info.narSize = narHash.numBytesDigested; registerValidPath(info); } diff --git a/src/libstore/make-content-addressed.cc b/src/libstore/make-content-addressed.cc index ce4a36849..4a7b21c3b 100644 --- a/src/libstore/make-content-addressed.cc +++ b/src/libstore/make-content-addressed.cc @@ -45,7 +45,7 @@ std::map makeContentAddressed(Store & srcStore, Store & ds auto narModuloHash = hashModuloSink.finish().hash; - ValidPathInfo info{ + auto info = ValidPathInfo::makeFromCA( dstStore, path.name(), FixedOutputInfo{ @@ -53,8 +53,7 @@ std::map makeContentAddressed(Store & srcStore, Store & ds .hash = narModuloHash, .references = std::move(refs), }, - Hash::dummy, - }; + Hash::dummy); printInfo("rewriting '%s' to '%s'", pathS, dstStore.printStorePath(info.path)); diff --git a/src/libstore/path-info.cc b/src/libstore/path-info.cc index e3de5949d..270c532bb 100644 --- a/src/libstore/path-info.cc +++ b/src/libstore/path-info.cc @@ -124,25 +124,29 @@ Strings ValidPathInfo::shortRefs() const return refs; } -ValidPathInfo::ValidPathInfo( +ValidPathInfo ValidPathInfo::makeFromCA( const StoreDirConfig & store, std::string_view name, ContentAddressWithReferences && ca, Hash narHash) - : UnkeyedValidPathInfo(narHash) - , path(store.makeFixedOutputPathFromCA(name, ca)) { - this->ca = ContentAddress{ + ValidPathInfo res{ + store.makeFixedOutputPathFromCA(name, ca), + narHash, + }; + res.ca = ContentAddress{ .method = ca.getMethod(), .hash = ca.getHash(), }; - std::visit( + res.references = std::visit( overloaded{ - [this](TextInfo && ti) { this->references = std::move(ti.references); }, - [this](FixedOutputInfo && foi) { - this->references = std::move(foi.references.others); + [&](TextInfo && ti) { return std::move(ti.references); }, + [&](FixedOutputInfo && foi) { + auto references = std::move(foi.references.others); if (foi.references.self) - this->references.insert(path); + references.insert(res.path); + return references; }, }, 
std::move(ca).raw); + return res; } nlohmann::json diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index ada57b358..17748ec53 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -269,7 +269,7 @@ ValidPathInfo Store::addToStoreSlow( if (expectedCAHash && expectedCAHash != hash) throw Error("hash mismatch for '%s'", srcPath); - ValidPathInfo info{ + auto info = ValidPathInfo::makeFromCA( *this, name, ContentAddressWithReferences::fromParts( @@ -279,8 +279,7 @@ ValidPathInfo Store::addToStoreSlow( .others = references, .self = false, }), - narHash, - }; + narHash); info.narSize = narSize; if (!isValidPath(info.path)) { diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index d6979ab5f..770bdad4d 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -1591,12 +1591,11 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() assert(false); }(); - ValidPathInfo newInfo0{ + auto newInfo0 = ValidPathInfo::makeFromCA( store, outputPathName(drv.name, outputName), ContentAddressWithReferences::fromParts(outputHash.method, std::move(got), rewriteRefs()), - Hash::dummy, - }; + Hash::dummy); if (*scratchPath != newInfo0.path) { // If the path has some self-references, we need to rewrite // them. diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 0ed1face5..68005171f 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -257,7 +257,7 @@ struct ProfileManifest auto narHash = hashString(HashAlgorithm::SHA256, sink.s); - ValidPathInfo info{ + auto info = ValidPathInfo::makeFromCA( *store, "profile", FixedOutputInfo{ @@ -270,8 +270,7 @@ struct ProfileManifest .self = false, }, }, - narHash, - }; + narHash); info.narSize = sink.s.size(); StringSource source(sink.s); From e75501da3ecf2b4081bd17a9d22f008178671fc0 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sat, 13 Sep 2025 23:21:24 +0300 Subject: [PATCH 379/382] libexpr: Remove non-const iterators of Bindings --- src/libexpr/attr-set.cc | 3 +-- src/libexpr/include/nix/expr/attr-set.hh | 12 ------------ 2 files changed, 1 insertion(+), 14 deletions(-) diff --git a/src/libexpr/attr-set.cc b/src/libexpr/attr-set.cc index 48d4c4d4a..88474c36f 100644 --- a/src/libexpr/attr-set.cc +++ b/src/libexpr/attr-set.cc @@ -35,8 +35,7 @@ Value & BindingsBuilder::alloc(std::string_view name, PosIdx pos) void Bindings::sort() { - if (size_) - std::sort(begin(), end()); + std::sort(attrs, attrs + size_); } Value & Value::mkAttrs(BindingsBuilder & bindings) diff --git a/src/libexpr/include/nix/expr/attr-set.hh b/src/libexpr/include/nix/expr/attr-set.hh index 4ab54c8eb..5bf266e54 100644 --- a/src/libexpr/include/nix/expr/attr-set.hh +++ b/src/libexpr/include/nix/expr/attr-set.hh @@ -81,8 +81,6 @@ public: return !size_; } - typedef Attr * iterator; - typedef const Attr * const_iterator; void push_back(const Attr & attr) @@ -108,16 +106,6 @@ public: return nullptr; } - iterator begin() - { - return &attrs[0]; - } - - iterator end() - { - return &attrs[size_]; - } - const_iterator begin() const { return &attrs[0]; From 7295034362a2655d9e916db6dd3e1f89b393ed94 Mon Sep 17 00:00:00 2001 From: dramforever Date: Sun, 14 Sep 2025 05:39:19 +0800 Subject: [PATCH 380/382] libstore: Raise default connect-timeout to 15 secs This allows the weird network or DNS server fallback mechanism inside glibc to work, and prevents a "Resolving timed out after 5000 milliseconds" error. Read on for details. 
The DNS request stuff (dns-hosts) in glibc uses this fallback procedure to minimize network RTT in the ideal case while dealing with ill-behaving networks and DNS servers gracefully (see resolv.conf(5)): - Use sendmmsg() to send UDP DNS requests for IPv4 and IPv6 in parallel - If that times out (meaning that none or only one of the responses have been received), send the requests one by one, waiting for the response before sending the next request ("single-request") - If that still times out, try to use a different socket (hence different address) for each request ("single-request-reopen") The default timeout inside glibc is 5 seconds. Therefore, setting connect-timeout, and therefore CURLOPT_CONNECTTIMEOUT to 5 seconds prevents the single-request fallback, and setting it to even 10 seconds prevents the single-request-reopen fallback as well. The fallback decision is saved by glibc, but only thread-locally, and libcurl starts a new thread for getaddrinfo() for each connection. Therefore for every connection the fallback starts from sendmmsg() all over again. And since these are considered to have timed out by libcurl, even though getaddrinfo() might return a successful result, it is not cached in libcurl. While a user could tweak these with resolv.conf(5) options (e.g. using networking.resolvconf.extraOptions in NixOS), and indeed that is probably needed to avoid annoying delays, it still means that the default connect-timeout of 5 is too low. Raise it to give fallback a chance. --- src/libstore/include/nix/store/filetransfer.hh | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/libstore/include/nix/store/filetransfer.hh b/src/libstore/include/nix/store/filetransfer.hh index 6f541d463..2f2d59036 100644 --- a/src/libstore/include/nix/store/filetransfer.hh +++ b/src/libstore/include/nix/store/filetransfer.hh @@ -31,9 +31,17 @@ struct FileTransferSettings : Config )", {"binary-caches-parallel-connections"}}; + /* Do not set this too low. On glibc, getaddrinfo() contains fallback code + paths that deal with ill-behaved DNS servers. Setting this too low + prevents some fallbacks from occurring. + + See description of options timeout, single-request, single-request-reopen + in resolv.conf(5). Also see https://github.com/NixOS/nix/pull/13985 for + details on the interaction between getaddrinfo(3) behavior and libcurl + CURLOPT_CONNECTTIMEOUT. */ Setting connectTimeout{ this, - 5, + 15, "connect-timeout", R"( The timeout (in seconds) for establishing connections in the From ddabd94f82787bd4f47fff70818d16b0a0dbbfc0 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 14 Sep 2025 22:52:37 +0300 Subject: [PATCH 381/382] libexpr: Make Bindings::iterator a proper strong type instead of pointer As evident from the number of tests that were holding this API completely wrong (the end() iterator returned from find() is NEVER nullptr) we should not have this footgun. A proper strong type guarantees that this confusion will not happen again. Also this will be helpful down the road when Bindings becomes something smarter than an array of Attr. 
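As a sketch of the intended usage (illustrative only; it assumes the Bindings/Attr API exactly as defined in the hunks below, and `v`, `sym`, and `use` are placeholders):

    // With the old `typedef const Attr * const_iterator`, this null check
    // compiled but was always true, because find() returns end() on a miss,
    // never nullptr:
    //
    //     if (v.attrs()->find(sym) != nullptr) { ... }
    //
    // With a strong iterator type that comparison no longer compiles.
    // Callers either compare against end():
    if (auto it = v.attrs()->find(sym); it != v.attrs()->end())
        use(*it->value);
    // ...or use get(), which really does return nullptr when absent:
    if (auto * a = v.attrs()->get(sym))
        use(*a->value);

The tests converted below were making exactly that mistake, which is why they now call get() and compare the result against nullptr.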
--- src/libexpr-tests/primops.cc | 24 ++++---- src/libexpr-tests/trivial.cc | 12 ++-- src/libexpr/include/nix/expr/attr-set.hh | 74 +++++++++++++++++++++--- 3 files changed, 83 insertions(+), 27 deletions(-) diff --git a/src/libexpr-tests/primops.cc b/src/libexpr-tests/primops.cc index f3f7de8d9..aa4ef5e21 100644 --- a/src/libexpr-tests/primops.cc +++ b/src/libexpr-tests/primops.cc @@ -195,18 +195,18 @@ TEST_F(PrimOpTest, unsafeGetAttrPos) auto v = eval(expr); ASSERT_THAT(v, IsAttrsOfSize(3)); - auto file = v.attrs()->find(createSymbol("file")); + auto file = v.attrs()->get(createSymbol("file")); ASSERT_NE(file, nullptr); ASSERT_THAT(*file->value, IsString()); auto s = baseNameOf(file->value->string_view()); ASSERT_EQ(s, "foo.nix"); - auto line = v.attrs()->find(createSymbol("line")); + auto line = v.attrs()->get(createSymbol("line")); ASSERT_NE(line, nullptr); state.forceValue(*line->value, noPos); ASSERT_THAT(*line->value, IsIntEq(4)); - auto column = v.attrs()->find(createSymbol("column")); + auto column = v.attrs()->get(createSymbol("column")); ASSERT_NE(column, nullptr); state.forceValue(*column->value, noPos); ASSERT_THAT(*column->value, IsIntEq(3)); @@ -246,7 +246,7 @@ TEST_F(PrimOpTest, removeAttrsRetains) { auto v = eval("builtins.removeAttrs { x = 1; y = 2; } [\"x\"]"); ASSERT_THAT(v, IsAttrsOfSize(1)); - ASSERT_NE(v.attrs()->find(createSymbol("y")), nullptr); + ASSERT_NE(v.attrs()->get(createSymbol("y")), nullptr); } TEST_F(PrimOpTest, listToAttrsEmptyList) @@ -266,7 +266,7 @@ TEST_F(PrimOpTest, listToAttrs) { auto v = eval("builtins.listToAttrs [ { name = \"key\"; value = 123; } ]"); ASSERT_THAT(v, IsAttrsOfSize(1)); - auto key = v.attrs()->find(createSymbol("key")); + auto key = v.attrs()->get(createSymbol("key")); ASSERT_NE(key, nullptr); ASSERT_THAT(*key->value, IsIntEq(123)); } @@ -275,7 +275,7 @@ TEST_F(PrimOpTest, intersectAttrs) { auto v = eval("builtins.intersectAttrs { a = 1; b = 2; } { b = 3; c = 4; }"); ASSERT_THAT(v, IsAttrsOfSize(1)); - auto b = v.attrs()->find(createSymbol("b")); + auto b = v.attrs()->get(createSymbol("b")); ASSERT_NE(b, nullptr); ASSERT_THAT(*b->value, IsIntEq(3)); } @@ -293,11 +293,11 @@ TEST_F(PrimOpTest, functionArgs) auto v = eval("builtins.functionArgs ({ x, y ? 
123}: 1)"); ASSERT_THAT(v, IsAttrsOfSize(2)); - auto x = v.attrs()->find(createSymbol("x")); + auto x = v.attrs()->get(createSymbol("x")); ASSERT_NE(x, nullptr); ASSERT_THAT(*x->value, IsFalse()); - auto y = v.attrs()->find(createSymbol("y")); + auto y = v.attrs()->get(createSymbol("y")); ASSERT_NE(y, nullptr); ASSERT_THAT(*y->value, IsTrue()); } @@ -307,13 +307,13 @@ TEST_F(PrimOpTest, mapAttrs) auto v = eval("builtins.mapAttrs (name: value: value * 10) { a = 1; b = 2; }"); ASSERT_THAT(v, IsAttrsOfSize(2)); - auto a = v.attrs()->find(createSymbol("a")); + auto a = v.attrs()->get(createSymbol("a")); ASSERT_NE(a, nullptr); ASSERT_THAT(*a->value, IsThunk()); state.forceValue(*a->value, noPos); ASSERT_THAT(*a->value, IsIntEq(10)); - auto b = v.attrs()->find(createSymbol("b")); + auto b = v.attrs()->get(createSymbol("b")); ASSERT_NE(b, nullptr); ASSERT_THAT(*b->value, IsThunk()); state.forceValue(*b->value, noPos); @@ -839,11 +839,11 @@ TEST_P(ParseDrvNamePrimOpTest, parseDrvName) auto v = eval(expr); ASSERT_THAT(v, IsAttrsOfSize(2)); - auto name = v.attrs()->find(createSymbol("name")); + auto name = v.attrs()->get(createSymbol("name")); ASSERT_TRUE(name); ASSERT_THAT(*name->value, IsStringEq(expectedName)); - auto version = v.attrs()->find(createSymbol("version")); + auto version = v.attrs()->get(createSymbol("version")); ASSERT_TRUE(version); ASSERT_THAT(*version->value, IsStringEq(expectedVersion)); } diff --git a/src/libexpr-tests/trivial.cc b/src/libexpr-tests/trivial.cc index 02433234e..a287ce4d1 100644 --- a/src/libexpr-tests/trivial.cc +++ b/src/libexpr-tests/trivial.cc @@ -75,11 +75,11 @@ TEST_F(TrivialExpressionTest, updateAttrs) { auto v = eval("{ a = 1; } // { b = 2; a = 3; }"); ASSERT_THAT(v, IsAttrsOfSize(2)); - auto a = v.attrs()->find(createSymbol("a")); + auto a = v.attrs()->get(createSymbol("a")); ASSERT_NE(a, nullptr); ASSERT_THAT(*a->value, IsIntEq(3)); - auto b = v.attrs()->find(createSymbol("b")); + auto b = v.attrs()->get(createSymbol("b")); ASSERT_NE(b, nullptr); ASSERT_THAT(*b->value, IsIntEq(2)); } @@ -176,7 +176,7 @@ TEST_P(AttrSetMergeTrvialExpressionTest, attrsetMergeLazy) auto v = eval(expr); ASSERT_THAT(v, IsAttrsOfSize(1)); - auto a = v.attrs()->find(createSymbol("a")); + auto a = v.attrs()->get(createSymbol("a")); ASSERT_NE(a, nullptr); ASSERT_THAT(*a->value, IsThunk()); @@ -184,11 +184,11 @@ TEST_P(AttrSetMergeTrvialExpressionTest, attrsetMergeLazy) ASSERT_THAT(*a->value, IsAttrsOfSize(2)); - auto b = a->value->attrs()->find(createSymbol("b")); + auto b = a->value->attrs()->get(createSymbol("b")); ASSERT_NE(b, nullptr); ASSERT_THAT(*b->value, IsIntEq(1)); - auto c = a->value->attrs()->find(createSymbol("c")); + auto c = a->value->attrs()->get(createSymbol("c")); ASSERT_NE(c, nullptr); ASSERT_THAT(*c->value, IsIntEq(2)); } @@ -330,7 +330,7 @@ TEST_F(TrivialExpressionTest, bindOr) { auto v = eval("{ or = 1; }"); ASSERT_THAT(v, IsAttrsOfSize(1)); - auto b = v.attrs()->find(createSymbol("or")); + auto b = v.attrs()->get(createSymbol("or")); ASSERT_NE(b, nullptr); ASSERT_THAT(*b->value, IsIntEq(1)); } diff --git a/src/libexpr/include/nix/expr/attr-set.hh b/src/libexpr/include/nix/expr/attr-set.hh index 5bf266e54..132be163d 100644 --- a/src/libexpr/include/nix/expr/attr-set.hh +++ b/src/libexpr/include/nix/expr/attr-set.hh @@ -6,6 +6,7 @@ #include #include +#include namespace nix { @@ -81,7 +82,55 @@ public: return !size_; } - typedef const Attr * const_iterator; + class iterator + { + public: + using value_type = Attr; + using pointer = const value_type *; + using 
reference = const value_type &; + using difference_type = std::ptrdiff_t; + using iterator_category = std::forward_iterator_tag; + + friend class Bindings; + + private: + pointer ptr = nullptr; + + explicit iterator(pointer ptr) + : ptr(ptr) + { + } + + public: + iterator() = default; + + reference operator*() const + { + return *ptr; + } + + const value_type * operator->() const + { + return ptr; + } + + iterator & operator++() + { + ++ptr; + return *this; + } + + iterator operator++(int) + { + pointer tmp = ptr; + ++*this; + return iterator(tmp); + } + + bool operator==(const iterator & rhs) const = default; + }; + + using const_iterator = iterator; void push_back(const Attr & attr) { @@ -91,29 +140,33 @@ public: const_iterator find(Symbol name) const { Attr key(name, 0); - const_iterator i = std::lower_bound(begin(), end(), key); - if (i != end() && i->name == name) - return i; + auto first = attrs; + auto last = attrs + size_; + const Attr * i = std::lower_bound(first, last, key); + if (i != last && i->name == name) + return const_iterator{i}; return end(); } const Attr * get(Symbol name) const { Attr key(name, 0); - const_iterator i = std::lower_bound(begin(), end(), key); - if (i != end() && i->name == name) - return &*i; + auto first = attrs; + auto last = attrs + size_; + const Attr * i = std::lower_bound(first, last, key); + if (i != last && i->name == name) + return i; return nullptr; } const_iterator begin() const { - return &attrs[0]; + return const_iterator(attrs); } const_iterator end() const { - return &attrs[size_]; + return const_iterator(attrs + size_); } Attr & operator[](size_t pos) @@ -147,6 +200,9 @@ public: friend class EvalState; }; +static_assert(std::forward_iterator); +static_assert(std::ranges::forward_range); + /** * A wrapper around Bindings that ensures that its always in sorted * order at the end. The only way to consume a BindingsBuilder is to From d83084043398bd629b404124cfdc82525e2ac8ce Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 14 Sep 2025 23:29:44 +0300 Subject: [PATCH 382/382] libexpr: Remove Bindings::find A follow-up optimization will make it impossible to make a find function that returns an iterator in an efficient manner. All consumer code can easily use the `get` variant. 
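A minimal before/after sketch of the call-site migration (the hunks below apply exactly this change, for example in tryAttrsToString):

    // Before: iterator-returning lookup, compared against end()
    auto i = v.attrs()->find(s.toString);
    if (i != v.attrs()->end())
        callFunction(*i->value, v, v1, pos);

    // After: pointer-returning lookup; nullptr means the attribute is absent
    auto i = v.attrs()->get(s.toString);
    if (i)
        callFunction(*i->value, v, v1, pos);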
--- src/libexpr/eval.cc | 26 ++++++++++++------------ src/libexpr/get-drvs.cc | 15 +++++++------- src/libexpr/include/nix/expr/attr-set.hh | 11 ---------- src/libexpr/include/nix/expr/eval.hh | 2 +- src/libexpr/primops.cc | 26 ++++++++++++------------ src/nix/nix-env/user-env.cc | 4 ++-- 6 files changed, 36 insertions(+), 48 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index f855dc67e..dc8fd4d38 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -582,7 +582,7 @@ std::optional EvalState::getDoc(Value & v) } if (isFunctor(v)) { try { - Value & functor = *v.attrs()->find(s.functor)->value; + Value & functor = *v.attrs()->get(s.functor)->value; Value * vp[] = {&v}; Value partiallyApplied; // The first parameter is not user-provided, and may be @@ -1709,8 +1709,8 @@ void EvalState::autoCallFunction(const Bindings & args, Value & fun, Value & res forceValue(fun, pos); if (fun.type() == nAttrs) { - auto found = fun.attrs()->find(s.functor); - if (found != fun.attrs()->end()) { + auto found = fun.attrs()->get(s.functor); + if (found) { Value * v = allocValue(); callFunction(*found->value, fun, *v, pos); forceValue(*v, pos); @@ -2160,10 +2160,10 @@ bool EvalState::forceBool(Value & v, const PosIdx pos, std::string_view errorCtx return v.boolean(); } -Bindings::const_iterator EvalState::getAttr(Symbol attrSym, const Bindings * attrSet, std::string_view errorCtx) +const Attr * EvalState::getAttr(Symbol attrSym, const Bindings * attrSet, std::string_view errorCtx) { - auto value = attrSet->find(attrSym); - if (value == attrSet->end()) { + auto value = attrSet->get(attrSym); + if (!value) { error("attribute '%s' missing", symbols[attrSym]).withTrace(noPos, errorCtx).debugThrow(); } return value; @@ -2171,7 +2171,7 @@ Bindings::const_iterator EvalState::getAttr(Symbol attrSym, const Bindings * att bool EvalState::isFunctor(const Value & fun) const { - return fun.type() == nAttrs && fun.attrs()->find(s.functor) != fun.attrs()->end(); + return fun.type() == nAttrs && fun.attrs()->get(s.functor); } void EvalState::forceFunction(Value & v, const PosIdx pos, std::string_view errorCtx) @@ -2252,8 +2252,8 @@ bool EvalState::isDerivation(Value & v) std::optional EvalState::tryAttrsToString(const PosIdx pos, Value & v, NixStringContext & context, bool coerceMore, bool copyToStore) { - auto i = v.attrs()->find(s.toString); - if (i != v.attrs()->end()) { + auto i = v.attrs()->get(s.toString); + if (i) { Value v1; callFunction(*i->value, v, v1, pos); return coerceToString( @@ -2298,8 +2298,8 @@ BackedStringView EvalState::coerceToString( auto maybeString = tryAttrsToString(pos, v, context, coerceMore, copyToStore); if (maybeString) return std::move(*maybeString); - auto i = v.attrs()->find(s.outPath); - if (i == v.attrs()->end()) { + auto i = v.attrs()->get(s.outPath); + if (!i) { error( "cannot coerce %1% to a string: %2%", showType(v), ValuePrinter(*this, v, errorPrintOptions)) .withTrace(pos, errorCtx) @@ -2403,8 +2403,8 @@ SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, NixStringContext /* Similarly, handle __toString where the result may be a path value. 
*/ if (v.type() == nAttrs) { - auto i = v.attrs()->find(s.toString); - if (i != v.attrs()->end()) { + auto i = v.attrs()->get(s.toString); + if (i) { Value v1; callFunction(*i->value, v, v1, pos); return coerceToPath(pos, v1, context, errorCtx); diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index 00b673365..5a7281b2b 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -45,8 +45,8 @@ PackageInfo::PackageInfo(EvalState & state, ref store, const std::string std::string PackageInfo::queryName() const { if (name == "" && attrs) { - auto i = attrs->find(state->s.name); - if (i == attrs->end()) + auto i = attrs->get(state->s.name); + if (!i) state->error("derivation name missing").debugThrow(); name = state->forceStringNoCtx(*i->value, noPos, "while evaluating the 'name' attribute of a derivation"); } @@ -56,11 +56,10 @@ std::string PackageInfo::queryName() const std::string PackageInfo::querySystem() const { if (system == "" && attrs) { - auto i = attrs->find(state->s.system); + auto i = attrs->get(state->s.system); system = - i == attrs->end() - ? "unknown" - : state->forceStringNoCtx(*i->value, i->pos, "while evaluating the 'system' attribute of a derivation"); + !i ? "unknown" + : state->forceStringNoCtx(*i->value, i->pos, "while evaluating the 'system' attribute of a derivation"); } return system; } @@ -95,9 +94,9 @@ StorePath PackageInfo::requireDrvPath() const StorePath PackageInfo::queryOutPath() const { if (!outPath && attrs) { - auto i = attrs->find(state->s.outPath); + auto i = attrs->get(state->s.outPath); NixStringContext context; - if (i != attrs->end()) + if (i) outPath = state->coerceToStorePath( i->pos, *i->value, context, "while evaluating the output path of a derivation"); } diff --git a/src/libexpr/include/nix/expr/attr-set.hh b/src/libexpr/include/nix/expr/attr-set.hh index 132be163d..8b8edddf4 100644 --- a/src/libexpr/include/nix/expr/attr-set.hh +++ b/src/libexpr/include/nix/expr/attr-set.hh @@ -137,17 +137,6 @@ public: attrs[size_++] = attr; } - const_iterator find(Symbol name) const - { - Attr key(name, 0); - auto first = attrs; - auto last = attrs + size_; - const Attr * i = std::lower_bound(first, last, key); - if (i != last && i->name == name) - return const_iterator{i}; - return end(); - } - const Attr * get(Symbol name) const { Attr key(name, 0); diff --git a/src/libexpr/include/nix/expr/eval.hh b/src/libexpr/include/nix/expr/eval.hh index 3639eab15..64f528581 100644 --- a/src/libexpr/include/nix/expr/eval.hh +++ b/src/libexpr/include/nix/expr/eval.hh @@ -613,7 +613,7 @@ public: /** * Get attribute from an attribute set and throw an error if it doesn't exist. 
*/ - Bindings::const_iterator getAttr(Symbol attrSym, const Bindings * attrSet, std::string_view errorCtx); + const Attr * getAttr(Symbol attrSym, const Bindings * attrSet, std::string_view errorCtx); template [[gnu::noinline]] diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 0d5eb23ae..a046a2c28 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1367,8 +1367,8 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName using nlohmann::json; std::optional jsonObject; auto pos = v.determinePos(noPos); - auto attr = attrs->find(state.s.structuredAttrs); - if (attr != attrs->end() + auto attr = attrs->get(state.s.structuredAttrs); + if (attr && state.forceBool( *attr->value, pos, @@ -1378,8 +1378,8 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName /* Check whether null attributes should be ignored. */ bool ignoreNulls = false; - attr = attrs->find(state.s.ignoreNulls); - if (attr != attrs->end()) + attr = attrs->get(state.s.ignoreNulls); + if (attr) ignoreNulls = state.forceBool( *attr->value, pos, @@ -2040,8 +2040,8 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value ** args, Va state.forceAttrs(*v2, pos, "while evaluating an element of the list passed to builtins.findFile"); std::string prefix; - auto i = v2->attrs()->find(state.s.prefix); - if (i != v2->attrs()->end()) + auto i = v2->attrs()->get(state.s.prefix); + if (i) prefix = state.forceStringNoCtx( *i->value, pos, @@ -3008,8 +3008,8 @@ static void prim_unsafeGetAttrPos(EvalState & state, const PosIdx pos, Value ** auto attr = state.forceStringNoCtx( *args[0], pos, "while evaluating the first argument passed to builtins.unsafeGetAttrPos"); state.forceAttrs(*args[1], pos, "while evaluating the second argument passed to builtins.unsafeGetAttrPos"); - auto i = args[1]->attrs()->find(state.symbols.create(attr)); - if (i == args[1]->attrs()->end()) + auto i = args[1]->attrs()->get(state.symbols.create(attr)); + if (!i) v.mkNull(); else state.mkPos(v, i->pos); @@ -3076,7 +3076,7 @@ static void prim_hasAttr(EvalState & state, const PosIdx pos, Value ** args, Val { auto attr = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hasAttr"); state.forceAttrs(*args[1], pos, "while evaluating the second argument passed to builtins.hasAttr"); - v.mkBool(args[1]->attrs()->find(state.symbols.create(attr)) != args[1]->attrs()->end()); + v.mkBool(args[1]->attrs()->get(state.symbols.create(attr))); } static RegisterPrimOp primop_hasAttr({ @@ -3286,14 +3286,14 @@ static void prim_intersectAttrs(EvalState & state, const PosIdx pos, Value ** ar if (left.size() < right.size()) { for (auto & l : left) { - auto r = right.find(l.name); - if (r != right.end()) + auto r = right.get(l.name); + if (r) attrs.insert(*r); } } else { for (auto & r : right) { - auto l = left.find(r.name); - if (l != left.end()) + auto l = left.get(r.name); + if (l) attrs.insert(r); } } diff --git a/src/nix/nix-env/user-env.cc b/src/nix/nix-env/user-env.cc index 552172825..fbdcb14f8 100644 --- a/src/nix/nix-env/user-env.cc +++ b/src/nix/nix-env/user-env.cc @@ -141,10 +141,10 @@ bool createUserEnv( debug("evaluating user environment builder"); state.forceValue(topLevel, topLevel.determinePos(noPos)); NixStringContext context; - auto & aDrvPath(*topLevel.attrs()->find(state.s.drvPath)); + auto & aDrvPath(*topLevel.attrs()->get(state.s.drvPath)); auto topLevelDrv = state.coerceToStorePath(aDrvPath.pos, *aDrvPath.value, context, ""); 
topLevelDrv.requireDerivation(); - auto & aOutPath(*topLevel.attrs()->find(state.s.outPath)); + auto & aOutPath(*topLevel.attrs()->get(state.s.outPath)); auto topLevelOut = state.coerceToStorePath(aOutPath.pos, *aOutPath.value, context, ""); /* Realise the resulting store expression. */