diff --git a/.gitignore b/.gitignore index b75c5d489..e10c75418 100644 --- a/.gitignore +++ b/.gitignore @@ -4,9 +4,10 @@ perl/Makefile.config # / /aclocal.m4 /autom4te.cache +/precompiled-headers.h.gch +/precompiled-headers.h.pch /config.* /configure -/nix.spec /stamp-h1 /svn-revision /libtool @@ -84,6 +85,7 @@ perl/Makefile.config /tests/restricted-innocent /tests/shell /tests/shell.drv +/tests/config.nix # /tests/lang/ /tests/lang/*.out @@ -117,3 +119,5 @@ GPATH GRTAGS GSYMS GTAGS + +nix-rust/target diff --git a/.travis.yml b/.travis.yml index 99218a963..ee4ea1ac6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,2 +1,8 @@ -os: osx -script: ./tests/install-darwin.sh +matrix: + include: + - language: osx + script: ./tests/install-darwin.sh + - language: nix + script: nix-build release.nix -A build.x86_64-linux +notifications: + email: false diff --git a/.version b/.version index c0943d3e9..7208c2182 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.3 \ No newline at end of file +2.4 \ No newline at end of file diff --git a/Makefile b/Makefile index 45a3338ed..469070533 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,7 @@ makefiles = \ + mk/precompiled-headers.mk \ local.mk \ + nix-rust/local.mk \ src/libutil/local.mk \ src/libstore/local.mk \ src/libmain/local.mk \ @@ -15,15 +17,16 @@ makefiles = \ tests/local.mk \ tests/plugins/local.mk -GLOBAL_CXXFLAGS += -g -Wall -include config.h - -include Makefile.config OPTIMIZE = 1 ifeq ($(OPTIMIZE), 1) - GLOBAL_CFLAGS += -O3 GLOBAL_CXXFLAGS += -O3 +else + GLOBAL_CXXFLAGS += -O0 endif include mk/lib.mk + +GLOBAL_CXXFLAGS += -g -Wall -include config.h -std=c++17 diff --git a/Makefile.config.in b/Makefile.config.in index f3821b2ef..aef95efd0 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -4,10 +4,11 @@ CC = @CC@ CFLAGS = @CFLAGS@ CXX = @CXX@ CXXFLAGS = @CXXFLAGS@ +LDFLAGS = @LDFLAGS@ ENABLE_S3 = @ENABLE_S3@ HAVE_SODIUM = @HAVE_SODIUM@ -HAVE_READLINE = @HAVE_READLINE@ HAVE_SECCOMP = @HAVE_SECCOMP@ +BOOST_LDFLAGS = @BOOST_LDFLAGS@ LIBCURL_LIBS = @LIBCURL_LIBS@ OPENSSL_LIBS = @OPENSSL_LIBS@ PACKAGE_NAME = @PACKAGE_NAME@ @@ -16,6 +17,7 @@ SODIUM_LIBS = @SODIUM_LIBS@ LIBLZMA_LIBS = @LIBLZMA_LIBS@ SQLITE3_LIBS = @SQLITE3_LIBS@ LIBBROTLI_LIBS = @LIBBROTLI_LIBS@ +LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@ EDITLINE_LIBS = @EDITLINE_LIBS@ bash = @bash@ bindir = @bindir@ diff --git a/README.md b/README.md index 3173c6c44..48cb1685c 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ +[![Open Collective supporters](https://opencollective.com/nixos/tiers/supporter/badge.svg?label=Supporters&color=brightgreen)](https://opencollective.com/nixos) + Nix, the purely functional package manager ------------------------------------------ diff --git a/configure.ac b/configure.ac index 60c743f83..930a12e9b 100644 --- a/configure.ac +++ b/configure.ac @@ -1,4 +1,5 @@ AC_INIT(nix, m4_esyscmd([bash -c "echo -n $(cat ./.version)$VERSION_SUFFIX"])) +AC_CONFIG_MACRO_DIRS([m4]) AC_CONFIG_SRCDIR(README.md) AC_CONFIG_AUX_DIR(config) @@ -42,13 +43,25 @@ esac AC_MSG_RESULT($system) AC_SUBST(system) -AC_DEFINE_UNQUOTED(SYSTEM, ["$system"], [platform identifier (`cpu-os')]) +AC_DEFINE_UNQUOTED(SYSTEM, ["$system"], [platform identifier ('cpu-os')]) # State should be stored in /nix/var, unless the user overrides it explicitly. test "$localstatedir" = '${prefix}/var' && localstatedir=/nix/var +CFLAGS= +CXXFLAGS= +AC_PROG_CC +AC_PROG_CXX +AC_PROG_CPP + +AC_CHECK_TOOL([AR], [ar]) + +# Use 64-bit file system calls so that we can support files > 2 GiB. 
+AC_SYS_LARGEFILE + + # Solaris-specific stuff. AC_STRUCT_DIRENT_D_TYPE if test "$sys_name" = sunos; then @@ -57,19 +70,6 @@ if test "$sys_name" = sunos; then fi -CFLAGS= -CXXFLAGS= -AC_PROG_CC -AC_PROG_CXX -AC_PROG_CPP -AX_CXX_COMPILE_STDCXX_17 - -AC_CHECK_TOOL([AR], [ar]) - -# Use 64-bit file system calls so that we can support files > 2 GiB. -AC_SYS_LARGEFILE - - # Check for pubsetbuf. AC_MSG_CHECKING([for pubsetbuf]) AC_LANG_PUSH(C++) @@ -117,26 +117,15 @@ fi ]) NEED_PROG(bash, bash) -NEED_PROG(patch, patch) AC_PATH_PROG(xmllint, xmllint, false) AC_PATH_PROG(xsltproc, xsltproc, false) AC_PATH_PROG(flex, flex, false) AC_PATH_PROG(bison, bison, false) -NEED_PROG(sed, sed) -NEED_PROG(tar, tar) -NEED_PROG(bzip2, bzip2) -NEED_PROG(gzip, gzip) -NEED_PROG(xz, xz) AC_PATH_PROG(dot, dot) AC_PATH_PROG(lsof, lsof, lsof) -NEED_PROG(cat, cat) -NEED_PROG(tr, tr) -AC_ARG_WITH(coreutils-bin, AC_HELP_STRING([--with-coreutils-bin=PATH], - [path of cat, mkdir, etc.]), - coreutils=$withval, coreutils=$(dirname $cat)) -AC_SUBST(coreutils) +AC_SUBST(coreutils, [$(dirname $(type -p cat))]) AC_ARG_WITH(store-dir, AC_HELP_STRING([--with-store-dir=PATH], @@ -145,7 +134,42 @@ AC_ARG_WITH(store-dir, AC_HELP_STRING([--with-store-dir=PATH], AC_SUBST(storedir) -# Look for OpenSSL, a required dependency. +# Look for boost, a required dependency. +# Note that AX_BOOST_BASE only exports *CPP* BOOST_CPPFLAGS, no CXX flags, +# and CPPFLAGS are not passed to the C++ compiler automatically. +# Thus we append the returned CPPFLAGS to the CXXFLAGS here. +AX_BOOST_BASE([1.66], [CXXFLAGS="$BOOST_CPPFLAGS $CXXFLAGS"], [AC_MSG_ERROR([Nix requires boost.])]) +# For unknown reasons, setting this directly in the ACTION-IF-FOUND above +# ends up with LDFLAGS being empty, so we set it afterwards. +LDFLAGS="$BOOST_LDFLAGS $LDFLAGS" + +# On some platforms, new-style atomics need a helper library +AC_MSG_CHECKING(whether -latomic is needed) +AC_LINK_IFELSE([AC_LANG_SOURCE([[ +#include +uint64_t v; +int main() { + return (int)__atomic_load_n(&v, __ATOMIC_ACQUIRE); +}]])], GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC=no, GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC=yes) +AC_MSG_RESULT($GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC) +if test "x$GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC" = xyes; then + LIBS="-latomic $LIBS" +fi + +PKG_PROG_PKG_CONFIG + +AC_ARG_ENABLE(shared, AC_HELP_STRING([--enable-shared], + [Build shared libraries for Nix [default=yes]]), + shared=$enableval, shared=yes) +if test "$shared" = yes; then + AC_SUBST(BUILD_SHARED_LIBS, 1, [Whether to build shared libraries.]) +else + AC_SUBST(BUILD_SHARED_LIBS, 0, [Whether to build shared libraries.]) + PKG_CONFIG="$PKG_CONFIG --static" +fi + +# Look for OpenSSL, a required dependency. FIXME: this is only (maybe) +# used by S3BinaryCacheStore. PKG_CHECK_MODULES([OPENSSL], [libcrypto], [CXXFLAGS="$OPENSSL_CFLAGS $CXXFLAGS"]) @@ -154,17 +178,26 @@ AC_CHECK_LIB([bz2], [BZ2_bzWriteOpen], [true], [AC_MSG_ERROR([Nix requires libbz2, which is part of bzip2. See https://web.archive.org/web/20180624184756/http://www.bzip.org/.])]) AC_CHECK_HEADERS([bzlib.h], [true], [AC_MSG_ERROR([Nix requires libbz2, which is part of bzip2. See https://web.archive.org/web/20180624184756/http://www.bzip.org/.])]) - +# Checks for libarchive +PKG_CHECK_MODULES([LIBARCHIVE], [libarchive >= 3.1.2], [CXXFLAGS="$LIBARCHIVE_CFLAGS $CXXFLAGS"]) # Look for SQLite, a required dependency. PKG_CHECK_MODULES([SQLITE3], [sqlite3 >= 3.6.19], [CXXFLAGS="$SQLITE3_CFLAGS $CXXFLAGS"]) - # Look for libcurl, a required dependency. 
PKG_CHECK_MODULES([LIBCURL], [libcurl], [CXXFLAGS="$LIBCURL_CFLAGS $CXXFLAGS"]) # Look for editline, a required dependency. -PKG_CHECK_MODULES([EDITLINE], [libeditline], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLAGS"]) +# The the libeditline.pc file was added only in libeditline >= 1.15.2, +# see https://github.com/troglobit/editline/commit/0a8f2ef4203c3a4a4726b9dd1336869cd0da8607, +# but e.g. Ubuntu 16.04 has an older version, so we fall back to searching for +# editline.h when the pkg-config approach fails. +PKG_CHECK_MODULES([EDITLINE], [libeditline], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLAGS"], [ + AC_CHECK_HEADERS([editline.h], [true], + [AC_MSG_ERROR([Nix requires libeditline; it was found neither via pkg-config nor its normal header.])]) + AC_SEARCH_LIBS([readline read_history], [editline], [], + [AC_MSG_ERROR([Nix requires libeditline; it was not found via pkg-config, but via its header, but required functions do not work. Maybe it is too old? >= 1.14 is required.])]) +]) # Look for libsodium, an optional dependency. PKG_CHECK_MODULES([SODIUM], [libsodium], @@ -173,12 +206,15 @@ PKG_CHECK_MODULES([SODIUM], [libsodium], have_sodium=1], [have_sodium=]) AC_SUBST(HAVE_SODIUM, [$have_sodium]) - # Look for liblzma, a required dependency. PKG_CHECK_MODULES([LIBLZMA], [liblzma], [CXXFLAGS="$LIBLZMA_CFLAGS $CXXFLAGS"]) AC_CHECK_LIB([lzma], [lzma_stream_encoder_mt], [AC_DEFINE([HAVE_LZMA_MT], [1], [xz multithreaded compression support])]) +# Look for zlib, a required dependency. +PKG_CHECK_MODULES([ZLIB], [zlib], [CXXFLAGS="$ZLIB_CFLAGS $CXXFLAGS"]) +AC_CHECK_HEADER([zlib.h],[:],[AC_MSG_ERROR([could not find the zlib.h header])]) +LDFLAGS="-lz $LDFLAGS" # Look for libbrotli{enc,dec}. PKG_CHECK_MODULES([LIBBROTLI], [libbrotlienc libbrotlidec], [CXXFLAGS="$LIBBROTLI_CFLAGS $CXXFLAGS"]) @@ -257,16 +293,6 @@ AC_ARG_WITH(sandbox-shell, AC_HELP_STRING([--with-sandbox-shell=PATH], sandbox_shell=$withval) AC_SUBST(sandbox_shell) -AC_ARG_ENABLE(shared, AC_HELP_STRING([--enable-shared], - [Build shared libraries for Nix [default=yes]]), - shared=$enableval, shared=yes) -if test "$shared" = yes; then - AC_SUBST(BUILD_SHARED_LIBS, 1, [Whether to build shared libraries.]) -else - AC_SUBST(BUILD_SHARED_LIBS, 0, [Whether to build shared libraries.]) -fi - - # Expand all variables in config.status. test "$prefix" = NONE && prefix=$ac_default_prefix test "$exec_prefix" = NONE && exec_prefix='${prefix}' diff --git a/contrib/stack-collapse.py b/contrib/stack-collapse.py new file mode 100755 index 000000000..f5602c95c --- /dev/null +++ b/contrib/stack-collapse.py @@ -0,0 +1,38 @@ +#!/usr/bin/env nix-shell +#!nix-shell -i python3 -p python3 --pure + +# To be used with `--trace-function-calls` and `flamegraph.pl`. 
+# +# For example: +# +# nix-instantiate --trace-function-calls '' -A hello 2> nix-function-calls.trace +# ./contrib/stack-collapse.py nix-function-calls.trace > nix-function-calls.folded +# nix-shell -p flamegraph --run "flamegraph.pl nix-function-calls.folded > nix-function-calls.svg" + +import sys +from pprint import pprint +import fileinput + +stack = [] +timestack = [] + +for line in fileinput.input(): + components = line.strip().split(" ", 2) + if components[0] != "function-trace": + continue + + direction = components[1] + components = components[2].rsplit(" ", 2) + + loc = components[0] + _at = components[1] + time = int(components[2]) + + if direction == "entered": + stack.append(loc) + timestack.append(time) + elif direction == "exited": + dur = time - timestack.pop() + vst = ";".join(stack) + print(f"{vst} {dur}") + stack.pop() diff --git a/corepkgs/config.nix.in b/corepkgs/config.nix.in index 32ce6b399..cb9945944 100644 --- a/corepkgs/config.nix.in +++ b/corepkgs/config.nix.in @@ -1,29 +1,13 @@ +# FIXME: remove this file? let fromEnv = var: def: let val = builtins.getEnv var; in if val != "" then val else def; in rec { - shell = "@bash@"; - coreutils = "@coreutils@"; - bzip2 = "@bzip2@"; - gzip = "@gzip@"; - xz = "@xz@"; - tar = "@tar@"; - tarFlags = "@tarFlags@"; - tr = "@tr@"; nixBinDir = fromEnv "NIX_BIN_DIR" "@bindir@"; nixPrefix = "@prefix@"; nixLibexecDir = fromEnv "NIX_LIBEXEC_DIR" "@libexecdir@"; nixLocalstateDir = "@localstatedir@"; nixSysconfDir = "@sysconfdir@"; nixStoreDir = fromEnv "NIX_STORE_DIR" "@storedir@"; - - # If Nix is installed in the Nix store, then automatically add it as - # a dependency to the core packages. This ensures that they work - # properly in a chroot. - chrootDeps = - if dirOf nixPrefix == builtins.storeDir then - [ (builtins.storePath nixPrefix) ] - else - [ ]; } diff --git a/corepkgs/unpack-channel.nix b/corepkgs/unpack-channel.nix index a654db40e..10515bc8b 100644 --- a/corepkgs/unpack-channel.nix +++ b/corepkgs/unpack-channel.nix @@ -1,43 +1,12 @@ -with import ; - -let - - builder = builtins.toFile "unpack-channel.sh" - '' - mkdir $out - cd $out - xzpat="\.xz\$" - gzpat="\.gz\$" - if [[ "$src" =~ $xzpat ]]; then - ${xz} -d < $src | ${tar} xf - ${tarFlags} - elif [[ "$src" =~ $gzpat ]]; then - ${gzip} -d < $src | ${tar} xf - ${tarFlags} - else - ${bzip2} -d < $src | ${tar} xf - ${tarFlags} - fi - if [ * != $channelName ]; then - mv * $out/$channelName - fi - if [ -n "$binaryCacheURL" ]; then - mkdir $out/binary-caches - echo -n "$binaryCacheURL" > $out/binary-caches/$channelName - fi - ''; - -in - -{ name, channelName, src, binaryCacheURL ? "" }: +{ name, channelName, src }: derivation { - system = builtins.currentSystem; - builder = shell; - args = [ "-e" builder ]; - inherit name channelName src binaryCacheURL; + builder = "builtin:unpack-channel"; - PATH = "${nixBinDir}:${coreutils}"; + system = "builtin"; + + inherit name channelName src; # No point in doing this remotely. 
preferLocalBuild = true; - - inherit chrootDeps; } diff --git a/doc/manual/advanced-topics/advanced-topics.xml b/doc/manual/advanced-topics/advanced-topics.xml index b710f9f2b..871b7eb1d 100644 --- a/doc/manual/advanced-topics/advanced-topics.xml +++ b/doc/manual/advanced-topics/advanced-topics.xml @@ -7,5 +7,8 @@ Advanced Topics + + + diff --git a/doc/manual/advanced-topics/cores-vs-jobs.xml b/doc/manual/advanced-topics/cores-vs-jobs.xml new file mode 100644 index 000000000..4d58ac7c5 --- /dev/null +++ b/doc/manual/advanced-topics/cores-vs-jobs.xml @@ -0,0 +1,121 @@ + + +Tuning Cores and Jobs + +Nix has two relevant settings with regards to how your CPU cores +will be utilized: and +. This chapter will talk about what +they are, how they interact, and their configuration trade-offs. + + + + + + Dictates how many separate derivations will be built at the same + time. If you set this to zero, the local machine will do no + builds. Nix will still substitute from binary caches, and build + remotely if remote builders are configured. + + + + + + Suggests how many cores each derivation should use. Similar to + make -j. + + + + +The setting determines the value of +NIX_BUILD_CORES. NIX_BUILD_CORES is equal +to , unless +equals 0, in which case NIX_BUILD_CORES +will be the total number of cores in the system. + +The maximum number of consumed cores is a simple multiplication, + * NIX_BUILD_CORES. + +The balance on how to set these two independent variables depends +upon each builder's workload and hardware. Here are a few example +scenarios on a machine with 24 cores: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Balancing 24 Build Cores

max-jobs | cores | NIX_BUILD_CORES | Maximum Processes | Result
1  | 24 | 24 | 24  | One derivation will be built at a time, each one can use 24 cores. Undersold if a job can’t use 24 cores.
4  | 6  | 6  | 24  | Four derivations will be built at once, each given access to six cores.
12 | 6  | 6  | 72  | 12 derivations will be built at once, each given access to six cores. This configuration is over-sold. If all 12 derivations being built simultaneously try to use all six cores, the machine's performance will be degraded due to extensive context switching between the 12 builds.
24 | 1  | 1  | 24  | 24 derivations can build at the same time, each using a single core. Never oversold, but derivations which require many cores will be very slow to compile.
24 | 0  | 24 | 576 | 24 derivations can build at the same time, each using all the available cores of the machine. Very likely to be oversold, and very likely to suffer context switches.
It is up to each derivation's build script to respect the host's requested cores-per-build by following the value of the NIX_BUILD_CORES environment variable (a brief illustration follows below).
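As an illustrative aside (not part of the patch above), a build script might honor this setting roughly as follows; the make-based build steps and the PREFIX convention are hypothetical examples:

#!/bin/sh
# Minimal sketch of a builder that respects the requested cores-per-build.
# Falls back to a single core if NIX_BUILD_CORES is unset; $out is the
# usual Nix output path available to builders.
set -eu
make -j"${NIX_BUILD_CORES:-1}"
make install PREFIX="$out"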
diff --git a/doc/manual/advanced-topics/diff-hook.xml b/doc/manual/advanced-topics/diff-hook.xml new file mode 100644 index 000000000..fb4bf819f --- /dev/null +++ b/doc/manual/advanced-topics/diff-hook.xml @@ -0,0 +1,205 @@ + + +Verifying Build Reproducibility with <option linkend="conf-diff-hook">diff-hook</option> + +Check build reproducibility by running builds multiple times +and comparing their results. + +Specify a program with Nix's to +compare build results when two builds produce different results. Note: +this hook is only executed if the results are not the same, this hook +is not used for determining if the results are the same. + +For purposes of demonstration, we'll use the following Nix file, +deterministic.nix for testing: + + +let + inherit (import <nixpkgs> {}) runCommand; +in { + stable = runCommand "stable" {} '' + touch $out + ''; + + unstable = runCommand "unstable" {} '' + echo $RANDOM > $out + ''; +} + + +Additionally, nix.conf contains: + + +diff-hook = /etc/nix/my-diff-hook +run-diff-hook = true + + +where /etc/nix/my-diff-hook is an executable +file containing: + + +#!/bin/sh +exec >&2 +echo "For derivation $3:" +/run/current-system/sw/bin/diff -r "$1" "$2" + + + + +The diff hook is executed by the same user and group who ran the +build. However, the diff hook does not have write access to the store +path just built. + +
+ + Spot-Checking Build Determinism + + + + Verify a path which already exists in the Nix store by passing + to the build command. + + + If the build passes and is deterministic, Nix will exit with a + status code of 0: + + +$ nix-build ./deterministic.nix -A stable +these derivations will be built: + /nix/store/z98fasz2jqy9gs0xbvdj939p27jwda38-stable.drv +building '/nix/store/z98fasz2jqy9gs0xbvdj939p27jwda38-stable.drv'... +/nix/store/yyxlzw3vqaas7wfp04g0b1xg51f2czgq-stable + +$ nix-build ./deterministic.nix -A stable --check +checking outputs of '/nix/store/z98fasz2jqy9gs0xbvdj939p27jwda38-stable.drv'... +/nix/store/yyxlzw3vqaas7wfp04g0b1xg51f2czgq-stable + + + If the build is not deterministic, Nix will exit with a status + code of 1: + + +$ nix-build ./deterministic.nix -A unstable +these derivations will be built: + /nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv +building '/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv'... +/nix/store/krpqk0l9ib0ibi1d2w52z293zw455cap-unstable + +$ nix-build ./deterministic.nix -A unstable --check +checking outputs of '/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv'... +error: derivation '/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv' may not be deterministic: output '/nix/store/krpqk0l9ib0ibi1d2w52z293zw455cap-unstable' differs + + +In the Nix daemon's log, we will now see: + +For derivation /nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv: +1c1 +< 8108 +--- +> 30204 + + + + Using with + will cause Nix to keep the second build's output in a special, + .check path: + + +$ nix-build ./deterministic.nix -A unstable --check --keep-failed +checking outputs of '/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv'... +note: keeping build directory '/tmp/nix-build-unstable.drv-0' +error: derivation '/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv' may not be deterministic: output '/nix/store/krpqk0l9ib0ibi1d2w52z293zw455cap-unstable' differs from '/nix/store/krpqk0l9ib0ibi1d2w52z293zw455cap-unstable.check' + + + In particular, notice the + /nix/store/krpqk0l9ib0ibi1d2w52z293zw455cap-unstable.check + output. Nix has copied the build results to that directory where you + can examine it. + + + <literal>.check</literal> paths are not registered store paths + + Check paths are not protected against garbage collection, + and this path will be deleted on the next garbage collection. + + The path is guaranteed to be alive for the duration of + 's execution, but may be deleted + any time after. + + If the comparison is performed as part of automated tooling, + please use the diff-hook or author your tooling to handle the case + where the build was not deterministic and also a check path does + not exist. + + + + is only usable if the derivation has + been built on the system already. If the derivation has not been + built Nix will fail with the error: + +error: some outputs of '/nix/store/hzi1h60z2qf0nb85iwnpvrai3j2w7rr6-unstable.drv' are not valid, so checking is not possible + + + Run the build without , and then try with + again. + +
+ +
+ + Automatic and Optionally Enforced Determinism Verification + + + + Automatically verify every build at build time by executing the + build multiple times. + + + + Setting and + in your + nix.conf permits the automated verification + of every build Nix performs. + + + + The following configuration will run each build three times, and + will require the build to be deterministic: + + +enforce-determinism = true +repeat = 2 + + + + + Setting to false as in + the following configuration will run the build multiple times, + execute the build hook, but will allow the build to succeed even + if it does not build reproducibly: + + +enforce-determinism = false +repeat = 1 + + + + + An example output of this configuration: + +$ nix-build ./test.nix -A unstable +these derivations will be built: + /nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv +building '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' (round 1/2)... +building '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' (round 2/2)... +output '/nix/store/6xg356v9gl03hpbbg8gws77n19qanh02-unstable' of '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' differs from '/nix/store/6xg356v9gl03hpbbg8gws77n19qanh02-unstable.check' from previous round +/nix/store/6xg356v9gl03hpbbg8gws77n19qanh02-unstable + + +
+
diff --git a/doc/manual/advanced-topics/distributed-builds.xml b/doc/manual/advanced-topics/distributed-builds.xml index ce2e077ed..9ac4a92cd 100644 --- a/doc/manual/advanced-topics/distributed-builds.xml +++ b/doc/manual/advanced-topics/distributed-builds.xml @@ -184,4 +184,7 @@ to be included. (This is the default.) the option builders-use-substitutes in your local nix.conf. +To build only on remote builders and disable building on the local machine, +you can use the option . + diff --git a/doc/manual/advanced-topics/post-build-hook.xml b/doc/manual/advanced-topics/post-build-hook.xml new file mode 100644 index 000000000..08a7a772f --- /dev/null +++ b/doc/manual/advanced-topics/post-build-hook.xml @@ -0,0 +1,160 @@ + + +Using the <option linkend="conf-post-build-hook">post-build-hook</option> +Uploading to an S3-compatible binary cache after each build + + +
Implementation Caveats

Here we use the post-build hook to upload to a binary cache. This is a simple and working example, but it is not suitable for all use cases.

The post-build hook program runs after each executed build, and blocks the build loop. The build loop exits if the hook program fails.

Concretely, this implementation will make Nix slow or unusable when the internet is slow or unreliable.

A more advanced implementation might pass the store paths to a user-supplied daemon or queue for processing the store paths outside of the build loop (see the sketch below).
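As a hedged sketch of that queue-based idea (not part of the patch; the queue file location is a made-up example), a hook could simply record the built paths and leave uploading to a separate daemon running outside the build loop:

#!/bin/sh
# Record the freshly built store paths; a separate, user-supplied uploader
# is assumed to watch this file and copy the paths to the binary cache.
set -eu
echo "$OUT_PATHS" >> /var/lib/nix/upload-queue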
+ +
Prerequisites

This tutorial assumes you have configured an S3-compatible binary cache as described in the S3 binary cache section of this manual, and that the root user's default AWS profile can upload to the bucket.
+ +
Set up a Signing Key

Use nix-store --generate-binary-cache-key to create our public and private signing keys. We will sign paths with the private key, and distribute the public key for verifying the authenticity of the paths.

# nix-store --generate-binary-cache-key example-nix-cache-1 /etc/nix/key.private /etc/nix/key.public
# cat /etc/nix/key.public
example-nix-cache-1:1/cKDz3QCCOmwcztD2eV6Coggp6rqc9DGjWv7C0G+rM=

Then, add the public key and the cache URL to your nix.conf's substituters and trusted-public-keys settings like:

substituters = https://cache.nixos.org/ s3://example-nix-cache
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= example-nix-cache-1:1/cKDz3QCCOmwcztD2eV6Coggp6rqc9DGjWv7C0G+rM=

We will restart the Nix daemon in a later step.
+ +
+ Implementing the build hook + Write the following script to + /etc/nix/upload-to-cache.sh: + + + +#!/bin/sh + +set -eu +set -f # disable globbing +export IFS=' ' + +echo "Signing paths" $OUT_PATHS +nix sign-paths --key-file /etc/nix/key.private $OUT_PATHS +echo "Uploading paths" $OUT_PATHS +exec nix copy --to 's3://example-nix-cache' $OUT_PATHS + + + + Should <literal>$OUT_PATHS</literal> be quoted? + + The $OUT_PATHS variable is a space-separated + list of Nix store paths. In this case, we expect and want the + shell to perform word splitting to make each output path its + own argument to nix sign-paths. Nix guarantees + the paths will not contain any spaces, however a store path + might contain glob characters. The set -f + disables globbing in the shell. + + + + Then make sure the hook program is executable by the root user: + +# chmod +x /etc/nix/upload-to-cache.sh + +
+ +
+ Updating Nix Configuration + + Edit /etc/nix/nix.conf to run our hook, + by adding the following configuration snippet at the end: + + +post-build-hook = /etc/nix/upload-to-cache.sh + + +Then, restart the nix-daemon. +
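For illustration only (not part of the patch), restarting the daemon typically looks like the following; the Linux command assumes a systemd-based installation, and the macOS command mirrors the one shown later in this manual:

# Linux (systemd-based installations)
sudo systemctl restart nix-daemon

# macOS
sudo launchctl kickstart -k system/org.nixos.nix-daemon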
+ +
Testing

Build any derivation, for example:

$ nix-build -E '(import <nixpkgs> {}).writeText "example" (builtins.toString builtins.currentTime)'
these derivations will be built:
  /nix/store/s4pnfbkalzy5qz57qs6yybna8wylkig6-example.drv
building '/nix/store/s4pnfbkalzy5qz57qs6yybna8wylkig6-example.drv'...
running post-build-hook '/home/grahamc/projects/github.com/NixOS/nix/post-hook.sh'...
post-build-hook: Signing paths /nix/store/ibcyipq5gf91838ldx40mjsp0b8w9n18-example
post-build-hook: Uploading paths /nix/store/ibcyipq5gf91838ldx40mjsp0b8w9n18-example
/nix/store/ibcyipq5gf91838ldx40mjsp0b8w9n18-example

Then delete the path from the store, and try substituting it from the binary cache:

$ rm ./result
$ nix-store --delete /nix/store/ibcyipq5gf91838ldx40mjsp0b8w9n18-example

Now, copy the path back from the cache:

$ nix-store --realise /nix/store/ibcyipq5gf91838ldx40mjsp0b8w9n18-example
copying path '/nix/store/m8bmqwrch6l3h8s0k3d673xpmipcdpsa-example' from 's3://example-nix-cache'...
warning: you did not specify '--add-root'; the result might be removed by the garbage collector
/nix/store/m8bmqwrch6l3h8s0k3d673xpmipcdpsa-example
+
Conclusion

We now have a Nix installation configured to automatically sign and upload every local build to a remote binary cache.

Before deploying this to production, be sure to consider the implementation caveats described at the beginning of this section.
+
diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index f0da1f612..48dce7c95 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -1,7 +1,9 @@ + + xml:id="sec-conf-file" + version="5"> nix.conf @@ -236,8 +238,74 @@ false. linkend='opt-cores'>--cores command line switch and defaults to 1. The value 0 means that the builder should use all available CPU cores in the - system. + system. + See also . + + + diff-hook + + + Absolute path to an executable capable of diffing build results. + The hook executes if is + true, and the output of a build is known to not be the same. + This program is not executed to determine if two results are the + same. + + + + The diff hook is executed by the same user and group who ran the + build. However, the diff hook does not have write access to the + store path just built. + + + The diff hook program receives three parameters: + + + + + A path to the previous build's results + + + + + + A path to the current build's results + + + + + + The path to the build's derivation + + + + + + The path to the build's scratch directory. This directory + will exist only if the build was run with + . + + + + + + The stderr and stdout output from the diff hook will not be + displayed to the user. Instead, it will print to the nix-daemon's + log. + + + When using the Nix daemon, diff-hook must + be set in the nix.conf configuration file, and + cannot be passed at the command line. + + + + + + enforce-determinism + + See . @@ -365,7 +433,7 @@ builtins.fetchurl { keep-env-derivations If false (default), derivations - are not stored in Nix user environments. That is, the derivation + are not stored in Nix user environments. That is, the derivations of any build-time-only dependencies may be garbage-collected. If true, when you add a Nix derivation to @@ -415,8 +483,10 @@ builtins.fetchurl { max-free - This option defines after how many free bytes to stop collecting - garbage once the min-free condition gets triggered. + When a garbage collection is triggered by the + min-free option, it stops as soon as + max-free bytes are available. The default is + infinity (i.e. delete all garbage). @@ -431,7 +501,10 @@ builtins.fetchurl { regardless). It can be overridden using the () - command line switch. + command line switch. + + See also . + max-silent-time @@ -457,9 +530,11 @@ builtins.fetchurl { min-free - When the disk reaches min-free bytes of free disk space during a build, nix - will start to garbage-collection until max-free bytes are available on the disk. - A value of 0 (the default) means that this feature is disabled. + When free disk space in /nix/store + drops below min-free during a build, Nix + performs a garbage-collection until max-free + bytes are available or there is no more garbage. A value of + 0 (the default) disables this feature. @@ -589,15 +664,71 @@ password my-password + + post-build-hook + + Optional. The path to a program to execute after each build. + + This option is only settable in the global + nix.conf, or on the command line by trusted + users. + + When using the nix-daemon, the daemon executes the hook as + root. If the nix-daemon is not involved, the + hook runs as the user executing the nix-build. + + + The hook executes after an evaluation-time build. + The hook does not execute on substituted paths. + The hook's output always goes to the user's terminal. + If the hook fails, the build succeeds but no further builds execute. 
+ The hook executes synchronously, and blocks other builds from progressing while it runs. + + + The program executes with no arguments. The program's environment + contains the following environment variables: + + + + DRV_PATH + + The derivation for the built paths. + Example: + /nix/store/5nihn1a7pa8b25l9zafqaqibznlvvp3f-bash-4.4-p23.drv + + + + + + OUT_PATHS + + Output paths of the built derivation, separated by a space character. + Example: + /nix/store/zf5lbh336mnzf1nlswdn11g4n2m8zh3g-bash-4.4-p23-dev + /nix/store/rjxwxwv1fpn9wa2x5ssk5phzwlcv4mna-bash-4.4-p23-doc + /nix/store/6bqvbzjkcp9695dq0dpl5y43nvy37pq1-bash-4.4-p23-info + /nix/store/r7fng3kk3vlpdlh2idnrbn37vh4imlj2-bash-4.4-p23-man + /nix/store/xfghy8ixrhz3kyy6p724iv3cxji088dx-bash-4.4-p23. + + + + + + See for an example + implementation. + + + + repeat How many times to repeat builds to check whether they are deterministic. The default value is 0. If the value is non-zero, every build is repeated the specified number of times. If the contents of any of the runs differs from the - previous ones, the build is rejected and the resulting store paths - are not registered as “valid” in Nix’s database. - + previous ones and is + true, the build is rejected and the resulting store paths are not + registered as “valid” in Nix’s database. require-sigs @@ -628,6 +759,19 @@ password my-password + run-diff-hook + + + If true, enable the execution of . + + + + When using the Nix daemon, run-diff-hook must + be set in the nix.conf configuration file, + and cannot be passed at the command line. + + + sandbox @@ -729,6 +873,14 @@ password my-password + stalled-download-timeout + + The timeout (in seconds) for receiving data from servers + during download. Nix cancels idle downloads after this timeout's + duration. + + + substituters A list of URLs of substituters, separated by @@ -784,6 +936,31 @@ requiredSystemFeatures = [ "kvm" ]; + tarball-ttl + + + Default: 3600 seconds. + + The number of seconds a downloaded tarball is considered + fresh. If the cached tarball is stale, Nix will check whether + it is still up to date using the ETag header. Nix will download + a new version if the ETag header is unsupported, or the + cached ETag doesn't match. + + + Setting the TTL to 0 forces Nix to always + check if the tarball is up to date. + + Nix caches tarballs in + $XDG_CACHE_HOME/nix/tarballs. + + Files fetched via NIX_PATH, + fetchGit, fetchMercurial, + fetchTarball, and fetchurl + respect this TTL. + + + timeout @@ -804,6 +981,34 @@ requiredSystemFeatures = [ "kvm" ]; + trace-function-calls + + + + Default: false. + + If set to true, the Nix evaluator will + trace every function call. Nix will print a log message at the + "vomit" level for every function entrance and function exit. + + +function-trace entered undefined position at 1565795816999559622 +function-trace exited undefined position at 1565795816999581277 +function-trace entered /nix/store/.../example.nix:226:41 at 1565795253249935150 +function-trace exited /nix/store/.../example.nix:226:41 at 1565795253249941684 + + + The undefined position means the function + call is a builtin. + + Use the contrib/stack-collapse.py script + distributed with the Nix source code to convert the trace logs + in to a format suitable for flamegraph.pl. + + + + + trusted-public-keys A whitespace-separated list of public keys. 
When diff --git a/doc/manual/command-ref/env-common.xml b/doc/manual/command-ref/env-common.xml index c532ffdde..696d68c34 100644 --- a/doc/manual/command-ref/env-common.xml +++ b/doc/manual/command-ref/env-common.xml @@ -14,7 +14,8 @@ IN_NIX_SHELL Indicator that tells if the current environment was set up by - nix-shell. + nix-shell. Since Nix 2.0 the values are + "pure" and "impure" @@ -121,7 +122,7 @@ $ mount -o bind /mnt/otherdisk/nix /nix NIX_LOG_DIR Overrides the location of the Nix log directory - (default prefix/log/nix). + (default prefix/var/log/nix). diff --git a/doc/manual/command-ref/nix-build.xml b/doc/manual/command-ref/nix-build.xml index 40fe7a43f..886d25910 100644 --- a/doc/manual/command-ref/nix-build.xml +++ b/doc/manual/command-ref/nix-build.xml @@ -30,6 +30,7 @@ attrPath + @@ -98,6 +99,10 @@ also . + + Show what store paths would be built or downloaded. + + / outlink diff --git a/doc/manual/command-ref/nix-channel.xml b/doc/manual/command-ref/nix-channel.xml index ff4021a76..ebcf56aff 100644 --- a/doc/manual/command-ref/nix-channel.xml +++ b/doc/manual/command-ref/nix-channel.xml @@ -31,12 +31,14 @@ Description -A Nix channel is a mechanism that allows you to automatically stay -up-to-date with a set of pre-built Nix expressions. A Nix channel is -just a URL that points to a place containing both a set of Nix -expressions and a pointer to a binary cache. See also . +A Nix channel is a mechanism that allows you to automatically +stay up-to-date with a set of pre-built Nix expressions. A Nix +channel is just a URL that points to a place containing a set of Nix +expressions. See also . + +To see the list of official NixOS channels, visit . This command has the following operations: @@ -112,13 +114,13 @@ $ nix-env -iA nixpkgs.hello You can revert channel updates using : -$ nix-instantiate --eval -E '(import <nixpkgs> {}).lib.nixpkgsVersion' +$ nix-instantiate --eval -E '(import <nixpkgs> {}).lib.version' "14.04.527.0e935f1" $ nix-channel --rollback switching from generation 483 to 482 -$ nix-instantiate --eval -E '(import <nixpkgs> {}).lib.nixpkgsVersion' +$ nix-instantiate --eval -E '(import <nixpkgs> {}).lib.version' "14.04.526.dbadfad" @@ -172,18 +174,6 @@ following files: - binary-cache-url - - A file containing the URL to a binary cache (such - as https://cache.nixos.org). Nix will automatically - check this cache for pre-built binaries, if the user has - sufficient rights to add binary caches. For instance, in a - multi-user Nix setup, the binary caches provided by the channels - of the root user are used automatically, but caches corresponding - to the channels of non-root users are ignored. - - - diff --git a/doc/manual/command-ref/nix-env.xml b/doc/manual/command-ref/nix-env.xml index 56c466268..d257a5e49 100644 --- a/doc/manual/command-ref/nix-env.xml +++ b/doc/manual/command-ref/nix-env.xml @@ -221,31 +221,53 @@ also . ~/.nix-defexpr - A directory that contains the default Nix + The source for the default Nix expressions used by the , , and operations to obtain derivations. The + --available operations to obtain derivations. The option may be used to override this default. - The Nix expressions in this directory are combined into a - single set, with each file as an attribute that has the name of - the file. Thus, if ~/.nix-defexpr contains - two files, foo and bar, + If ~/.nix-defexpr is a file, + it is loaded as a Nix expression. If the expression + is a set, it is used as the default Nix expression. 
+ If the expression is a function, an empty set is passed + as argument and the return value is used as + the default Nix expression. + + If ~/.nix-defexpr is a directory + containing a default.nix file, that file + is loaded as in the above paragraph. + + If ~/.nix-defexpr is a directory without + a default.nix file, then its contents + (both files and subdirectories) are loaded as Nix expressions. + The expressions are combined into a single set, each expression + under an attribute with the same name as the original file + or subdirectory. + + + For example, if ~/.nix-defexpr contains + two files, foo.nix and bar.nix, then the default Nix expression will essentially be { - foo = import ~/.nix-defexpr/foo; - bar = import ~/.nix-defexpr/bar; + foo = import ~/.nix-defexpr/foo.nix; + bar = import ~/.nix-defexpr/bar.nix; } + The file manifest.nix is always ignored. + Subdirectories without a default.nix file + are traversed recursively in search of more Nix expressions, + but the names of these intermediate directories are not + added to the attribute paths of the default Nix expression. + The command nix-channel places symlinks to the downloaded Nix expressions from each subscribed channel in this directory. - @@ -637,7 +659,7 @@ upgrading `mozilla-1.2' to `mozilla-1.4' gcc-3.3.1 are split into two parts: the package name (gcc), and the version (3.3.1). The version part starts after the first -dash not following by a letter. x is considered an +dash not followed by a letter. x is considered an upgrade of y if their package names match, and the version of y is higher that that of x. @@ -1348,10 +1370,13 @@ profile. The generations can be a list of generation numbers, the special value old to delete all non-current generations, a value such as 30d to delete all generations older than the specified number of days (except for the -generation that was active at that point in time), or a value such as. -+5 to only keep the specified items older than the -current generation. Periodically deleting old generations is important -to make garbage collection effective. +generation that was active at that point in time), or a value such as ++5 to keep the last 5 generations +ignoring any newer than current, e.g., if 30 is the current +generation +5 will delete generation 25 +and all older generations. +Periodically deleting old generations is important to make garbage collection +effective. diff --git a/doc/manual/command-ref/nix-prefetch-url.xml b/doc/manual/command-ref/nix-prefetch-url.xml index 8ef748c74..621ded72e 100644 --- a/doc/manual/command-ref/nix-prefetch-url.xml +++ b/doc/manual/command-ref/nix-prefetch-url.xml @@ -53,7 +53,7 @@ avoided. If hash is specified, then a download is not performed if the Nix store already contains a file with the same hash and base name. Otherwise, the file is downloaded, and an -error if signaled if the actual hash of the file does not match the +error is signaled if the actual hash of the file does not match the specified hash. This command prints the hash on standard output. Additionally, diff --git a/doc/manual/command-ref/nix-shell.xml b/doc/manual/command-ref/nix-shell.xml index bb4a4e420..766482460 100644 --- a/doc/manual/command-ref/nix-shell.xml +++ b/doc/manual/command-ref/nix-shell.xml @@ -39,7 +39,12 @@ - packages + + + packages + expressions + + path @@ -189,8 +194,8 @@ also . NIX_BUILD_SHELL - - Shell used to start the interactive environment. + + Shell used to start the interactive environment. Defaults to the bash found in PATH. 
@@ -222,8 +227,9 @@ $ nix-shell '<nixpkgs>' -A pan --pure \ --command 'export NIX_DEBUG=1; export NIX_CORES=8; return' -Nix expressions can also be given on the command line. For instance, -the following starts a shell containing the packages +Nix expressions can also be given on the command line using the +-E and -p flags. +For instance, the following starts a shell containing the packages sqlite and libX11: @@ -238,6 +244,14 @@ $ nix-shell -p sqlite xorg.libX11 … -L/nix/store/j1zg5v…-sqlite-3.8.0.2/lib -L/nix/store/0gmcz9…-libX11-1.6.1/lib … +Note that -p accepts multiple full nix expressions that +are valid in the buildInputs = [ ... ] shown above, +not only package names. So the following is also legal: + + +$ nix-shell -p sqlite 'git.override { withManual = false; }' + + The -p flag looks up Nixpkgs in the Nix search path. You can override it by passing or setting NIX_PATH. For example, the following gives you a shell diff --git a/doc/manual/command-ref/nix-store.xml b/doc/manual/command-ref/nix-store.xml index d73cb92ee..113a3c2e4 100644 --- a/doc/manual/command-ref/nix-store.xml +++ b/doc/manual/command-ref/nix-store.xml @@ -215,6 +215,48 @@ printed.) +Special exit codes: + + + + 100 + Generic build failure, the builder process + returned with a non-zero exit code. + + + 101 + Build timeout, the build was aborted because it + did not complete within the specified timeout. + + + + 102 + Hash mismatch, the build output was rejected + because it does not match the specified outputHash. + + + + 104 + Not deterministic, the build succeeded in check + mode but the resulting output is not binary reproducable. + + + + + +With the flag it's possible for +multiple failures to occur, in this case the 1xx status codes are or combined +using binary or. +1100100 + ^^^^ + |||`- timeout + ||`-- output hash mismatch + |`--- build failure + `---- not deterministic + + @@ -883,6 +925,60 @@ $ nix-store --add ./foo.c + + +Operation <option>--add-fixed</option> + +Synopsis + + + nix-store + + + algorithm + paths + + + + +Description + +The operation adds the specified paths to +the Nix store. Unlike paths are registered using the +specified hashing algorithm, resulting in the same output path as a fixed output +derivation. This can be used for sources that are not available from a public +url or broke since the download expression was written. + + +This operation has the following options: + + + + + + + Use recursive instead of flat hashing mode, used when adding directories + to the store. + + + + + + + + + + +Example + + +$ nix-store --add-fixed sha256 ./hello-2.10.tar.gz +/nix/store/3x7dwzq014bblazs7kq20p9hyzz0qh8g-hello-2.10.tar.gz + + + + + diff --git a/doc/manual/command-ref/opt-common.xml b/doc/manual/command-ref/opt-common.xml index 4c572e129..0383bfaed 100644 --- a/doc/manual/command-ref/opt-common.xml +++ b/doc/manual/command-ref/opt-common.xml @@ -107,14 +107,22 @@ / number - Sets the maximum number of build jobs that Nix will + + + Sets the maximum number of build jobs that Nix will perform in parallel to the specified number. Specify auto to use the number of CPUs in the system. The default is specified by the max-jobs configuration setting, which itself defaults to 1. A higher value is useful on SMP systems or to - exploit I/O latency. + exploit I/O latency. + + Setting it to 0 disallows building on the local + machine, which is useful when you want builds to happen only on remote + builders. 
+ + @@ -235,9 +243,10 @@ name value This option is accepted by - nix-env, nix-instantiate and - nix-build. When evaluating Nix expressions, the - expression evaluator will automatically try to call functions that + nix-env, nix-instantiate, + nix-shell and nix-build. + When evaluating Nix expressions, the expression evaluator will + automatically try to call functions that it encounters. It can automatically call functions for which every argument has a default value (e.g., { argName ? @@ -314,7 +323,14 @@ Nix expressions to be parsed and evaluated, rather than as a list of file names of Nix expressions. (nix-instantiate, nix-build - and nix-shell only.) + and nix-shell only.) + + For nix-shell, this option is commonly used + to give you a shell in which you can build the packages returned + by the expression. If you want to get a shell which contain the + built packages ready for use, give your + expression to the nix-shell -p convenience flag + instead. diff --git a/doc/manual/expressions/advanced-attributes.xml b/doc/manual/expressions/advanced-attributes.xml index 07b0d97d3..372d03de7 100644 --- a/doc/manual/expressions/advanced-attributes.xml +++ b/doc/manual/expressions/advanced-attributes.xml @@ -11,7 +11,7 @@ attributes. - allowedReferences + allowedReferences The optional attribute allowedReferences specifies a list of legal @@ -32,7 +32,7 @@ allowedReferences = []; - allowedRequisites + allowedRequisites This attribute is similar to allowedReferences, but it specifies the legal @@ -50,7 +50,7 @@ allowedRequisites = [ foobar ]; - disallowedReferences + disallowedReferences The optional attribute disallowedReferences specifies a list of illegal @@ -67,7 +67,7 @@ disallowedReferences = [ foo ]; - disallowedRequisites + disallowedRequisites This attribute is similar to disallowedReferences, but it specifies illegal @@ -85,7 +85,7 @@ disallowedRequisites = [ foobar ]; - exportReferencesGraph + exportReferencesGraph This attribute allows builders access to the references graph of their inputs. The attribute is a list of @@ -124,7 +124,7 @@ derivation { - impureEnvVars + impureEnvVars This attribute allows you to specify a list of environment variables that should be passed from the environment @@ -158,9 +158,9 @@ impureEnvVars = [ "http_proxy" "https_proxy" ... ]; - outputHash - outputHashAlgo - outputHashMode + outputHash + outputHashAlgo + outputHashMode These attributes declare that the derivation is a so-called fixed-output derivation, which @@ -282,7 +282,7 @@ stdenv.mkDerivation { - passAsFile + passAsFile A list of names of attributes that should be passed via files rather than environment variables. For example, @@ -309,7 +309,7 @@ big = "a very long string"; - preferLocalBuild + preferLocalBuild If this attribute is set to true and - allowSubstitutes + allowSubstitutes - If this attribute is set to + + If this attribute is set to false, then Nix will always build this derivation; it will not try to substitute its outputs. This is useful for very trivial derivations (such as writeText in Nixpkgs) that are cheaper to - build than to substitute from a binary cache. + build than to substitute from a binary cache. + + You need to have a builder configured which satisfies + the derivation’s system attribute, since the + derivation cannot be substituted. Thus it is usually a good idea + to align system with + builtins.currentSystem when setting + allowSubstitutes to false. + For most trivial derivations this should be the case. 
+ + diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index 0fb5261b3..394e1fc32 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -170,18 +170,6 @@ if builtins ? getEnv then builtins.getEnv "PATH" else "" - - builtins.splitVersion - s - - Split a string representing a version into its - components, by the same version splitting logic underlying the - version comparison in - nix-env -u. - - - - builtins.concatLists lists @@ -301,7 +289,7 @@ if builtins ? getEnv then builtins.getEnv "PATH" else "" Return element n from the list xs. Elements are counted - starting from 0. A fatal error occurs in the index is out of + starting from 0. A fatal error occurs if the index is out of bounds. @@ -347,7 +335,7 @@ stdenv.mkDerivation { … } You can change the cache timeout either on the command line with or in the Nix configuration file with this option: - tarball-ttl number of seconds to cache. + number of seconds to cache. Note that when obtaining the hash with nix-prefetch-url @@ -425,6 +413,13 @@ stdenv.mkDerivation { … } This is often a branch or tag name. Defaults to HEAD. + + + By default, the ref value is prefixed + with refs/heads/. As of Nix 2.3.0 + Nix will not prefix refs/heads/ if + ref starts with refs/. + @@ -438,6 +433,14 @@ stdenv.mkDerivation { … } } + + Fetching an arbitrary ref + builtins.fetchGit { + url = "https://github.com/NixOS/nix.git"; + ref = "refs/heads/0.5-release"; +} + + Fetching a repository's specific commit on an arbitrary branch @@ -484,11 +487,8 @@ stdenv.mkDerivation { … } Fetching a tag builtins.fetchGit { url = "https://github.com/nixos/nix.git"; - ref = "tags/1.9"; + ref = "refs/tags/1.9"; } - Due to a bug (#2385), - only non-annotated tags can be fetched. @@ -498,7 +498,7 @@ stdenv.mkDerivation { … } fetch the latest version of a remote branch. Nix will refetch the branch in accordance to - . + . This behavior is disabled in Pure evaluation mode. builtins.fetchGit { @@ -705,6 +705,19 @@ builtins.genList (x: x * x) 5 + + builtins.hashFile + type p + + Return a base-16 representation of the + cryptographic hash of the file at path p. The + hash algorithm specified by type must + be one of "md5", "sha1", + "sha256" or "sha512". + + + + builtins.head list @@ -733,6 +746,11 @@ builtins.genList (x: x * x) 5 separate file, and use it from Nix expressions in other files. + Unlike some languages, import is a regular + function in Nix. Paths using the angle bracket syntax (e.g., + import <foo>) are normal path + values (see ). + A Nix expression loaded by import must not contain any free variables (identifiers that are not defined in the Nix expression itself and are not @@ -1102,6 +1120,16 @@ Evaluates to [ "foo" ]. + + builtins.placeholder + output + + Return a placeholder string for the specified + output that will be substituted by the + corresponding output path at build time. Typical outputs would be + "out", "bin" or + "dev". + builtins.readDir @@ -1247,6 +1275,19 @@ Evaluates to [ " " [ "FOO" ] " " ]. + + + builtins.splitVersion + s + + Split a string representing a version into its + components, by the same version splitting logic underlying the + version comparison in + nix-env -u. + + + + builtins.stringLength e @@ -1440,7 +1481,7 @@ in foo A set containing { __toString = self: ...; }. An integer. A list, in which case the string representations of its elements are joined with spaces. - A Boolean (false yields "", true yields "1". + A Boolean (false yields "", true yields "1"). 
null, which yields the empty string. @@ -1579,12 +1620,18 @@ stdenv.mkDerivation (rec { builtins.tryEval e - Try to evaluate e. + Try to shallowly evaluate e. Return a set containing the attributes success (true if e evaluated successfully, false if an error was thrown) and value, equalling e - if successful and false otherwise. + if successful and false otherwise. Note that this + doesn't evaluate e deeply, so + let e = { x = throw ""; }; in (builtins.tryEval e).success + will be true. Using builtins.deepSeq + one can get the expected result: let e = { x = throw ""; + }; in (builtins.tryEval (builtins.deepSeq e e)).success will be + false. diff --git a/doc/manual/expressions/language-constructs.xml b/doc/manual/expressions/language-constructs.xml index f961ed921..0d0cbbe15 100644 --- a/doc/manual/expressions/language-constructs.xml +++ b/doc/manual/expressions/language-constructs.xml @@ -43,7 +43,7 @@ encountered).. Let-expressions -A let-expression allows you define local variables for an +A let-expression allows you to define local variables for an expression. For instance, @@ -217,7 +217,25 @@ but can also be written as: ellipsis(...) as you can access attribute names as a, using args.a, which was given as an additional attribute to the function. - + + + + + The args@ expression is bound to the argument passed to the function which + means that attributes with defaults that aren't explicitly specified in the function call + won't cause an evaluation error, but won't exist in args. + + + For instance + +let + function = args@{ a ? 23, ... }: args; +in + function {} + + will evaluate to an empty attribute set. + + diff --git a/doc/manual/expressions/language-operators.xml b/doc/manual/expressions/language-operators.xml index f1f750934..4f11bf529 100644 --- a/doc/manual/expressions/language-operators.xml +++ b/doc/manual/expressions/language-operators.xml @@ -15,13 +15,16 @@ weakest binding). + Name Syntax Associativity Description + Precedence + Select e . attrpath [ or def ] @@ -33,19 +36,25 @@ weakest binding). dot-separated list of attribute names.) If the attribute doesn’t exist, return def if provided, otherwise abort evaluation. + 1 + Application e1 e2 left Call function e1 with argument e2. + 2 + Arithmetic Negation - e none Arithmetic negation. + 3 + Has Attribute e ? attrpath none @@ -53,34 +62,69 @@ weakest binding). the attribute denoted by attrpath; return true or false. + 4 + List Concatenation e1 ++ e2 right List concatenation. + 5 + Multiplication e1 * e2, + + left + Arithmetic multiplication. + 6 + + + Division + e1 / e2 left - Arithmetic multiplication and division. + Arithmetic division. + 6 + Addition + + e1 + e2 + + left + Arithmetic addition. + 7 + + + Subtraction - e1 + e2, e1 - e2 left - Arithmetic addition and subtraction. String or path concatenation (only by +). + Arithmetic subtraction. + 7 + String Concatenation + + string1 + string2 + + left + String concatenation. + 7 + + + Not ! e none Boolean negation. + 8 + Update e1 // e2 right @@ -89,47 +133,90 @@ weakest binding). e2 (with the latter taking precedence over the former in case of equally named attributes). + 9 + Less Than e1 < e2, - e1 > e2, - e1 <= e2, + + none + Arithmetic comparison. + 10 + + + Less Than or Equal To + + e1 <= e2 + + none + Arithmetic comparison. + 10 + + + Greater Than + + e1 > e2 + + none + Arithmetic comparison. + 10 + + + Greater Than or Equal To + e1 >= e2 none Arithmetic comparison. + 10 + Equality + + e1 == e2 + + none + Equality. 
+ 11 + + + Inequality - e1 == e2, e1 != e2 none - Equality and inequality. + Inequality. + 11 + Logical AND e1 && e2 left Logical AND. + 12 + Logical OR e1 || e2 left Logical OR. + 13 + Logical Implication e1 -> e2 none Logical implication (equivalent to !e1 || e2). + 14 - \ No newline at end of file + diff --git a/doc/manual/expressions/simple-building-testing.xml b/doc/manual/expressions/simple-building-testing.xml index 0348c082b..7326a3e76 100644 --- a/doc/manual/expressions/simple-building-testing.xml +++ b/doc/manual/expressions/simple-building-testing.xml @@ -43,7 +43,7 @@ use nix-build’s switch to give the symlink another name. -Nix has a transactional semantics. Once a build finishes +Nix has transactional semantics. Once a build finishes successfully, Nix makes a note of this in its database: it registers that the path denoted by out is now valid. If you try to build the derivation again, Nix diff --git a/doc/manual/installation/env-variables.xml b/doc/manual/installation/env-variables.xml index d1ee0bb2e..e2b8fc867 100644 --- a/doc/manual/installation/env-variables.xml +++ b/doc/manual/installation/env-variables.xml @@ -67,5 +67,23 @@ $ sudo launchctl kickstart -k system/org.nixos.nix-daemon +
+ +Proxy Environment Variables + +The Nix installer has special handling for these proxy-related +environment variables: +http_proxy, https_proxy, +ftp_proxy, no_proxy, +HTTP_PROXY, HTTPS_PROXY, +FTP_PROXY, NO_PROXY. + +If any of these variables are set when running the Nix installer, +then the installer will create an override file at +/etc/systemd/system/nix-daemon.service.d/override.conf +so nix-daemon will use them. + +
+ diff --git a/doc/manual/installation/prerequisites-source.xml b/doc/manual/installation/prerequisites-source.xml index 426768095..684dd5d74 100644 --- a/doc/manual/installation/prerequisites-source.xml +++ b/doc/manual/installation/prerequisites-source.xml @@ -13,7 +13,7 @@ Bash Shell. The ./configure script relies on bashisms, so Bash is required. - A version of GCC or Clang that supports C++14. + A version of GCC or Clang that supports C++17. pkg-config to locate dependencies. If your distribution does not provide it, you can get @@ -54,6 +54,10 @@ 1.66.0 or higher. It can be obtained from the official web site . + The editline library of version + 1.14.0 or higher. It can be obtained from the its repository + . + The xmllint and xsltproc programs to build this manual and the man-pages. These are part of the libxml2 and diff --git a/doc/manual/packages/basic-package-mgmt.xml b/doc/manual/packages/basic-package-mgmt.xml index e8d1419da..0f21297f3 100644 --- a/doc/manual/packages/basic-package-mgmt.xml +++ b/doc/manual/packages/basic-package-mgmt.xml @@ -24,11 +24,11 @@ symlinks to the files of the active applications. Components are installed from a set of Nix expressions that tell Nix how to build those packages, including, if necessary, their dependencies. There is a collection of -Nix expressions called the Nix Package collection that contains +Nix expressions called the Nixpkgs package collection that contains packages ranging from basic development stuff such as GCC and Glibc, to end-user applications like Mozilla Firefox. (Nix is however not -tied to the Nix Package collection; you could write your own Nix -expressions based on it, or completely new ones.) +tied to the Nixpkgs package collection; you could write your own Nix +expressions based on Nixpkgs, or completely new ones.)
You can manually download the latest version of Nixpkgs from . However, diff --git a/doc/manual/packages/channels.xml b/doc/manual/packages/channels.xml index 15c119fcb..1ed2bba52 100644 --- a/doc/manual/packages/channels.xml +++ b/doc/manual/packages/channels.xml @@ -17,6 +17,9 @@ a set of Nix expressions and a manifest. Using the command nix-channel you can automatically stay up to date with whatever is available at that URL. + +To see the list of official NixOS channels, visit . You can “subscribe” to a channel using nix-channel --add, e.g., diff --git a/doc/manual/packages/garbage-collection.xml b/doc/manual/packages/garbage-collection.xml index a1b0ef22a..b506f22b0 100644 --- a/doc/manual/packages/garbage-collection.xml +++ b/doc/manual/packages/garbage-collection.xml @@ -52,12 +52,13 @@ garbage collector as follows: $ nix-store --gc -The behaviour of the gargage collector is affected by the keep- -derivations (default: true) and keep-outputs +The behaviour of the garbage collector is affected by the +keep-derivations (default: true) and keep-outputs (default: false) options in the Nix configuration file. The defaults will ensure -that all derivations that are not build-time dependencies of garbage collector roots -will be collected but that all output paths that are not runtime dependencies -will be collected. (This is usually what you want, but while you are developing +that all derivations that are build-time dependencies of garbage collector roots +will be kept and that all output paths that are runtime dependencies +will be kept as well. All other derivations or paths will be collected. +(This is usually what you want, but while you are developing it may make sense to keep outputs to ensure that rebuild times are quick.) If you are feeling uncertain, you can also first view what files would diff --git a/doc/manual/packages/s3-substituter.xml b/doc/manual/packages/s3-substituter.xml index e7589ffdb..868b5a66d 100644 --- a/doc/manual/packages/s3-substituter.xml +++ b/doc/manual/packages/s3-substituter.xml @@ -113,7 +113,7 @@ the S3 URL: exactly s3://example-nix-cache.
Nix will use the default + xlink:href="https://docs.aws.amazon.com/sdk-for-cpp/v1/developer-guide/credentials.html">default credential provider chain for authenticating requests to Amazon S3. @@ -138,7 +138,7 @@ the S3 URL: be s3://example-nix-cache. Nix will use the default + xlink:href="https://docs.aws.amazon.com/sdk-for-cpp/v1/developer-guide/credentials.html">default credential provider chain for authenticating requests to Amazon S3. @@ -159,7 +159,6 @@ the S3 URL: "s3:ListBucket", "s3:ListBucketMultipartUploads", "s3:ListMultipartUploadParts", - "s3:ListObjects", "s3:PutObject" ], "Resource": [ diff --git a/doc/manual/release-notes/release-notes.xml b/doc/manual/release-notes/release-notes.xml index e8ff586fa..2655d68e3 100644 --- a/doc/manual/release-notes/release-notes.xml +++ b/doc/manual/release-notes/release-notes.xml @@ -12,6 +12,7 @@ --> + diff --git a/doc/manual/release-notes/rl-2.3.xml b/doc/manual/release-notes/rl-2.3.xml new file mode 100644 index 000000000..0ad7d641f --- /dev/null +++ b/doc/manual/release-notes/rl-2.3.xml @@ -0,0 +1,91 @@ +
+ +Release 2.3 (2019-09-04) + +This is primarily a bug fix release. However, it makes some +incompatible changes: + + + + + Nix now uses BSD file locks instead of POSIX file + locks. Because of this, you should not use Nix 2.3 and previous + releases at the same time on a Nix store. + + + + +It also has the following changes: + + + + + builtins.fetchGit's ref + argument now allows specifying an absolute remote ref. + Nix will automatically prefix ref with + refs/heads only if ref doesn't + already begin with refs/. + + + + + The installer now enables sandboxing by default on Linux when the + system has the necessary kernel support. + + + + + The max-jobs setting now defaults to 1. + + + + New builtin functions: + builtins.isPath, + builtins.hashFile. + + + + + The nix command has a new + () flag to + print build log output to stderr, rather than showing the last log + line in the progress bar. To distinguish between concurrent + builds, log lines are prefixed by the name of the package. + + + + + Builds are now executed in a pseudo-terminal, and the + TERM environment variable is set to + xterm-256color. This allows many programs + (e.g. gcc, clang, + cmake) to print colorized log output. + + + + Add convenience flag. This flag + disables substituters; sets the tarball-ttl + setting to infinity (ensuring that any previously downloaded files + are considered current); and disables retrying downloads and sets + the connection timeout to the minimum. This flag is enabled + automatically if there are no configured non-loopback network + interfaces. + + + + Add a post-build-hook setting to run a + program after a build has succeeded. + + + + Add a trace-function-calls setting to log + the duration of Nix function calls to stderr. + + + + +
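As a rough illustration of the two new builtins named in these release notes (the file name default.nix is only an assumed example), evaluated in a Nix expression or in nix repl:

builtins.isPath ./default.nix            # true: a path literal
builtins.isPath "./default.nix"          # false: a string, not a path
builtins.hashFile "sha256" ./default.nix # hash of the file's contents, returned as a string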
diff --git a/local.mk b/local.mk index 4b380176f..55f85a044 100644 --- a/local.mk +++ b/local.mk @@ -2,11 +2,13 @@ ifeq ($(MAKECMDGOALS), dist) dist-files += $(shell cat .dist-files) endif -dist-files += configure config.h.in nix.spec perl/configure +dist-files += configure config.h.in perl/configure clean-files += Makefile.config -GLOBAL_CXXFLAGS += -I . -I src -I src/libutil -I src/libstore -I src/libmain -I src/libexpr -I src/nix +GLOBAL_CXXFLAGS += -I . -I src -I src/libutil -I src/libstore -I src/libmain -I src/libexpr -I src/nix -Wno-deprecated-declarations $(foreach i, config.h $(call rwildcard, src/lib*, *.hh), \ $(eval $(call install-file-in, $(i), $(includedir)/nix, 0644))) + +$(GCH) $(PCH): src/libutil/util.hh config.h diff --git a/m4/ax_cxx_compile_stdcxx.m4 b/m4/ax_cxx_compile_stdcxx.m4 new file mode 100644 index 000000000..43087b2e6 --- /dev/null +++ b/m4/ax_cxx_compile_stdcxx.m4 @@ -0,0 +1,951 @@ +# =========================================================================== +# https://www.gnu.org/software/autoconf-archive/ax_cxx_compile_stdcxx.html +# =========================================================================== +# +# SYNOPSIS +# +# AX_CXX_COMPILE_STDCXX(VERSION, [ext|noext], [mandatory|optional]) +# +# DESCRIPTION +# +# Check for baseline language coverage in the compiler for the specified +# version of the C++ standard. If necessary, add switches to CXX and +# CXXCPP to enable support. VERSION may be '11' (for the C++11 standard) +# or '14' (for the C++14 standard). +# +# The second argument, if specified, indicates whether you insist on an +# extended mode (e.g. -std=gnu++11) or a strict conformance mode (e.g. +# -std=c++11). If neither is specified, you get whatever works, with +# preference for an extended mode. +# +# The third argument, if specified 'mandatory' or if left unspecified, +# indicates that baseline support for the specified C++ standard is +# required and that the macro should error out if no mode with that +# support is found. If specified 'optional', then configuration proceeds +# regardless, after defining HAVE_CXX${VERSION} if and only if a +# supporting mode is found. +# +# LICENSE +# +# Copyright (c) 2008 Benjamin Kosnik +# Copyright (c) 2012 Zack Weinberg +# Copyright (c) 2013 Roy Stogner +# Copyright (c) 2014, 2015 Google Inc.; contributed by Alexey Sokolov +# Copyright (c) 2015 Paul Norman +# Copyright (c) 2015 Moritz Klammler +# Copyright (c) 2016, 2018 Krzesimir Nowak +# Copyright (c) 2019 Enji Cooper +# +# Copying and distribution of this file, with or without modification, are +# permitted in any medium without royalty provided the copyright notice +# and this notice are preserved. This file is offered as-is, without any +# warranty. + +#serial 11 + +dnl This macro is based on the code from the AX_CXX_COMPILE_STDCXX_11 macro +dnl (serial version number 13). 
+ +AC_DEFUN([AX_CXX_COMPILE_STDCXX], [dnl + m4_if([$1], [11], [ax_cxx_compile_alternatives="11 0x"], + [$1], [14], [ax_cxx_compile_alternatives="14 1y"], + [$1], [17], [ax_cxx_compile_alternatives="17 1z"], + [m4_fatal([invalid first argument `$1' to AX_CXX_COMPILE_STDCXX])])dnl + m4_if([$2], [], [], + [$2], [ext], [], + [$2], [noext], [], + [m4_fatal([invalid second argument `$2' to AX_CXX_COMPILE_STDCXX])])dnl + m4_if([$3], [], [ax_cxx_compile_cxx$1_required=true], + [$3], [mandatory], [ax_cxx_compile_cxx$1_required=true], + [$3], [optional], [ax_cxx_compile_cxx$1_required=false], + [m4_fatal([invalid third argument `$3' to AX_CXX_COMPILE_STDCXX])]) + AC_LANG_PUSH([C++])dnl + ac_success=no + + m4_if([$2], [noext], [], [dnl + if test x$ac_success = xno; then + for alternative in ${ax_cxx_compile_alternatives}; do + switch="-std=gnu++${alternative}" + cachevar=AS_TR_SH([ax_cv_cxx_compile_cxx$1_$switch]) + AC_CACHE_CHECK(whether $CXX supports C++$1 features with $switch, + $cachevar, + [ac_save_CXX="$CXX" + CXX="$CXX $switch" + AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_testbody_$1])], + [eval $cachevar=yes], + [eval $cachevar=no]) + CXX="$ac_save_CXX"]) + if eval test x\$$cachevar = xyes; then + CXX="$CXX $switch" + if test -n "$CXXCPP" ; then + CXXCPP="$CXXCPP $switch" + fi + ac_success=yes + break + fi + done + fi]) + + m4_if([$2], [ext], [], [dnl + if test x$ac_success = xno; then + dnl HP's aCC needs +std=c++11 according to: + dnl http://h21007.www2.hp.com/portal/download/files/unprot/aCxx/PDF_Release_Notes/769149-001.pdf + dnl Cray's crayCC needs "-h std=c++11" + for alternative in ${ax_cxx_compile_alternatives}; do + for switch in -std=c++${alternative} +std=c++${alternative} "-h std=c++${alternative}"; do + cachevar=AS_TR_SH([ax_cv_cxx_compile_cxx$1_$switch]) + AC_CACHE_CHECK(whether $CXX supports C++$1 features with $switch, + $cachevar, + [ac_save_CXX="$CXX" + CXX="$CXX $switch" + AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_testbody_$1])], + [eval $cachevar=yes], + [eval $cachevar=no]) + CXX="$ac_save_CXX"]) + if eval test x\$$cachevar = xyes; then + CXX="$CXX $switch" + if test -n "$CXXCPP" ; then + CXXCPP="$CXXCPP $switch" + fi + ac_success=yes + break + fi + done + if test x$ac_success = xyes; then + break + fi + done + fi]) + AC_LANG_POP([C++]) + if test x$ax_cxx_compile_cxx$1_required = xtrue; then + if test x$ac_success = xno; then + AC_MSG_ERROR([*** A compiler with support for C++$1 language features is required.]) + fi + fi + if test x$ac_success = xno; then + HAVE_CXX$1=0 + AC_MSG_NOTICE([No compiler with C++$1 support was found]) + else + HAVE_CXX$1=1 + AC_DEFINE(HAVE_CXX$1,1, + [define if the compiler supports basic C++$1 syntax]) + fi + AC_SUBST(HAVE_CXX$1) +]) + + +dnl Test body for checking C++11 support + +m4_define([_AX_CXX_COMPILE_STDCXX_testbody_11], + _AX_CXX_COMPILE_STDCXX_testbody_new_in_11 +) + + +dnl Test body for checking C++14 support + +m4_define([_AX_CXX_COMPILE_STDCXX_testbody_14], + _AX_CXX_COMPILE_STDCXX_testbody_new_in_11 + _AX_CXX_COMPILE_STDCXX_testbody_new_in_14 +) + +m4_define([_AX_CXX_COMPILE_STDCXX_testbody_17], + _AX_CXX_COMPILE_STDCXX_testbody_new_in_11 + _AX_CXX_COMPILE_STDCXX_testbody_new_in_14 + _AX_CXX_COMPILE_STDCXX_testbody_new_in_17 +) + +dnl Tests for new features in C++11 + +m4_define([_AX_CXX_COMPILE_STDCXX_testbody_new_in_11], [[ + +// If the compiler admits that it is not ready for C++11, why torture it? +// Hopefully, this will speed up the test. 
+ +#ifndef __cplusplus + +#error "This is not a C++ compiler" + +#elif __cplusplus < 201103L + +#error "This is not a C++11 compiler" + +#else + +namespace cxx11 +{ + + namespace test_static_assert + { + + template + struct check + { + static_assert(sizeof(int) <= sizeof(T), "not big enough"); + }; + + } + + namespace test_final_override + { + + struct Base + { + virtual ~Base() {} + virtual void f() {} + }; + + struct Derived : public Base + { + virtual ~Derived() override {} + virtual void f() override {} + }; + + } + + namespace test_double_right_angle_brackets + { + + template < typename T > + struct check {}; + + typedef check single_type; + typedef check> double_type; + typedef check>> triple_type; + typedef check>>> quadruple_type; + + } + + namespace test_decltype + { + + int + f() + { + int a = 1; + decltype(a) b = 2; + return a + b; + } + + } + + namespace test_type_deduction + { + + template < typename T1, typename T2 > + struct is_same + { + static const bool value = false; + }; + + template < typename T > + struct is_same + { + static const bool value = true; + }; + + template < typename T1, typename T2 > + auto + add(T1 a1, T2 a2) -> decltype(a1 + a2) + { + return a1 + a2; + } + + int + test(const int c, volatile int v) + { + static_assert(is_same::value == true, ""); + static_assert(is_same::value == false, ""); + static_assert(is_same::value == false, ""); + auto ac = c; + auto av = v; + auto sumi = ac + av + 'x'; + auto sumf = ac + av + 1.0; + static_assert(is_same::value == true, ""); + static_assert(is_same::value == true, ""); + static_assert(is_same::value == true, ""); + static_assert(is_same::value == false, ""); + static_assert(is_same::value == true, ""); + return (sumf > 0.0) ? sumi : add(c, v); + } + + } + + namespace test_noexcept + { + + int f() { return 0; } + int g() noexcept { return 0; } + + static_assert(noexcept(f()) == false, ""); + static_assert(noexcept(g()) == true, ""); + + } + + namespace test_constexpr + { + + template < typename CharT > + unsigned long constexpr + strlen_c_r(const CharT *const s, const unsigned long acc) noexcept + { + return *s ? 
strlen_c_r(s + 1, acc + 1) : acc; + } + + template < typename CharT > + unsigned long constexpr + strlen_c(const CharT *const s) noexcept + { + return strlen_c_r(s, 0UL); + } + + static_assert(strlen_c("") == 0UL, ""); + static_assert(strlen_c("1") == 1UL, ""); + static_assert(strlen_c("example") == 7UL, ""); + static_assert(strlen_c("another\0example") == 7UL, ""); + + } + + namespace test_rvalue_references + { + + template < int N > + struct answer + { + static constexpr int value = N; + }; + + answer<1> f(int&) { return answer<1>(); } + answer<2> f(const int&) { return answer<2>(); } + answer<3> f(int&&) { return answer<3>(); } + + void + test() + { + int i = 0; + const int c = 0; + static_assert(decltype(f(i))::value == 1, ""); + static_assert(decltype(f(c))::value == 2, ""); + static_assert(decltype(f(0))::value == 3, ""); + } + + } + + namespace test_uniform_initialization + { + + struct test + { + static const int zero {}; + static const int one {1}; + }; + + static_assert(test::zero == 0, ""); + static_assert(test::one == 1, ""); + + } + + namespace test_lambdas + { + + void + test1() + { + auto lambda1 = [](){}; + auto lambda2 = lambda1; + lambda1(); + lambda2(); + } + + int + test2() + { + auto a = [](int i, int j){ return i + j; }(1, 2); + auto b = []() -> int { return '0'; }(); + auto c = [=](){ return a + b; }(); + auto d = [&](){ return c; }(); + auto e = [a, &b](int x) mutable { + const auto identity = [](int y){ return y; }; + for (auto i = 0; i < a; ++i) + a += b--; + return x + identity(a + b); + }(0); + return a + b + c + d + e; + } + + int + test3() + { + const auto nullary = [](){ return 0; }; + const auto unary = [](int x){ return x; }; + using nullary_t = decltype(nullary); + using unary_t = decltype(unary); + const auto higher1st = [](nullary_t f){ return f(); }; + const auto higher2nd = [unary](nullary_t f1){ + return [unary, f1](unary_t f2){ return f2(unary(f1())); }; + }; + return higher1st(nullary) + higher2nd(nullary)(unary); + } + + } + + namespace test_variadic_templates + { + + template + struct sum; + + template + struct sum + { + static constexpr auto value = N0 + sum::value; + }; + + template <> + struct sum<> + { + static constexpr auto value = 0; + }; + + static_assert(sum<>::value == 0, ""); + static_assert(sum<1>::value == 1, ""); + static_assert(sum<23>::value == 23, ""); + static_assert(sum<1, 2>::value == 3, ""); + static_assert(sum<5, 5, 11>::value == 21, ""); + static_assert(sum<2, 3, 5, 7, 11, 13>::value == 41, ""); + + } + + // http://stackoverflow.com/questions/13728184/template-aliases-and-sfinae + // Clang 3.1 fails with headers of libstd++ 4.8.3 when using std::function + // because of this. + namespace test_template_alias_sfinae + { + + struct foo {}; + + template + using member = typename T::member_type; + + template + void func(...) {} + + template + void func(member*) {} + + void test(); + + void test() { func(0); } + + } + +} // namespace cxx11 + +#endif // __cplusplus >= 201103L + +]]) + + +dnl Tests for new features in C++14 + +m4_define([_AX_CXX_COMPILE_STDCXX_testbody_new_in_14], [[ + +// If the compiler admits that it is not ready for C++14, why torture it? +// Hopefully, this will speed up the test. + +#ifndef __cplusplus + +#error "This is not a C++ compiler" + +#elif __cplusplus < 201402L + +#error "This is not a C++14 compiler" + +#else + +namespace cxx14 +{ + + namespace test_polymorphic_lambdas + { + + int + test() + { + const auto lambda = [](auto&&... args){ + const auto istiny = [](auto x){ + return (sizeof(x) == 1UL) ? 
1 : 0; + }; + const int aretiny[] = { istiny(args)... }; + return aretiny[0]; + }; + return lambda(1, 1L, 1.0f, '1'); + } + + } + + namespace test_binary_literals + { + + constexpr auto ivii = 0b0000000000101010; + static_assert(ivii == 42, "wrong value"); + + } + + namespace test_generalized_constexpr + { + + template < typename CharT > + constexpr unsigned long + strlen_c(const CharT *const s) noexcept + { + auto length = 0UL; + for (auto p = s; *p; ++p) + ++length; + return length; + } + + static_assert(strlen_c("") == 0UL, ""); + static_assert(strlen_c("x") == 1UL, ""); + static_assert(strlen_c("test") == 4UL, ""); + static_assert(strlen_c("another\0test") == 7UL, ""); + + } + + namespace test_lambda_init_capture + { + + int + test() + { + auto x = 0; + const auto lambda1 = [a = x](int b){ return a + b; }; + const auto lambda2 = [a = lambda1(x)](){ return a; }; + return lambda2(); + } + + } + + namespace test_digit_separators + { + + constexpr auto ten_million = 100'000'000; + static_assert(ten_million == 100000000, ""); + + } + + namespace test_return_type_deduction + { + + auto f(int& x) { return x; } + decltype(auto) g(int& x) { return x; } + + template < typename T1, typename T2 > + struct is_same + { + static constexpr auto value = false; + }; + + template < typename T > + struct is_same + { + static constexpr auto value = true; + }; + + int + test() + { + auto x = 0; + static_assert(is_same::value, ""); + static_assert(is_same::value, ""); + return x; + } + + } + +} // namespace cxx14 + +#endif // __cplusplus >= 201402L + +]]) + + +dnl Tests for new features in C++17 + +m4_define([_AX_CXX_COMPILE_STDCXX_testbody_new_in_17], [[ + +// If the compiler admits that it is not ready for C++17, why torture it? +// Hopefully, this will speed up the test. + +#ifndef __cplusplus + +#error "This is not a C++ compiler" + +#elif __cplusplus < 201703L + +#error "This is not a C++17 compiler" + +#else + +#include +#include +#include + +namespace cxx17 +{ + + namespace test_constexpr_lambdas + { + + constexpr int foo = [](){return 42;}(); + + } + + namespace test::nested_namespace::definitions + { + + } + + namespace test_fold_expression + { + + template + int multiply(Args... args) + { + return (args * ... * 1); + } + + template + bool all(Args... 
args) + { + return (args && ...); + } + + } + + namespace test_extended_static_assert + { + + static_assert (true); + + } + + namespace test_auto_brace_init_list + { + + auto foo = {5}; + auto bar {5}; + + static_assert(std::is_same, decltype(foo)>::value); + static_assert(std::is_same::value); + } + + namespace test_typename_in_template_template_parameter + { + + template typename X> struct D; + + } + + namespace test_fallthrough_nodiscard_maybe_unused_attributes + { + + int f1() + { + return 42; + } + + [[nodiscard]] int f2() + { + [[maybe_unused]] auto unused = f1(); + + switch (f1()) + { + case 17: + f1(); + [[fallthrough]]; + case 42: + f1(); + } + return f1(); + } + + } + + namespace test_extended_aggregate_initialization + { + + struct base1 + { + int b1, b2 = 42; + }; + + struct base2 + { + base2() { + b3 = 42; + } + int b3; + }; + + struct derived : base1, base2 + { + int d; + }; + + derived d1 {{1, 2}, {}, 4}; // full initialization + derived d2 {{}, {}, 4}; // value-initialized bases + + } + + namespace test_general_range_based_for_loop + { + + struct iter + { + int i; + + int& operator* () + { + return i; + } + + const int& operator* () const + { + return i; + } + + iter& operator++() + { + ++i; + return *this; + } + }; + + struct sentinel + { + int i; + }; + + bool operator== (const iter& i, const sentinel& s) + { + return i.i == s.i; + } + + bool operator!= (const iter& i, const sentinel& s) + { + return !(i == s); + } + + struct range + { + iter begin() const + { + return {0}; + } + + sentinel end() const + { + return {5}; + } + }; + + void f() + { + range r {}; + + for (auto i : r) + { + [[maybe_unused]] auto v = i; + } + } + + } + + namespace test_lambda_capture_asterisk_this_by_value + { + + struct t + { + int i; + int foo() + { + return [*this]() + { + return i; + }(); + } + }; + + } + + namespace test_enum_class_construction + { + + enum class byte : unsigned char + {}; + + byte foo {42}; + + } + + namespace test_constexpr_if + { + + template + int f () + { + if constexpr(cond) + { + return 13; + } + else + { + return 42; + } + } + + } + + namespace test_selection_statement_with_initializer + { + + int f() + { + return 13; + } + + int f2() + { + if (auto i = f(); i > 0) + { + return 3; + } + + switch (auto i = f(); i + 4) + { + case 17: + return 2; + + default: + return 1; + } + } + + } + + namespace test_template_argument_deduction_for_class_templates + { + + template + struct pair + { + pair (T1 p1, T2 p2) + : m1 {p1}, + m2 {p2} + {} + + T1 m1; + T2 m2; + }; + + void f() + { + [[maybe_unused]] auto p = pair{13, 42u}; + } + + } + + namespace test_non_type_auto_template_parameters + { + + template + struct B + {}; + + B<5> b1; + B<'a'> b2; + + } + + namespace test_structured_bindings + { + + int arr[2] = { 1, 2 }; + std::pair pr = { 1, 2 }; + + auto f1() -> int(&)[2] + { + return arr; + } + + auto f2() -> std::pair& + { + return pr; + } + + struct S + { + int x1 : 2; + volatile double y1; + }; + + S f3() + { + return {}; + } + + auto [ x1, y1 ] = f1(); + auto& [ xr1, yr1 ] = f1(); + auto [ x2, y2 ] = f2(); + auto& [ xr2, yr2 ] = f2(); + const auto [ x3, y3 ] = f3(); + + } + + namespace test_exception_spec_type_system + { + + struct Good {}; + struct Bad {}; + + void g1() noexcept; + void g2(); + + template + Bad + f(T*, T*); + + template + Good + f(T1*, T2*); + + static_assert (std::is_same_v); + + } + + namespace test_inline_variables + { + + template void f(T) + {} + + template inline T g(T) + { + return T{}; + } + + template<> inline void f<>(int) + {} + + template<> 
int g<>(int) + { + return 5; + } + + } + +} // namespace cxx17 + +#endif // __cplusplus < 201703L + +]]) diff --git a/m4/ax_cxx_compile_stdcxx_17.m4 b/m4/ax_cxx_compile_stdcxx_17.m4 new file mode 100644 index 000000000..a68341717 --- /dev/null +++ b/m4/ax_cxx_compile_stdcxx_17.m4 @@ -0,0 +1,35 @@ +# ============================================================================= +# https://www.gnu.org/software/autoconf-archive/ax_cxx_compile_stdcxx_17.html +# ============================================================================= +# +# SYNOPSIS +# +# AX_CXX_COMPILE_STDCXX_17([ext|noext], [mandatory|optional]) +# +# DESCRIPTION +# +# Check for baseline language coverage in the compiler for the C++17 +# standard; if necessary, add switches to CXX and CXXCPP to enable +# support. +# +# This macro is a convenience alias for calling the AX_CXX_COMPILE_STDCXX +# macro with the version set to C++17. The two optional arguments are +# forwarded literally as the second and third argument respectively. +# Please see the documentation for the AX_CXX_COMPILE_STDCXX macro for +# more information. If you want to use this macro, you also need to +# download the ax_cxx_compile_stdcxx.m4 file. +# +# LICENSE +# +# Copyright (c) 2015 Moritz Klammler +# Copyright (c) 2016 Krzesimir Nowak +# +# Copying and distribution of this file, with or without modification, are +# permitted in any medium without royalty provided the copyright notice +# and this notice are preserved. This file is offered as-is, without any +# warranty. + +#serial 2 + +AX_REQUIRE_DEFINED([AX_CXX_COMPILE_STDCXX]) +AC_DEFUN([AX_CXX_COMPILE_STDCXX_17], [AX_CXX_COMPILE_STDCXX([17], [$1], [$2])]) diff --git a/maintainers/upload-release.pl b/maintainers/upload-release.pl index 8432c9596..77534babb 100755 --- a/maintainers/upload-release.pl +++ b/maintainers/upload-release.pl @@ -44,7 +44,7 @@ print STDERR "Nix revision is $nixRev, version is $version\n"; File::Path::make_path($releasesDir); if (system("mountpoint -q $releasesDir") != 0) { - system("sshfs hydra-mirror:/releases $releasesDir") == 0 or die; + system("sshfs hydra-mirror\@nixos.org:/releases $releasesDir") == 0 or die; } my $releaseDir = "$releasesDir/nix/$releaseName"; @@ -67,10 +67,10 @@ sub downloadFile { } my $sha256_expected = $buildInfo->{buildproducts}->{$productNr}->{sha256hash} or die; - my $sha256_actual = `nix hash-file --type sha256 '$dstFile'`; + my $sha256_actual = `nix hash-file --base16 --type sha256 '$dstFile'`; chomp $sha256_actual; if ($sha256_expected ne $sha256_actual) { - print STDERR "file $dstFile is corrupt\n"; + print STDERR "file $dstFile is corrupt, got $sha256_actual, expected $sha256_expected\n"; exit 1; } diff --git a/misc/launchd/org.nixos.nix-daemon.plist.in b/misc/launchd/org.nixos.nix-daemon.plist.in index 92ed12fa5..9f26296a9 100644 --- a/misc/launchd/org.nixos.nix-daemon.plist.in +++ b/misc/launchd/org.nixos.nix-daemon.plist.in @@ -13,8 +13,12 @@ RunAtLoad - Program - @bindir@/nix-daemon + ProgramArguments + + /bin/sh + -c + /bin/wait4path /nix/var/nix/profiles/default/bin/nix-daemon && /nix/var/nix/profiles/default/bin/nix-daemon + StandardErrorPath /var/log/nix-daemon.log StandardOutPath diff --git a/misc/systemd/nix-daemon.service.in b/misc/systemd/nix-daemon.service.in index 5fc04a3f5..25655204d 100644 --- a/misc/systemd/nix-daemon.service.in +++ b/misc/systemd/nix-daemon.service.in @@ -7,3 +7,6 @@ ConditionPathIsReadWrite=@localstatedir@/nix/daemon-socket [Service] ExecStart=@@bindir@/nix-daemon nix-daemon --daemon KillMode=process + 
+[Install] +WantedBy=multi-user.target diff --git a/mk/libraries.mk b/mk/libraries.mk index 3953446cb..307e29b9d 100644 --- a/mk/libraries.mk +++ b/mk/libraries.mk @@ -91,7 +91,7 @@ define build-library $(1)_PATH := $$(_d)/$$($(1)_NAME).$(SO_EXT) $$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/ - $$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED) + $$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED) ifneq ($(OS), Darwin) $(1)_LDFLAGS_USE += -Wl,-rpath,$$(abspath $$(_d)) @@ -105,7 +105,7 @@ define build-library $$(eval $$(call create-dir, $$($(1)_INSTALL_DIR))) $$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/ - $$(trace-ld) $(CXX) -o $$@ -shared $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) + $$(trace-ld) $(CXX) -o $$@ -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) $(1)_LDFLAGS_USE_INSTALLED += -L$$(DESTDIR)$$($(1)_INSTALL_DIR) -l$$(patsubst lib%,%,$$(strip $$($(1)_NAME))) ifneq ($(OS), Darwin) diff --git a/mk/patterns.mk b/mk/patterns.mk index 3219d9629..7319f4cdd 100644 --- a/mk/patterns.mk +++ b/mk/patterns.mk @@ -1,10 +1,10 @@ $(buildprefix)%.o: %.cc @mkdir -p "$(dir $@)" - $(trace-cxx) $(CXX) -o $@ -c $< $(GLOBAL_CXXFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(CXXFLAGS) $($@_CXXFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP + $(trace-cxx) $(CXX) -o $@ -c $< $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP $(buildprefix)%.o: %.cpp @mkdir -p "$(dir $@)" - $(trace-cxx) $(CXX) -o $@ -c $< $(GLOBAL_CXXFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(CXXFLAGS) $($@_CXXFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP + $(trace-cxx) $(CXX) -o $@ -c $< $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP $(buildprefix)%.o: %.c @mkdir -p "$(dir $@)" diff --git a/mk/precompiled-headers.mk b/mk/precompiled-headers.mk new file mode 100644 index 000000000..1a727ba1b --- /dev/null +++ b/mk/precompiled-headers.mk @@ -0,0 +1,42 @@ +PRECOMPILE_HEADERS ?= 1 + +print-var-help += \ + echo " PRECOMPILE_HEADERS ($(PRECOMPILE_HEADERS)): Whether to use precompiled headers to speed up the build"; + +GCH = $(buildprefix)precompiled-headers.h.gch + +$(GCH): precompiled-headers.h + @rm -f $@ + @mkdir -p "$(dir $@)" + $(trace-gen) $(CXX) -x c++-header -o $@ $< $(GLOBAL_CXXFLAGS) + +PCH = $(buildprefix)precompiled-headers.h.pch + +$(PCH): precompiled-headers.h + @rm -f $@ + @mkdir -p "$(dir $@)" + $(trace-gen) $(CXX) -x c++-header -o $@ $< $(GLOBAL_CXXFLAGS) + +clean-files += $(GCH) $(PCH) + +ifeq ($(PRECOMPILE_HEADERS), 1) + + ifeq ($(CXX), g++) + + GLOBAL_CXXFLAGS_PCH += -include $(buildprefix)precompiled-headers.h -Winvalid-pch + + GLOBAL_ORDER_AFTER += $(GCH) + + else ifeq ($(CXX), clang++) + + GLOBAL_CXXFLAGS_PCH += -include-pch $(PCH) -Winvalid-pch + + GLOBAL_ORDER_AFTER += $(PCH) + + else + + $(error Don't know how to precompile headers on $(CXX)) + + endif + +endif diff --git a/mk/programs.mk b/mk/programs.mk index 2fbda12bd..d93df4468 100644 --- 
a/mk/programs.mk +++ b/mk/programs.mk @@ -32,7 +32,7 @@ define build-program $$(eval $$(call create-dir, $$(_d))) $$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/ - $$(trace-ld) $(CXX) -o $$@ $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) + $$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $(1)_INSTALL_DIR ?= $$(bindir) $(1)_INSTALL_PATH := $$($(1)_INSTALL_DIR)/$(1) @@ -46,7 +46,7 @@ define build-program _libs_final := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_INSTALL_PATH)) $(DESTDIR)$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/ - $$(trace-ld) $(CXX) -o $$@ $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) + $$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) else diff --git a/nix-rust/Cargo.lock b/nix-rust/Cargo.lock new file mode 100644 index 000000000..957c01e5a --- /dev/null +++ b/nix-rust/Cargo.lock @@ -0,0 +1,399 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +[[package]] +name = "assert_matches" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "autocfg" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "bit-set" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bit-vec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "bit-vec" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "bitflags" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "byteorder" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "c2-chacha" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "cloudabi" +version = "0.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "fnv" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "getrandom" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "hex" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "libc" +version = "0.2.66" +source = 
"registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "nix-rust" +version = "0.1.0" +dependencies = [ + "assert_matches 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "proptest 0.9.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-traits" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "proptest" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bit-set 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", + "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", + "rusty-fork 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "quick-error" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "rand" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] 
+name = "rand_chacha" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_chacha" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "c2-chacha 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_hc" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_isaac" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_jitter" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_os" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_pcg" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_xorshift" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "redox_syscall" +version = "0.1.56" +source = 
"registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "regex-syntax" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "remove_dir_all" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rusty-fork" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "wait-timeout 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "tempfile" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", + "remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "wait-timeout" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "wasi" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "winapi" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum assert_matches 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7deb0a829ca7bcfaf5da70b073a8d128619259a7be8216a355e23f00763059e5" +"checksum autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2" +"checksum bit-set 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e84c238982c4b1e1ee668d136c510c67a13465279c0cb367ea6baf6310620a80" +"checksum bit-vec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f59bbe95d4e52a6398ec21238d31577f2b28a9d86807f06ca59d191d8440d0bb" +"checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" +"checksum byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5" +"checksum c2-chacha 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "214238caa1bf3a496ec3392968969cab8549f96ff30652c9e56885329315f6bb" +"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = 
"4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" +"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" +"checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3" +"checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" +"checksum getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e7db7ca94ed4cd01190ceee0d8a8052f08a247aa1b469a7f68c6a3b71afcf407" +"checksum hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" +"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +"checksum libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)" = "d515b1f41455adea1313a4a2ac8a8a477634fbae63cc6100e3aebb207ce61558" +"checksum num-traits 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c81ffc11c212fa327657cb19dd85eb7419e163b5b076bede2bdb5c974c07e4" +"checksum ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "74490b50b9fbe561ac330df47c08f3f33073d2d00c150f719147d7c54522fa1b" +"checksum proptest 0.9.4 (registry+https://github.com/rust-lang/crates.io-index)" = "cf147e022eacf0c8a054ab864914a7602618adba841d800a9a9868a5237a529f" +"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0" +"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" +"checksum rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3ae1b169243eaf61759b8475a998f0a385e42042370f3a7dbaf35246eacc8412" +"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" +"checksum rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853" +"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +"checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" +"checksum rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +"checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" +"checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +"checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" +"checksum rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" +"checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = 
"7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" +"checksum rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" +"checksum rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" +"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +"checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84" +"checksum regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716" +"checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" +"checksum rusty-fork 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3dd93264e10c577503e926bd1430193eeb5d21b059148910082245309b424fae" +"checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" +"checksum wait-timeout 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +"checksum wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b89c3ce4ce14bdc6fb6beaf9ec7928ca331de5df7e5ea278375642a2f478570d" +"checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6" +"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/nix-rust/Cargo.toml b/nix-rust/Cargo.toml new file mode 100644 index 000000000..1372e5a73 --- /dev/null +++ b/nix-rust/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "nix-rust" +version = "0.1.0" +authors = ["Eelco Dolstra "] +edition = "2018" + +[lib] +name = "nixrust" +crate-type = ["cdylib"] + +[dependencies] +libc = "0.2" +#futures-preview = { version = "=0.3.0-alpha.19" } +#hyper = "0.13.0-alpha.4" +#http = "0.1" +#tokio = { version = "0.2.0-alpha.6", default-features = false, features = ["rt-full"] } +lazy_static = "1.4" +#byteorder = "1.3" + +[dev-dependencies] +hex = "0.3" +assert_matches = "1.3" +proptest = "0.9" diff --git a/nix-rust/local.mk b/nix-rust/local.mk new file mode 100644 index 000000000..1e006e500 --- /dev/null +++ b/nix-rust/local.mk @@ -0,0 +1,45 @@ +ifeq ($(OPTIMIZE), 1) + RUST_MODE = --release + RUST_DIR = release +else + RUST_MODE = + RUST_DIR = debug +endif + +libnixrust_PATH := $(d)/target/$(RUST_DIR)/libnixrust.$(SO_EXT) +libnixrust_INSTALL_PATH := $(libdir)/libnixrust.$(SO_EXT) +libnixrust_LDFLAGS_USE := -L$(d)/target/$(RUST_DIR) -lnixrust -ldl +libnixrust_LDFLAGS_USE_INSTALLED := -L$(libdir) -lnixrust -ldl + +ifeq ($(OS), Darwin) +libnixrust_BUILD_FLAGS = NIX_LDFLAGS="-undefined dynamic_lookup" +else +libnixrust_LDFLAGS_USE += -Wl,-rpath,$(abspath $(d)/target/$(RUST_DIR)) +libnixrust_LDFLAGS_USE_INSTALLED += -Wl,-rpath,$(libdir) +endif + +$(libnixrust_PATH): $(call rwildcard, $(d)/src, 
*.rs) $(d)/Cargo.toml + $(trace-gen) cd nix-rust && CARGO_HOME=$$(if [[ -d vendor ]]; then echo vendor; fi) \ + $(libnixrust_BUILD_FLAGS) \ + cargo build $(RUST_MODE) $$(if [[ -d vendor ]]; then echo --offline; fi) \ + && touch target/$(RUST_DIR)/libnixrust.$(SO_EXT) + +$(libnixrust_INSTALL_PATH): $(libnixrust_PATH) + $(target-gen) cp $^ $@ +ifeq ($(OS), Darwin) + install_name_tool -id $@ $@ +endif + +dist-files += $(d)/vendor + +clean: clean-rust + +clean-rust: + $(suppress) rm -rfv nix-rust/target + +ifneq ($(OS), Darwin) +check: rust-tests + +rust-tests: + cd nix-rust && CARGO_HOME=$$(if [[ -d vendor ]]; then echo vendor; fi) cargo test --release $$(if [[ -d vendor ]]; then echo --offline; fi) +endif diff --git a/nix-rust/src/c.rs b/nix-rust/src/c.rs new file mode 100644 index 000000000..c1358545f --- /dev/null +++ b/nix-rust/src/c.rs @@ -0,0 +1,77 @@ +use super::{error, store::path, store::StorePath, util}; + +#[no_mangle] +pub unsafe extern "C" fn ffi_String_new(s: &str, out: *mut String) { + // FIXME: check whether 's' is valid UTF-8? + out.write(s.to_string()) +} + +#[no_mangle] +pub unsafe extern "C" fn ffi_String_drop(self_: *mut String) { + std::ptr::drop_in_place(self_); +} + +#[no_mangle] +pub extern "C" fn ffi_StorePath_new( + path: &str, + store_dir: &str, +) -> Result { + StorePath::new(std::path::Path::new(path), std::path::Path::new(store_dir)) + .map_err(|err| err.into()) +} + +#[no_mangle] +pub extern "C" fn ffi_StorePath_new2( + hash: &[u8; crate::store::path::STORE_PATH_HASH_BYTES], + name: &str, +) -> Result { + StorePath::from_parts(*hash, name).map_err(|err| err.into()) +} + +#[no_mangle] +pub extern "C" fn ffi_StorePath_fromBaseName( + base_name: &str, +) -> Result { + StorePath::new_from_base_name(base_name).map_err(|err| err.into()) +} + +#[no_mangle] +pub unsafe extern "C" fn ffi_StorePath_drop(self_: *mut StorePath) { + std::ptr::drop_in_place(self_); +} + +#[no_mangle] +pub extern "C" fn ffi_StorePath_to_string(self_: &StorePath) -> Vec { + let mut buf = vec![0; path::STORE_PATH_HASH_CHARS + 1 + self_.name.name().len()]; + util::base32::encode_into(self_.hash.hash(), &mut buf[0..path::STORE_PATH_HASH_CHARS]); + buf[path::STORE_PATH_HASH_CHARS] = b'-'; + buf[path::STORE_PATH_HASH_CHARS + 1..].clone_from_slice(self_.name.name().as_bytes()); + buf +} + +#[no_mangle] +pub extern "C" fn ffi_StorePath_less_than(a: &StorePath, b: &StorePath) -> bool { + a < b +} + +#[no_mangle] +pub extern "C" fn ffi_StorePath_eq(a: &StorePath, b: &StorePath) -> bool { + a == b +} + +#[no_mangle] +pub extern "C" fn ffi_StorePath_clone(self_: &StorePath) -> StorePath { + self_.clone() +} + +#[no_mangle] +pub extern "C" fn ffi_StorePath_name(self_: &StorePath) -> &str { + self_.name.name() +} + +#[no_mangle] +pub extern "C" fn ffi_StorePath_hash_data( + self_: &StorePath, +) -> &[u8; crate::store::path::STORE_PATH_HASH_BYTES] { + self_.hash.hash() +} diff --git a/nix-rust/src/error.rs b/nix-rust/src/error.rs new file mode 100644 index 000000000..bb0c9a933 --- /dev/null +++ b/nix-rust/src/error.rs @@ -0,0 +1,118 @@ +use std::fmt; + +#[derive(Debug)] +pub enum Error { + InvalidPath(crate::store::StorePath), + BadStorePath(std::path::PathBuf), + NotInStore(std::path::PathBuf), + BadNarInfo, + BadBase32, + StorePathNameEmpty, + StorePathNameTooLong, + BadStorePathName, + NarSizeFieldTooBig, + BadNarString, + BadNarPadding, + BadNarVersionMagic, + MissingNarOpenTag, + MissingNarCloseTag, + MissingNarField, + BadNarField(String), + BadExecutableField, + IOError(std::io::Error), + 
#[cfg(unused)] + HttpError(hyper::error::Error), + Misc(String), + #[cfg(not(test))] + Foreign(CppException), + BadTarFileMemberName(String), +} + +impl From for Error { + fn from(err: std::io::Error) -> Self { + Error::IOError(err) + } +} + +#[cfg(unused)] +impl From for Error { + fn from(err: hyper::error::Error) -> Self { + Error::HttpError(err) + } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Error::InvalidPath(_) => write!(f, "invalid path"), + Error::BadNarInfo => write!(f, ".narinfo file is corrupt"), + Error::BadStorePath(path) => write!(f, "path '{}' is not a store path", path.display()), + Error::NotInStore(path) => { + write!(f, "path '{}' is not in the Nix store", path.display()) + } + Error::BadBase32 => write!(f, "invalid base32 string"), + Error::StorePathNameEmpty => write!(f, "store path name is empty"), + Error::StorePathNameTooLong => { + write!(f, "store path name is longer than 211 characters") + } + Error::BadStorePathName => write!(f, "store path name contains forbidden character"), + Error::NarSizeFieldTooBig => write!(f, "size field in NAR is too big"), + Error::BadNarString => write!(f, "NAR string is not valid UTF-8"), + Error::BadNarPadding => write!(f, "NAR padding is not zero"), + Error::BadNarVersionMagic => write!(f, "unsupported NAR version"), + Error::MissingNarOpenTag => write!(f, "NAR open tag is missing"), + Error::MissingNarCloseTag => write!(f, "NAR close tag is missing"), + Error::MissingNarField => write!(f, "expected NAR field is missing"), + Error::BadNarField(s) => write!(f, "unrecognized NAR field '{}'", s), + Error::BadExecutableField => write!(f, "bad 'executable' field in NAR"), + Error::IOError(err) => write!(f, "I/O error: {}", err), + #[cfg(unused)] + Error::HttpError(err) => write!(f, "HTTP error: {}", err), + #[cfg(not(test))] + Error::Foreign(_) => write!(f, ""), // FIXME + Error::Misc(s) => write!(f, "{}", s), + Error::BadTarFileMemberName(s) => { + write!(f, "tar archive contains illegal file name '{}'", s) + } + } + } +} + +#[cfg(not(test))] +impl From for CppException { + fn from(err: Error) -> Self { + match err { + Error::Foreign(ex) => ex, + _ => CppException::new(&err.to_string()), + } + } +} + +#[cfg(not(test))] +#[repr(C)] +#[derive(Debug)] +pub struct CppException(*const libc::c_void); // == std::exception_ptr* + +#[cfg(not(test))] +impl CppException { + fn new(s: &str) -> Self { + Self(unsafe { make_error(s) }) + } +} + +#[cfg(not(test))] +impl Drop for CppException { + fn drop(&mut self) { + unsafe { + destroy_error(self.0); + } + } +} + +#[cfg(not(test))] +extern "C" { + #[allow(improper_ctypes)] // YOLO + fn make_error(s: &str) -> *const libc::c_void; + + fn destroy_error(exc: *const libc::c_void); +} diff --git a/nix-rust/src/lib.rs b/nix-rust/src/lib.rs new file mode 100644 index 000000000..27ea69fbd --- /dev/null +++ b/nix-rust/src/lib.rs @@ -0,0 +1,9 @@ +#[cfg(not(test))] +mod c; +mod error; +#[cfg(unused)] +mod nar; +mod store; +mod util; + +pub use error::Error; diff --git a/nix-rust/src/nar.rs b/nix-rust/src/nar.rs new file mode 100644 index 000000000..cb520935e --- /dev/null +++ b/nix-rust/src/nar.rs @@ -0,0 +1,126 @@ +use crate::Error; +use byteorder::{LittleEndian, ReadBytesExt}; +use std::convert::TryFrom; +use std::io::Read; + +pub fn parse(input: &mut R) -> Result<(), Error> { + if String::read(input)? 
!= NAR_VERSION_MAGIC { + return Err(Error::BadNarVersionMagic); + } + + parse_file(input) +} + +const NAR_VERSION_MAGIC: &str = "nix-archive-1"; + +fn parse_file(input: &mut R) -> Result<(), Error> { + if String::read(input)? != "(" { + return Err(Error::MissingNarOpenTag); + } + + if String::read(input)? != "type" { + return Err(Error::MissingNarField); + } + + match String::read(input)?.as_ref() { + "regular" => { + let mut _executable = false; + let mut tag = String::read(input)?; + if tag == "executable" { + _executable = true; + if String::read(input)? != "" { + return Err(Error::BadExecutableField); + } + tag = String::read(input)?; + } + if tag != "contents" { + return Err(Error::MissingNarField); + } + let _contents = Vec::::read(input)?; + if String::read(input)? != ")" { + return Err(Error::MissingNarCloseTag); + } + } + "directory" => loop { + match String::read(input)?.as_ref() { + "entry" => { + if String::read(input)? != "(" { + return Err(Error::MissingNarOpenTag); + } + if String::read(input)? != "name" { + return Err(Error::MissingNarField); + } + let _name = String::read(input)?; + if String::read(input)? != "node" { + return Err(Error::MissingNarField); + } + parse_file(input)?; + let tag = String::read(input)?; + if tag != ")" { + return Err(Error::MissingNarCloseTag); + } + } + ")" => break, + s => return Err(Error::BadNarField(s.into())), + } + }, + "symlink" => { + if String::read(input)? != "target" { + return Err(Error::MissingNarField); + } + let _target = String::read(input)?; + if String::read(input)? != ")" { + return Err(Error::MissingNarCloseTag); + } + } + s => return Err(Error::BadNarField(s.into())), + } + + Ok(()) +} + +trait Deserialize: Sized { + fn read(input: &mut R) -> Result; +} + +impl Deserialize for String { + fn read(input: &mut R) -> Result { + let buf = Deserialize::read(input)?; + Ok(String::from_utf8(buf).map_err(|_| Error::BadNarString)?) + } +} + +impl Deserialize for Vec { + fn read(input: &mut R) -> Result { + let n: usize = Deserialize::read(input)?; + let mut buf = vec![0; n]; + input.read_exact(&mut buf)?; + skip_padding(input, n)?; + Ok(buf) + } +} + +fn skip_padding(input: &mut R, len: usize) -> Result<(), Error> { + if len % 8 != 0 { + let mut buf = [0; 8]; + let buf = &mut buf[0..8 - (len % 8)]; + input.read_exact(buf)?; + if !buf.iter().all(|b| *b == 0) { + return Err(Error::BadNarPadding); + } + } + Ok(()) +} + +impl Deserialize for u64 { + fn read(input: &mut R) -> Result { + Ok(input.read_u64::()?) + } +} + +impl Deserialize for usize { + fn read(input: &mut R) -> Result { + let n: u64 = Deserialize::read(input)?; + Ok(usize::try_from(n).map_err(|_| Error::NarSizeFieldTooBig)?) 
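Every token the NAR parser above reads (the version magic, tags such as "(" and "type", and file contents) uses the same framing: a little-endian u64 length, the raw bytes, then zero padding up to an 8-byte boundary, which is what skip_padding checks. A small standalone sketch of a writer for that framing, with a hypothetical helper name and only the standard library:

    use std::io::{self, Write};

    // Write one NAR string: u64 little-endian length, payload, zero padding to 8 bytes.
    fn write_nar_string<W: Write>(out: &mut W, s: &[u8]) -> io::Result<()> {
        out.write_all(&(s.len() as u64).to_le_bytes())?;
        out.write_all(s)?;
        let pad = (8 - s.len() % 8) % 8;
        out.write_all(&vec![0u8; pad])
    }

    fn main() -> io::Result<()> {
        let mut buf = Vec::new();
        write_nar_string(&mut buf, b"nix-archive-1")?; // the magic checked by parse()
        assert_eq!(buf.len(), 8 + 16); // 13 payload bytes padded to 16, after the length
        Ok(())
    }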
+ } +} diff --git a/nix-rust/src/store/binary_cache_store.rs b/nix-rust/src/store/binary_cache_store.rs new file mode 100644 index 000000000..9e1e88b7c --- /dev/null +++ b/nix-rust/src/store/binary_cache_store.rs @@ -0,0 +1,48 @@ +use super::{PathInfo, Store, StorePath}; +use crate::Error; +use hyper::client::Client; + +pub struct BinaryCacheStore { + base_uri: String, + client: Client, +} + +impl BinaryCacheStore { + pub fn new(base_uri: String) -> Self { + Self { + base_uri, + client: Client::new(), + } + } +} + +impl Store for BinaryCacheStore { + fn query_path_info( + &self, + path: &StorePath, + ) -> std::pin::Pin> + Send>> { + let uri = format!("{}/{}.narinfo", self.base_uri.clone(), path.hash); + let path = path.clone(); + let client = self.client.clone(); + let store_dir = self.store_dir().to_string(); + + Box::pin(async move { + let response = client.get(uri.parse::().unwrap()).await?; + + if response.status() == hyper::StatusCode::NOT_FOUND + || response.status() == hyper::StatusCode::FORBIDDEN + { + return Err(Error::InvalidPath(path)); + } + + let mut body = response.into_body(); + + let mut bytes = Vec::new(); + while let Some(next) = body.next().await { + bytes.extend(next?); + } + + PathInfo::parse_nar_info(std::str::from_utf8(&bytes).unwrap(), &store_dir) + }) + } +} diff --git a/nix-rust/src/store/mod.rs b/nix-rust/src/store/mod.rs new file mode 100644 index 000000000..da972482c --- /dev/null +++ b/nix-rust/src/store/mod.rs @@ -0,0 +1,17 @@ +pub mod path; + +#[cfg(unused)] +mod binary_cache_store; +#[cfg(unused)] +mod path_info; +#[cfg(unused)] +mod store; + +pub use path::{StorePath, StorePathHash, StorePathName}; + +#[cfg(unused)] +pub use binary_cache_store::BinaryCacheStore; +#[cfg(unused)] +pub use path_info::PathInfo; +#[cfg(unused)] +pub use store::Store; diff --git a/nix-rust/src/store/path.rs b/nix-rust/src/store/path.rs new file mode 100644 index 000000000..47b5975c0 --- /dev/null +++ b/nix-rust/src/store/path.rs @@ -0,0 +1,225 @@ +use crate::error::Error; +use crate::util::base32; +use std::fmt; +use std::path::Path; + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)] +pub struct StorePath { + pub hash: StorePathHash, + pub name: StorePathName, +} + +pub const STORE_PATH_HASH_BYTES: usize = 20; +pub const STORE_PATH_HASH_CHARS: usize = 32; + +impl StorePath { + pub fn new(path: &Path, store_dir: &Path) -> Result { + if path.parent() != Some(store_dir) { + return Err(Error::NotInStore(path.into())); + } + Self::new_from_base_name( + path.file_name() + .ok_or(Error::BadStorePath(path.into()))? 
+ .to_str() + .ok_or(Error::BadStorePath(path.into()))?, + ) + } + + pub fn from_parts(hash: [u8; STORE_PATH_HASH_BYTES], name: &str) -> Result { + Ok(StorePath { + hash: StorePathHash(hash), + name: StorePathName::new(name)?, + }) + } + + pub fn new_from_base_name(base_name: &str) -> Result { + if base_name.len() < STORE_PATH_HASH_CHARS + 1 + || base_name.as_bytes()[STORE_PATH_HASH_CHARS] != '-' as u8 + { + return Err(Error::BadStorePath(base_name.into())); + } + + Ok(StorePath { + hash: StorePathHash::new(&base_name[0..STORE_PATH_HASH_CHARS])?, + name: StorePathName::new(&base_name[STORE_PATH_HASH_CHARS + 1..])?, + }) + } +} + +impl fmt::Display for StorePath { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}-{}", self.hash, self.name) + } +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct StorePathHash([u8; STORE_PATH_HASH_BYTES]); + +impl StorePathHash { + pub fn new(s: &str) -> Result { + assert_eq!(s.len(), STORE_PATH_HASH_CHARS); + let v = base32::decode(s)?; + assert_eq!(v.len(), STORE_PATH_HASH_BYTES); + let mut bytes: [u8; 20] = Default::default(); + bytes.copy_from_slice(&v[0..STORE_PATH_HASH_BYTES]); + Ok(Self(bytes)) + } + + pub fn hash<'a>(&'a self) -> &'a [u8; STORE_PATH_HASH_BYTES] { + &self.0 + } +} + +impl fmt::Display for StorePathHash { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut buf = vec![0; STORE_PATH_HASH_CHARS]; + base32::encode_into(&self.0, &mut buf); + f.write_str(std::str::from_utf8(&buf).unwrap()) + } +} + +impl Ord for StorePathHash { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + // Historically we've sorted store paths by their base32 + // serialization, but our base32 encodes bytes in reverse + // order. So compare them in reverse order as well. + self.0.iter().rev().cmp(other.0.iter().rev()) + } +} + +impl PartialOrd for StorePathHash { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)] +pub struct StorePathName(String); + +impl StorePathName { + pub fn new(s: &str) -> Result { + if s.len() == 0 { + return Err(Error::StorePathNameEmpty); + } + + if s.len() > 211 { + return Err(Error::StorePathNameTooLong); + } + + if s.starts_with('.') + || !s.chars().all(|c| { + c.is_ascii_alphabetic() + || c.is_ascii_digit() + || c == '+' + || c == '-' + || c == '.' + || c == '_' + || c == '?' 
+ || c == '=' + }) + { + return Err(Error::BadStorePathName); + } + + Ok(Self(s.to_string())) + } + + pub fn name<'a>(&'a self) -> &'a str { + &self.0 + } +} + +impl fmt::Display for StorePathName { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(&self.0) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use assert_matches::assert_matches; + + #[test] + fn test_parse() { + let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-konsole-18.12.3"; + let p = StorePath::new_from_base_name(&s).unwrap(); + assert_eq!(p.name.0, "konsole-18.12.3"); + assert_eq!( + p.hash.0, + [ + 0x9f, 0x76, 0x49, 0x20, 0xf6, 0x5d, 0xe9, 0x71, 0xc4, 0xca, 0x46, 0x21, 0xab, 0xff, + 0x9b, 0x44, 0xef, 0x87, 0x0f, 0x3c + ] + ); + } + + #[test] + fn test_no_name() { + let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-"; + assert_matches!( + StorePath::new_from_base_name(&s), + Err(Error::StorePathNameEmpty) + ); + } + + #[test] + fn test_no_dash() { + let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz"; + assert_matches!( + StorePath::new_from_base_name(&s), + Err(Error::BadStorePath(_)) + ); + } + + #[test] + fn test_short_hash() { + let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxl-konsole-18.12.3"; + assert_matches!( + StorePath::new_from_base_name(&s), + Err(Error::BadStorePath(_)) + ); + } + + #[test] + fn test_invalid_hash() { + let s = "7h7qgvs4kgzsn8e6rb273saxyqh4jxlz-konsole-18.12.3"; + assert_matches!(StorePath::new_from_base_name(&s), Err(Error::BadBase32)); + } + + #[test] + fn test_long_name() { + let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"; + assert_matches!(StorePath::new_from_base_name(&s), Ok(_)); + } + + #[test] + fn test_too_long_name() { + let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"; + assert_matches!( + StorePath::new_from_base_name(&s), + Err(Error::StorePathNameTooLong) + ); + } + + #[test] + fn test_bad_name() { + let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-foo bar"; + assert_matches!( + StorePath::new_from_base_name(&s), + Err(Error::BadStorePathName) + ); + + let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-kónsole"; + assert_matches!( + StorePath::new_from_base_name(&s), + Err(Error::BadStorePathName) + ); + } + + #[test] + fn test_roundtrip() { + let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-konsole-18.12.3"; + assert_eq!(StorePath::new_from_base_name(&s).unwrap().to_string(), s); + } +} diff --git a/nix-rust/src/store/path_info.rs b/nix-rust/src/store/path_info.rs new file mode 100644 index 000000000..c2903ed29 --- /dev/null +++ b/nix-rust/src/store/path_info.rs @@ -0,0 +1,70 @@ +use crate::store::StorePath; +use crate::Error; +use std::collections::BTreeSet; + +#[derive(Clone, Debug)] +pub struct PathInfo { + pub path: StorePath, + pub references: BTreeSet, + pub nar_size: u64, + pub deriver: Option, + + // Additional binary cache info. 
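The two constants in path.rs above are consistent with the base32 length formulas defined below in util/base32.rs: a 20-byte hash always renders as 32 characters and decodes back to 20 bytes. A quick standalone check of that arithmetic, with the formulas copied from encoded_len/decoded_len below:

    // ceil(n * 8 / 5), written as in util/base32.rs below.
    fn encoded_len(input_len: usize) -> usize {
        if input_len == 0 { 0 } else { (input_len * 8 - 1) / 5 + 1 }
    }

    fn decoded_len(input_len: usize) -> usize {
        input_len * 5 / 8
    }

    fn main() {
        assert_eq!(encoded_len(20), 32); // STORE_PATH_HASH_BYTES -> STORE_PATH_HASH_CHARS
        assert_eq!(decoded_len(32), 20); // and back again
    }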
+ pub url: Option, + pub compression: Option, + pub file_size: Option, +} + +impl PathInfo { + pub fn parse_nar_info(nar_info: &str, store_dir: &str) -> Result { + let mut path = None; + let mut references = BTreeSet::new(); + let mut nar_size = None; + let mut deriver = None; + let mut url = None; + let mut compression = None; + let mut file_size = None; + + for line in nar_info.lines() { + let colon = line.find(':').ok_or(Error::BadNarInfo)?; + + let (name, value) = line.split_at(colon); + + if !value.starts_with(": ") { + return Err(Error::BadNarInfo); + } + + let value = &value[2..]; + + if name == "StorePath" { + path = Some(StorePath::new(std::path::Path::new(value), store_dir)?); + } else if name == "NarSize" { + nar_size = Some(u64::from_str_radix(value, 10).map_err(|_| Error::BadNarInfo)?); + } else if name == "References" { + if !value.is_empty() { + for r in value.split(' ') { + references.insert(StorePath::new_from_base_name(r)?); + } + } + } else if name == "Deriver" { + deriver = Some(StorePath::new_from_base_name(value)?); + } else if name == "URL" { + url = Some(value.into()); + } else if name == "Compression" { + compression = Some(value.into()); + } else if name == "FileSize" { + file_size = Some(u64::from_str_radix(value, 10).map_err(|_| Error::BadNarInfo)?); + } + } + + Ok(PathInfo { + path: path.ok_or(Error::BadNarInfo)?, + references, + nar_size: nar_size.ok_or(Error::BadNarInfo)?, + deriver, + url: Some(url.ok_or(Error::BadNarInfo)?), + compression, + file_size, + }) + } +} diff --git a/nix-rust/src/store/store.rs b/nix-rust/src/store/store.rs new file mode 100644 index 000000000..c33dc4a90 --- /dev/null +++ b/nix-rust/src/store/store.rs @@ -0,0 +1,53 @@ +use super::{PathInfo, StorePath}; +use crate::Error; +use std::collections::{BTreeMap, BTreeSet}; +use std::path::Path; + +pub trait Store: Send + Sync { + fn store_dir(&self) -> &str { + "/nix/store" + } + + fn query_path_info( + &self, + store_path: &StorePath, + ) -> std::pin::Pin> + Send>>; +} + +impl dyn Store { + pub fn parse_store_path(&self, path: &Path) -> Result { + StorePath::new(path, self.store_dir()) + } + + pub async fn compute_path_closure( + &self, + roots: BTreeSet, + ) -> Result, Error> { + let mut done = BTreeSet::new(); + let mut result = BTreeMap::new(); + let mut pending = vec![]; + + for root in roots { + pending.push(self.query_path_info(&root)); + done.insert(root); + } + + while !pending.is_empty() { + let (info, _, remaining) = futures::future::select_all(pending).await; + pending = remaining; + + let info = info?; + + for path in &info.references { + if !done.contains(path) { + pending.push(self.query_path_info(&path)); + done.insert(path.clone()); + } + } + + result.insert(info.path.clone(), info); + } + + Ok(result) + } +} diff --git a/nix-rust/src/util/base32.rs b/nix-rust/src/util/base32.rs new file mode 100644 index 000000000..efd4a2901 --- /dev/null +++ b/nix-rust/src/util/base32.rs @@ -0,0 +1,160 @@ +use crate::error::Error; +use lazy_static::lazy_static; + +pub fn encoded_len(input_len: usize) -> usize { + if input_len == 0 { + 0 + } else { + (input_len * 8 - 1) / 5 + 1 + } +} + +pub fn decoded_len(input_len: usize) -> usize { + input_len * 5 / 8 +} + +static BASE32_CHARS: &'static [u8; 32] = &b"0123456789abcdfghijklmnpqrsvwxyz"; + +lazy_static! 
{ + static ref BASE32_CHARS_REVERSE: Box<[u8; 256]> = { + let mut xs = [0xffu8; 256]; + for (n, c) in BASE32_CHARS.iter().enumerate() { + xs[*c as usize] = n as u8; + } + Box::new(xs) + }; +} + +pub fn encode(input: &[u8]) -> String { + let mut buf = vec![0; encoded_len(input.len())]; + encode_into(input, &mut buf); + std::str::from_utf8(&buf).unwrap().to_string() +} + +pub fn encode_into(input: &[u8], output: &mut [u8]) { + let len = encoded_len(input.len()); + assert_eq!(len, output.len()); + + let mut nr_bits_left: usize = 0; + let mut bits_left: u16 = 0; + let mut pos = len; + + for b in input { + bits_left |= (*b as u16) << nr_bits_left; + nr_bits_left += 8; + while nr_bits_left > 5 { + output[pos - 1] = BASE32_CHARS[(bits_left & 0x1f) as usize]; + pos -= 1; + bits_left >>= 5; + nr_bits_left -= 5; + } + } + + if nr_bits_left > 0 { + output[pos - 1] = BASE32_CHARS[(bits_left & 0x1f) as usize]; + pos -= 1; + } + + assert_eq!(pos, 0); +} + +pub fn decode(input: &str) -> Result, crate::Error> { + let mut res = Vec::with_capacity(decoded_len(input.len())); + + let mut nr_bits_left: usize = 0; + let mut bits_left: u16 = 0; + + for c in input.chars().rev() { + let b = BASE32_CHARS_REVERSE[c as usize]; + if b == 0xff { + return Err(Error::BadBase32); + } + bits_left |= (b as u16) << nr_bits_left; + nr_bits_left += 5; + if nr_bits_left >= 8 { + res.push((bits_left & 0xff) as u8); + bits_left >>= 8; + nr_bits_left -= 8; + } + } + + if nr_bits_left > 0 && bits_left != 0 { + return Err(Error::BadBase32); + } + + Ok(res) +} + +#[cfg(test)] +mod tests { + use super::*; + use assert_matches::assert_matches; + use hex; + use proptest::proptest; + + #[test] + fn test_encode() { + assert_eq!(encode(&[]), ""); + + assert_eq!( + encode(&hex::decode("0839703786356bca59b0f4a32987eb2e6de43ae8").unwrap()), + "x0xf8v9fxf3jk8zln1cwlsrmhqvp0f88" + ); + + assert_eq!( + encode( + &hex::decode("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad") + .unwrap() + ), + "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" + ); + + assert_eq!( + encode( + &hex::decode("ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f") + .unwrap() + ), + "2gs8k559z4rlahfx0y688s49m2vvszylcikrfinm30ly9rak69236nkam5ydvly1ai7xac99vxfc4ii84hawjbk876blyk1jfhkbbyx" + ); + } + + #[test] + fn test_decode() { + assert_eq!(hex::encode(decode("").unwrap()), ""); + + assert_eq!( + hex::encode(decode("x0xf8v9fxf3jk8zln1cwlsrmhqvp0f88").unwrap()), + "0839703786356bca59b0f4a32987eb2e6de43ae8" + ); + + assert_eq!( + hex::encode(decode("1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s").unwrap()), + "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad" + ); + + assert_eq!( + hex::encode(decode("2gs8k559z4rlahfx0y688s49m2vvszylcikrfinm30ly9rak69236nkam5ydvly1ai7xac99vxfc4ii84hawjbk876blyk1jfhkbbyx").unwrap()), + "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f" + ); + + assert_matches!( + decode("xoxf8v9fxf3jk8zln1cwlsrmhqvp0f88"), + Err(Error::BadBase32) + ); + assert_matches!( + decode("2b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s"), + Err(Error::BadBase32) + ); + assert_matches!(decode("2"), Err(Error::BadBase32)); + assert_matches!(decode("2gs"), Err(Error::BadBase32)); + assert_matches!(decode("2gs8"), Err(Error::BadBase32)); + } + + proptest! 
{ + + #[test] + fn roundtrip(s: Vec) { + assert_eq!(s, decode(&encode(&s)).unwrap()); + } + } +} diff --git a/nix-rust/src/util/mod.rs b/nix-rust/src/util/mod.rs new file mode 100644 index 000000000..eaad9d406 --- /dev/null +++ b/nix-rust/src/util/mod.rs @@ -0,0 +1 @@ +pub mod base32; diff --git a/nix.spec.in b/nix.spec.in deleted file mode 100644 index 477768c6a..000000000 --- a/nix.spec.in +++ /dev/null @@ -1,173 +0,0 @@ -%undefine _hardened_build - -%global nixbld_user "nix-builder-" -%global nixbld_group "nixbld" - -# NOTE: BUILD on EL7 requires -# - Centos / RHEL7 software collection repository -# yum install centos-release-scl -# -# - Recent boost backport -# curl https://copr.fedorainfracloud.org/coprs/whosthere/boost/repo/epel-7/whosthere-boost-epel-7.repo -o /etc/yum.repos.d/whosthere-boost-epel-7.repo -# - -# Disable documentation generation -# necessary on some platforms -%bcond_without docgen - -Summary: The Nix software deployment system -Name: nix -Version: @PACKAGE_VERSION@ -Release: 2%{?dist} -License: LGPLv2+ -Group: Applications/System -URL: http://nixos.org/ -Source0: %{name}-%{version}.tar.bz2 - -Requires: curl -Requires: bzip2 -Requires: gzip -Requires: xz -BuildRequires: bison -BuildRequires: boost-devel >= 1.60 -BuildRequires: bzip2-devel - -# for RHEL <= 7, we need software collections for a C++14 compatible compatible compiler -%if 0%{?rhel} -BuildRequires: devtoolset-7-gcc -BuildRequires: devtoolset-7-gcc-c++ -%endif - -BuildRequires: flex -BuildRequires: libcurl-devel -BuildRequires: libseccomp-devel -BuildRequires: openssl-devel -BuildRequires: sqlite-devel -BuildRequires: xz-devel - -%description -Nix is a purely functional package manager. It allows multiple -versions of a package to be installed side-by-side, ensures that -dependency specifications are complete, supports atomic upgrades and -rollbacks, allows non-root users to install software, and has many -other features. It is the basis of the NixOS Linux distribution, but -it can be used equally well under other Unix systems. - -%package devel -Summary: Development files for %{name} -Requires: %{name}%{?_isa} = %{version}-%{release} - -%description devel -The %{name}-devel package contains libraries and header files for -developing applications that use %{name}. - - -%package doc -Summary: Documentation files for %{name} -BuildArch: noarch -Requires: %{name} = %{version}-%{release} - -%description doc -The %{name}-doc package contains documentation files for %{name}. 
- -%prep -%setup -q - - -%build -%if 0%{?rhel} -source /opt/rh/devtoolset-7/enable -%endif -extraFlags= -# - override docdir so large documentation files are owned by the -# -doc subpackage -# - set localstatedir by hand to the preferred nix value -%configure --localstatedir=/nix/var \ - %{!?without_docgen:--disable-doc-gen} \ - --docdir=%{_defaultdocdir}/%{name}-doc-%{version} \ - $extraFlags -make V=1 %{?_smp_mflags} - - -%install -%if 0%{?rhel} -source /opt/rh/devtoolset-7/enable -%endif - -make DESTDIR=$RPM_BUILD_ROOT install - -find $RPM_BUILD_ROOT -name '*.la' -exec rm -f {} ';' - -# make the store -mkdir -p $RPM_BUILD_ROOT/nix/store -chmod 1775 $RPM_BUILD_ROOT/nix/store - -# make per-user directories -for d in profiles gcroots; -do - mkdir -p $RPM_BUILD_ROOT/nix/var/nix/$d/per-user - chmod 1777 $RPM_BUILD_ROOT/nix/var/nix/$d/per-user -done - -# fix permission of nix profile -# (until this is fixed in the relevant Makefile) -chmod -x $RPM_BUILD_ROOT%{_sysconfdir}/profile.d/nix.sh - -# we ship this file in the base package -rm -f $RPM_BUILD_ROOT%{_defaultdocdir}/%{name}-doc-%{version}/README - -# Get rid of Upstart job. -rm -rf $RPM_BUILD_ROOT%{_sysconfdir}/init - - -%clean -rm -rf $RPM_BUILD_ROOT - - -%pre -getent group %{nixbld_group} >/dev/null || groupadd -r %{nixbld_group} -for i in $(seq 10); -do - getent passwd %{nixbld_user}$i >/dev/null || \ - useradd -r -g %{nixbld_group} -G %{nixbld_group} -d /var/empty \ - -s %{_sbindir}/nologin \ - -c "Nix build user $i" %{nixbld_user}$i -done - -%post -chgrp %{nixbld_group} /nix/store -%if ! 0%{?rhel} || 0%{?rhel} >= 7 -# Enable and start Nix worker -systemctl enable nix-daemon.socket nix-daemon.service -systemctl start nix-daemon.socket -%endif - -%files -%license COPYING -%{_bindir}/nix* -%{_libdir}/*.so -%{_prefix}/libexec/* -%if ! 0%{?rhel} || 0%{?rhel} >= 7 -%{_prefix}/lib/systemd/system/nix-daemon.socket -%{_prefix}/lib/systemd/system/nix-daemon.service -%endif -%{_datadir}/nix -#%if ! %{without docgen} -#%{_mandir}/man1/*.1* -#%{_mandir}/man5/*.5* -#%{_mandir}/man8/*.8* -#%endif -%config(noreplace) %{_sysconfdir}/profile.d/nix.sh -%config(noreplace) %{_sysconfdir}/profile.d/nix-daemon.sh -/nix - -%files devel -%{_includedir}/nix -%{_prefix}/lib/pkgconfig/*.pc - - -#%if ! %{without docgen} -#%files doc -#%docdir %{_defaultdocdir}/%{name}-doc-%{version} -#%{_defaultdocdir}/%{name}-doc-%{version} -#%endif diff --git a/perl/Makefile b/perl/Makefile index 284c75022..7ddb0cf69 100644 --- a/perl/Makefile +++ b/perl/Makefile @@ -7,8 +7,9 @@ GLOBAL_CXXFLAGS += -g -Wall OPTIMIZE = 1 ifeq ($(OPTIMIZE), 1) - GLOBAL_CFLAGS += -O3 GLOBAL_CXXFLAGS += -O3 +else + GLOBAL_CXXFLAGS += -O0 endif include mk/lib.mk diff --git a/perl/configure.ac b/perl/configure.ac index 966700695..c3769e142 100644 --- a/perl/configure.ac +++ b/perl/configure.ac @@ -6,7 +6,6 @@ CFLAGS= CXXFLAGS= AC_PROG_CC AC_PROG_CXX -AX_CXX_COMPILE_STDCXX_11 # Use 64-bit file system calls so that we can support files > 2 GiB. 
AC_SYS_LARGEFILE @@ -69,14 +68,15 @@ AC_SUBST(perlFlags) PKG_CHECK_MODULES([NIX], [nix-store]) -NEED_PROG([NIX_INSTANTIATE_PROGRAM], [nix-instantiate]) +NEED_PROG([NIX], [nix]) # Get nix configure values -nixbindir=$("$NIX_INSTANTIATE_PROGRAM" --eval '' -A nixBinDir | tr -d \") -nixlibexecdir=$("$NIX_INSTANTIATE_PROGRAM" --eval '' -A nixLibexecDir | tr -d \") -nixlocalstatedir=$("$NIX_INSTANTIATE_PROGRAM" --eval '' -A nixLocalstateDir | tr -d \") -nixsysconfdir=$("$NIX_INSTANTIATE_PROGRAM" --eval '' -A nixSysconfDir | tr -d \") -nixstoredir=$("$NIX_INSTANTIATE_PROGRAM" --eval '' -A nixStoreDir | tr -d \") +export NIX_REMOTE=daemon +nixbindir=$("$NIX" --experimental-features nix-command eval --raw -f '' nixBinDir) +nixlibexecdir=$("$NIX" --experimental-features nix-command eval --raw -f '' nixLibexecDir) +nixlocalstatedir=$("$NIX" --experimental-features nix-command eval --raw -f '' nixLocalstateDir) +nixsysconfdir=$("$NIX" --experimental-features nix-command eval --raw -f '' nixSysconfDir) +nixstoredir=$("$NIX" --experimental-features nix-command eval --raw -f '' nixStoreDir) AC_SUBST(nixbindir) AC_SUBST(nixlibexecdir) AC_SUBST(nixlocalstatedir) diff --git a/perl/lib/Nix/Config.pm.in b/perl/lib/Nix/Config.pm.in index 67a20c3f4..bc1749e60 100644 --- a/perl/lib/Nix/Config.pm.in +++ b/perl/lib/Nix/Config.pm.in @@ -11,10 +11,6 @@ $logDir = $ENV{"NIX_LOG_DIR"} || "@nixlocalstatedir@/log/nix"; $confDir = $ENV{"NIX_CONF_DIR"} || "@nixsysconfdir@/nix"; $storeDir = $ENV{"NIX_STORE_DIR"} || "@nixstoredir@"; -$bzip2 = "@bzip2@"; -$xz = "@xz@"; -$curl = "@curl@"; - $useBindings = 1; %config = (); diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index ce553bb53..fefcd5ae6 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -59,7 +59,7 @@ void setVerbosity(int level) int isValidPath(char * path) CODE: try { - RETVAL = store()->isValidPath(path); + RETVAL = store()->isValidPath(store()->parseStorePath(path)); } catch (Error & e) { croak("%s", e.what()); } @@ -70,9 +70,8 @@ int isValidPath(char * path) SV * queryReferences(char * path) PPCODE: try { - PathSet paths = store()->queryPathInfo(path)->references; - for (PathSet::iterator i = paths.begin(); i != paths.end(); ++i) - XPUSHs(sv_2mortal(newSVpv(i->c_str(), 0))); + for (auto & i : store()->queryPathInfo(store()->parseStorePath(path))->references) + XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } @@ -81,7 +80,7 @@ SV * queryReferences(char * path) SV * queryPathHash(char * path) PPCODE: try { - auto s = store()->queryPathInfo(path)->narHash.to_string(); + auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -91,9 +90,9 @@ SV * queryPathHash(char * path) SV * queryDeriver(char * path) PPCODE: try { - auto deriver = store()->queryPathInfo(path)->deriver; - if (deriver == "") XSRETURN_UNDEF; - XPUSHs(sv_2mortal(newSVpv(deriver.c_str(), 0))); + auto info = store()->queryPathInfo(store()->parseStorePath(path)); + if (!info->deriver) XSRETURN_UNDEF; + XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } @@ -102,18 +101,18 @@ SV * queryDeriver(char * path) SV * queryPathInfo(char * path, int base32) PPCODE: try { - auto info = store()->queryPathInfo(path); - if (info->deriver == "") + auto info = store()->queryPathInfo(store()->parseStorePath(path)); + if (info->deriver) 
XPUSHs(&PL_sv_undef); else - XPUSHs(sv_2mortal(newSVpv(info->deriver.c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0))); auto s = info->narHash.to_string(base32 ? Base32 : Base16); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); mXPUSHi(info->registrationTime); mXPUSHi(info->narSize); AV * arr = newAV(); - for (PathSet::iterator i = info->references.begin(); i != info->references.end(); ++i) - av_push(arr, newSVpv(i->c_str(), 0)); + for (auto & i : info->references) + av_push(arr, newSVpv(store()->printStorePath(i).c_str(), 0)); XPUSHs(sv_2mortal(newRV((SV *) arr))); } catch (Error & e) { croak("%s", e.what()); @@ -123,8 +122,8 @@ SV * queryPathInfo(char * path, int base32) SV * queryPathFromHashPart(char * hashPart) PPCODE: try { - Path path = store()->queryPathFromHashPart(hashPart); - XPUSHs(sv_2mortal(newSVpv(path.c_str(), 0))); + auto path = store()->queryPathFromHashPart(hashPart); + XPUSHs(sv_2mortal(newSVpv(path ? store()->printStorePath(*path).c_str() : "", 0))); } catch (Error & e) { croak("%s", e.what()); } @@ -133,11 +132,11 @@ SV * queryPathFromHashPart(char * hashPart) SV * computeFSClosure(int flipDirection, int includeOutputs, ...) PPCODE: try { - PathSet paths; + StorePathSet paths; for (int n = 2; n < items; ++n) - store()->computeFSClosure(SvPV_nolen(ST(n)), paths, flipDirection, includeOutputs); - for (PathSet::iterator i = paths.begin(); i != paths.end(); ++i) - XPUSHs(sv_2mortal(newSVpv(i->c_str(), 0))); + store()->computeFSClosure(store()->parseStorePath(SvPV_nolen(ST(n))), paths, flipDirection, includeOutputs); + for (auto & i : paths) + XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } @@ -146,11 +145,11 @@ SV * computeFSClosure(int flipDirection, int includeOutputs, ...) SV * topoSortPaths(...) PPCODE: try { - PathSet paths; - for (int n = 0; n < items; ++n) paths.insert(SvPV_nolen(ST(n))); - Paths sorted = store()->topoSortPaths(paths); - for (Paths::iterator i = sorted.begin(); i != sorted.end(); ++i) - XPUSHs(sv_2mortal(newSVpv(i->c_str(), 0))); + StorePathSet paths; + for (int n = 0; n < items; ++n) paths.insert(store()->parseStorePath(SvPV_nolen(ST(n)))); + auto sorted = store()->topoSortPaths(paths); + for (auto & i : sorted) + XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } @@ -159,7 +158,7 @@ SV * topoSortPaths(...) SV * followLinksToStorePath(char * path) CODE: try { - RETVAL = newSVpv(store()->followLinksToStorePath(path).c_str(), 0); + RETVAL = newSVpv(store()->printStorePath(store()->followLinksToStorePath(path)).c_str(), 0); } catch (Error & e) { croak("%s", e.what()); } @@ -170,8 +169,8 @@ SV * followLinksToStorePath(char * path) void exportPaths(int fd, ...) 
PPCODE: try { - Paths paths; - for (int n = 1; n < items; ++n) paths.push_back(SvPV_nolen(ST(n))); + StorePathSet paths; + for (int n = 1; n < items; ++n) paths.insert(store()->parseStorePath(SvPV_nolen(ST(n)))); FdSink sink(fd); store()->exportPaths(paths, sink); } catch (Error & e) { @@ -275,8 +274,8 @@ int checkSignature(SV * publicKey_, SV * sig_, char * msg) SV * addToStore(char * srcPath, int recursive, char * algo) PPCODE: try { - Path path = store()->addToStore(baseNameOf(srcPath), srcPath, recursive, parseHashType(algo)); - XPUSHs(sv_2mortal(newSVpv(path.c_str(), 0))); + auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, recursive, parseHashType(algo)); + XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } @@ -286,8 +285,8 @@ SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name) PPCODE: try { Hash h(hash, parseHashType(algo)); - Path path = store()->makeFixedOutputPath(recursive, h, name); - XPUSHs(sv_2mortal(newSVpv(path.c_str(), 0))); + auto path = store()->makeFixedOutputPath(recursive, h, name); + XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } @@ -298,35 +297,35 @@ SV * derivationFromPath(char * drvPath) HV *hash; CODE: try { - Derivation drv = store()->derivationFromPath(drvPath); + Derivation drv = store()->derivationFromPath(store()->parseStorePath(drvPath)); hash = newHV(); HV * outputs = newHV(); - for (DerivationOutputs::iterator i = drv.outputs.begin(); i != drv.outputs.end(); ++i) - hv_store(outputs, i->first.c_str(), i->first.size(), newSVpv(i->second.path.c_str(), 0), 0); + for (auto & i : drv.outputs) + hv_store(outputs, i.first.c_str(), i.first.size(), newSVpv(store()->printStorePath(i.second.path).c_str(), 0), 0); hv_stores(hash, "outputs", newRV((SV *) outputs)); AV * inputDrvs = newAV(); - for (DerivationInputs::iterator i = drv.inputDrvs.begin(); i != drv.inputDrvs.end(); ++i) - av_push(inputDrvs, newSVpv(i->first.c_str(), 0)); // !!! ignores i->second + for (auto & i : drv.inputDrvs) + av_push(inputDrvs, newSVpv(store()->printStorePath(i.first).c_str(), 0)); // !!! 
ignores i->second hv_stores(hash, "inputDrvs", newRV((SV *) inputDrvs)); AV * inputSrcs = newAV(); - for (PathSet::iterator i = drv.inputSrcs.begin(); i != drv.inputSrcs.end(); ++i) - av_push(inputSrcs, newSVpv(i->c_str(), 0)); + for (auto & i : drv.inputSrcs) + av_push(inputSrcs, newSVpv(store()->printStorePath(i).c_str(), 0)); hv_stores(hash, "inputSrcs", newRV((SV *) inputSrcs)); hv_stores(hash, "platform", newSVpv(drv.platform.c_str(), 0)); hv_stores(hash, "builder", newSVpv(drv.builder.c_str(), 0)); AV * args = newAV(); - for (Strings::iterator i = drv.args.begin(); i != drv.args.end(); ++i) - av_push(args, newSVpv(i->c_str(), 0)); + for (auto & i : drv.args) + av_push(args, newSVpv(i.c_str(), 0)); hv_stores(hash, "args", newRV((SV *) args)); HV * env = newHV(); - for (StringPairs::iterator i = drv.env.begin(); i != drv.env.end(); ++i) - hv_store(env, i->first.c_str(), i->first.size(), newSVpv(i->second.c_str(), 0), 0); + for (auto & i : drv.env) + hv_store(env, i.first.c_str(), i.first.size(), newSVpv(i.second.c_str(), 0), 0); hv_stores(hash, "env", newRV((SV *) env)); RETVAL = newRV_noinc((SV *)hash); @@ -340,7 +339,7 @@ SV * derivationFromPath(char * drvPath) void addTempRoot(char * storePath) PPCODE: try { - store()->addTempRoot(storePath); + store()->addTempRoot(store()->parseStorePath(storePath)); } catch (Error & e) { croak("%s", e.what()); } diff --git a/precompiled-headers.h b/precompiled-headers.h new file mode 100644 index 000000000..e0d885b23 --- /dev/null +++ b/precompiled-headers.h @@ -0,0 +1,61 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "util.hh" +#include "args.hh" diff --git a/release-common.nix b/release-common.nix index 4ad29c9b0..6800f3165 100644 --- a/release-common.nix +++ b/release-common.nix @@ -30,9 +30,7 @@ rec { }); configureFlags = - [ - "--enable-gc" - ] ++ lib.optionals stdenv.isLinux [ + lib.optionals stdenv.isLinux [ "--with-sandbox-shell=${sh}/bin/busybox" ]; @@ -42,16 +40,19 @@ rec { libxml2 libxslt docbook5 - docbook5_xsl + docbook_xsl_ns autoconf-archive autoreconfHook ]; buildDeps = [ curl - bzip2 xz brotli editline + bzip2 xz brotli zlib editline openssl pkgconfig sqlite + libarchive boost + nlohmann_json + rustc cargo # Tests git diff --git a/release.nix b/release.nix index ab13451ff..db2b5ef05 100644 --- a/release.nix +++ b/release.nix @@ -1,5 +1,5 @@ { nix ? builtins.fetchGit ./. -, nixpkgs ? builtins.fetchGit { url = https://github.com/NixOS/nixpkgs-channels.git; ref = "nixos-19.03"; } +, nixpkgs ? builtins.fetchTarball https://github.com/NixOS/nixpkgs/archive/nixos-19.09.tar.gz , officialRelease ? false , systems ? [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ] }: @@ -10,6 +10,50 @@ let jobs = rec { + # Create a "vendor" directory that contains the crates listed in + # Cargo.lock, and include it in the Nix tarball. This allows Nix + # to be built without network access. 
+ vendoredCrates = + let + lockFile = builtins.fromTOML (builtins.readFile nix-rust/Cargo.lock); + + files = map (pkg: import { + url = "https://crates.io/api/v1/crates/${pkg.name}/${pkg.version}/download"; + sha256 = lockFile.metadata."checksum ${pkg.name} ${pkg.version} (registry+https://github.com/rust-lang/crates.io-index)"; + }) (builtins.filter (pkg: pkg.source or "" == "registry+https://github.com/rust-lang/crates.io-index") lockFile.package); + + in pkgs.runCommand "cargo-vendor-dir" {} + '' + mkdir -p $out/vendor + + cat > $out/vendor/config < "$dir/.cargo-checksum.json" + + # Clean up some cruft from the winapi crates. FIXME: find + # a way to remove winapi* from our dependencies. + if [[ $dir =~ /winapi ]]; then + find $dir -name "*.a" -print0 | xargs -0 rm -f -- + fi + + mv "$dir" $out/vendor/ + + rm -rf $out/vendor/tmp + '') files)} + ''; + tarball = with pkgs; @@ -25,8 +69,6 @@ let buildInputs = tarballDeps ++ buildDeps; - configureFlags = "--enable-gc"; - postUnpack = '' (cd $sourceRoot && find . -type f) | cut -c3- > $sourceRoot/.dist-files cat $sourceRoot/.dist-files @@ -40,6 +82,8 @@ let distPhase = '' + cp -prd ${vendoredCrates}/vendor/ nix-rust/vendor/ + runHook preDist make dist mkdir -p $out/tarballs @@ -72,7 +116,12 @@ let # https://github.com/NixOS/nixpkgs/issues/45462 '' mkdir -p $out/lib - cp ${boost}/lib/libboost_context* $out/lib + cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib + rm -f $out/lib/*.a + ${lib.optionalString stdenv.isLinux '' + chmod u+w $out/lib/*.so.* + patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.* + ''} ''; configureFlags = configureFlags ++ @@ -123,7 +172,7 @@ let in runCommand "nix-binary-tarball-${version}" - { nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck; + { #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck; meta.description = "Distribution-independent Nix bootstrap binaries for ${system}"; } '' @@ -165,10 +214,10 @@ let chmod +x $TMPDIR/install-systemd-multi-user.sh chmod +x $TMPDIR/install-multi-user dir=nix-${version}-${system} - fn=$out/$dir.tar.bz2 + fn=$out/$dir.tar.xz mkdir -p $out/nix-support echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products - tar cvfj $fn \ + tar cvfJ $fn \ --owner=0 --group=0 --mode=u+rw,uga+r \ --absolute-names \ --hard-dereference \ @@ -191,31 +240,26 @@ let name = "nix-build"; src = tarball; + enableParallelBuilding = true; + buildInputs = buildDeps; dontInstall = false; doInstallCheck = true; - lcovFilter = [ "*/boost/*" "*-tab.*" "*/nlohmann/*" "*/linenoise/*" ]; + lcovFilter = [ "*/boost/*" "*-tab.*" ]; # We call `dot', and even though we just use it to # syntax-check generated dot files, it still requires some # fonts. So provide those. FONTCONFIG_FILE = texFunctions.fontsConf; + + # To test building without precompiled headers. 
+ makeFlagsArray = [ "PRECOMPILE_HEADERS=0" ]; }; - #rpm_fedora27x86_64 = makeRPM_x86_64 (diskImageFunsFun: diskImageFunsFun.fedora27x86_64) [ ]; - - - #deb_debian8i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.debian8i386) [ "libsodium-dev" ] [ "libsodium13" ]; - #deb_debian8x86_64 = makeDeb_x86_64 (diskImageFunsFun: diskImageFunsFun.debian8x86_64) [ "libsodium-dev" ] [ "libsodium13" ]; - - #deb_ubuntu1710i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1710i386) [ ] [ "libsodium18" ]; - #deb_ubuntu1710x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1710x86_64) [ ] [ "libsodium18" "libboost-context1.62.0" ]; - - # System tests. tests.remoteBuilds = (import ./tests/remote-builds.nix rec { inherit nixpkgs; @@ -257,7 +301,7 @@ let x86_64-linux = "${build.x86_64-linux}"; } EOF - su - alice -c 'nix upgrade-nix -vvv --nix-store-paths-url file:///tmp/paths.nix' + su - alice -c 'nix --experimental-features nix-command upgrade-nix -vvv --nix-store-paths-url file:///tmp/paths.nix' (! [ -L /home/alice/.profile-1-link ]) su - alice -c 'PAGER= nix-store -qR ${build.x86_64-linux}' @@ -266,6 +310,7 @@ let umount /nix ''); # */ + /* tests.evalNixpkgs = import (nixpkgs + "/pkgs/top-level/make-tarball.nix") { inherit nixpkgs; @@ -284,6 +329,7 @@ let touch $out ''; + */ installerScript = @@ -295,7 +341,7 @@ let substitute ${./scripts/install.in} $out/install \ ${pkgs.lib.concatMapStrings - (system: "--replace '@binaryTarball_${system}@' $(nix hash-file --base16 --type sha256 ${binaryTarball.${system}}/*.tar.bz2) ") + (system: "--replace '@binaryTarball_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${binaryTarball.${system}}/*.tar.xz) ") [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ] } \ --replace '@nixVersion@' ${build.x86_64-linux.src.version} @@ -321,8 +367,8 @@ let tests.remoteBuilds tests.nix-copy-closure tests.binaryTarball - tests.evalNixpkgs - tests.evalNixOS + #tests.evalNixpkgs + #tests.evalNixOS installerScript ]; }; @@ -330,55 +376,4 @@ let }; - makeRPM_i686 = makeRPM "i686-linux"; - makeRPM_x86_64 = makeRPM "x86_64-linux"; - - makeRPM = - system: diskImageFun: extraPackages: - - with import nixpkgs { inherit system; }; - - releaseTools.rpmBuild rec { - name = "nix-rpm"; - src = jobs.tarball; - diskImage = (diskImageFun vmTools.diskImageFuns) - { extraPackages = - [ "sqlite" "sqlite-devel" "bzip2-devel" "libcurl-devel" "openssl-devel" "xz-devel" "libseccomp-devel" "libsodium-devel" "boost-devel" "bison" "flex" ] - ++ extraPackages; }; - # At most 2047MB can be simulated in qemu-system-i386 - memSize = 2047; - meta.schedulingPriority = 50; - postRPMInstall = "cd /tmp/rpmout/BUILD/nix-* && make installcheck"; - #enableParallelBuilding = true; - }; - - - makeDeb_i686 = makeDeb "i686-linux"; - makeDeb_x86_64 = makeDeb "x86_64-linux"; - - makeDeb = - system: diskImageFun: extraPackages: extraDebPackages: - - with import nixpkgs { inherit system; }; - - releaseTools.debBuild { - name = "nix-deb"; - src = jobs.tarball; - diskImage = (diskImageFun vmTools.diskImageFuns) - { extraPackages = - [ "libsqlite3-dev" "libbz2-dev" "libcurl-dev" "libcurl3-nss" "libssl-dev" "liblzma-dev" "libseccomp-dev" "libsodium-dev" "libboost-all-dev" ] - ++ extraPackages; }; - memSize = 2047; - meta.schedulingPriority = 50; - postInstall = "make installcheck"; - configureFlags = "--sysconfdir=/etc"; - debRequires = - [ "curl" "libsqlite3-0" "libbz2-1.0" "bzip2" "xz-utils" "libssl1.0.0" "liblzma5" "libseccomp2" ] - ++ extraDebPackages; - 
debMaintainer = "Eelco Dolstra "; - doInstallCheck = true; - #enableParallelBuilding = true; - }; - - in jobs diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh index 87c4c2b05..49076bd5c 100644 --- a/scripts/install-darwin-multi-user.sh +++ b/scripts/install-darwin-multi-user.sh @@ -39,7 +39,7 @@ EOF poly_configure_nix_daemon_service() { _sudo "to set up the nix-daemon as a LaunchDaemon" \ - ln -sfn "/nix/var/nix/profiles/default$PLIST_DEST" "$PLIST_DEST" + cp -f "/nix/var/nix/profiles/default$PLIST_DEST" "$PLIST_DEST" _sudo "to load the LaunchDaemon plist for nix-daemon" \ launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh index 4b65783a2..13762cba3 100644 --- a/scripts/install-multi-user.sh +++ b/scripts/install-multi-user.sh @@ -19,9 +19,6 @@ readonly BLUE_UL='\033[38;4;34m' readonly GREEN='\033[38;32m' readonly GREEN_UL='\033[38;4;32m' readonly RED='\033[38;31m' -readonly RED_UL='\033[38;4;31m' -readonly YELLOW='\033[38;33m' -readonly YELLOW_UL='\033[38;4;33m' readonly NIX_USER_COUNT="32" readonly NIX_BUILD_GROUP_ID="30000" @@ -240,10 +237,16 @@ EOF } trap finish_fail EXIT +channel_update_failed=0 function finish_success { finish_cleanup ok "Alright! We're done!" + if [ "x$channel_update_failed" = x1 ]; then + echo "" + echo "But fetching the nixpkgs channel failed. (Are you offline?)" + echo "To try again later, run \"sudo -i nix-channel --update nixpkgs\"." + fi cat < /dev/null >&2; then - failure < "$SCRATCH/nix.conf" build-users-group = $NIX_BUILD_GROUP_NAME - -max-jobs = $NIX_USER_COUNT -cores = 1 EOF _sudo "to place the default nix daemon configuration (part 2)" \ install -m 0664 "$SCRATCH/nix.conf" /etc/nix/nix.conf @@ -753,9 +656,13 @@ main() { if [ "$(uname -s)" = "Darwin" ]; then # shellcheck source=./install-darwin-multi-user.sh . "$EXTRACTED_NIX_PATH/install-darwin-multi-user.sh" - elif [ "$(uname -s)" = "Linux" ] && [ -e /run/systemd/system ]; then - # shellcheck source=./install-systemd-multi-user.sh - . "$EXTRACTED_NIX_PATH/install-systemd-multi-user.sh" + elif [ "$(uname -s)" = "Linux" ]; then + if [ -e /run/systemd/system ]; then + # shellcheck source=./install-systemd-multi-user.sh + . "$EXTRACTED_NIX_PATH/install-systemd-multi-user.sh" + else + failure "Sorry, the multi-user installation requires systemd on Linux (detected using /run/systemd/system)" + fi else failure "Sorry, I don't know what to do on $(uname)" fi @@ -763,9 +670,7 @@ main() { welcome_to_nix chat_about_sudo - if [ "${ALLOW_PREEXISTING_INSTALLATION:-}" = "" ]; then - validate_starting_assumptions - fi + validate_starting_assumptions setup_report diff --git a/scripts/install-nix-from-closure.sh b/scripts/install-nix-from-closure.sh index fc633fa23..3fea7e056 100644 --- a/scripts/install-nix-from-closure.sh +++ b/scripts/install-nix-from-closure.sh @@ -12,7 +12,7 @@ if ! [ -e "$self/.reginfo" ]; then echo "$0: incomplete installer (.reginfo is missing)" >&2 fi -if [ -z "$USER" ]; then +if [ -z "$USER" ] && ! USER=$(id -u -n); then echo "$0: \$USER is not set" >&2 exit 1 fi @@ -22,10 +22,12 @@ if [ -z "$HOME" ]; then exit 1 fi -# macOS support for 10.10 or higher +# macOS support for 10.12.6 or higher if [ "$(uname -s)" = "Darwin" ]; then - if [ $(($(sw_vers -productVersion | cut -d '.' -f 2))) -lt 10 ]; then - echo "$0: macOS $(sw_vers -productVersion) is not supported, upgrade to 10.10 or higher" + macos_major=$(sw_vers -productVersion | cut -d '.' 
-f 2) + macos_minor=$(sw_vers -productVersion | cut -d '.' -f 3) + if [ "$macos_major" -lt 12 ] || { [ "$macos_major" -eq 12 ] && [ "$macos_minor" -lt 6 ]; }; then + echo "$0: macOS $(sw_vers -productVersion) is not supported, upgrade to 10.12.6 or higher" exit 1 fi fi @@ -100,7 +102,7 @@ for i in $(cd "$self/store" >/dev/null && echo ./*); do rm -rf "$i_tmp" fi if ! [ -e "$dest/store/$i" ]; then - cp -Rp "$self/store/$i" "$i_tmp" + cp -RPp "$self/store/$i" "$i_tmp" chmod -R a-w "$i_tmp" chmod +w "$i_tmp" mv "$i_tmp" "$dest/store/$i" @@ -132,15 +134,16 @@ if ! $nix/bin/nix-channel --list | grep -q "^nixpkgs "; then $nix/bin/nix-channel --add https://nixos.org/channels/nixpkgs-unstable fi if [ -z "$_NIX_INSTALLER_TEST" ]; then - $nix/bin/nix-channel --update nixpkgs + if ! $nix/bin/nix-channel --update nixpkgs; then + echo "Fetching the nixpkgs channel failed. (Are you offline?)" + echo "To try again later, run \"nix-channel --update nixpkgs\"." + fi fi added= +p=$HOME/.nix-profile/etc/profile.d/nix.sh if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then - # Make the shell source nix.sh during login. - p=$HOME/.nix-profile/etc/profile.d/nix.sh - for i in .bash_profile .bash_login .profile; do fn="$HOME/$i" if [ -w "$fn" ]; then @@ -152,7 +155,6 @@ if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then break fi done - fi if [ -z "$added" ]; then diff --git a/scripts/install-systemd-multi-user.sh b/scripts/install-systemd-multi-user.sh old mode 100644 new mode 100755 index 04bc539a1..bef3ac4f9 --- a/scripts/install-systemd-multi-user.sh +++ b/scripts/install-systemd-multi-user.sh @@ -9,6 +9,38 @@ readonly SERVICE_DEST=/etc/systemd/system/nix-daemon.service readonly SOCKET_SRC=/lib/systemd/system/nix-daemon.socket readonly SOCKET_DEST=/etc/systemd/system/nix-daemon.socket + +# Path for the systemd override unit file to contain the proxy settings +readonly SERVICE_OVERRIDE=${SERVICE_DEST}.d/override.conf + +create_systemd_override() { + header "Configuring proxy for the nix-daemon service" + _sudo "create directory for systemd unit override" mkdir -p "$(dirname $SERVICE_OVERRIDE)" + cat < /dev/null 2>&1 || command -v "$1" > /dev/null 2>&1 || + command -v "$1" > /dev/null 2>&1 || oops "you do not have '$1' installed, which I need to $2" } @@ -30,22 +30,21 @@ case "$(uname -s).$(uname -m)" in *) oops "sorry, there is no binary distribution of Nix for your platform";; esac -url="https://nixos.org/releases/nix/nix-@nixVersion@/nix-@nixVersion@-$system.tar.bz2" +url="https://nixos.org/releases/nix/nix-@nixVersion@/nix-@nixVersion@-$system.tar.xz" -tarball="$tmpDir/$(basename "$tmpDir/nix-@nixVersion@-$system.tar.bz2")" +tarball="$tmpDir/$(basename "$tmpDir/nix-@nixVersion@-$system.tar.xz")" require_util curl "download the binary tarball" -require_util bzcat "decompress the binary tarball" require_util tar "unpack the binary tarball" echo "downloading Nix @nixVersion@ binary tarball for $system from '$url' to '$tmpDir'..." 
curl -L "$url" -o "$tarball" || oops "failed to download '$url'" -if type sha256sum > /dev/null 2>&1; then +if command -v sha256sum > /dev/null 2>&1; then hash2="$(sha256sum -b "$tarball" | cut -c1-64)" -elif type shasum > /dev/null 2>&1; then +elif command -v shasum > /dev/null 2>&1; then hash2="$(shasum -a 256 -b "$tarball" | cut -c1-64)" -elif type openssl > /dev/null 2>&1; then +elif command -v openssl > /dev/null 2>&1; then hash2="$(openssl dgst -r -sha256 "$tarball" | cut -c1-64)" else oops "cannot verify the SHA-256 hash of '$url'; you need one of 'shasum', 'sha256sum', or 'openssl'" @@ -57,7 +56,7 @@ fi unpack=$tmpDir/unpack mkdir -p "$unpack" -< "$tarball" bzcat | tar -xf - -C "$unpack" || oops "failed to unpack '$url'" +tar -xJf "$tarball" -C "$unpack" || oops "failed to unpack '$url'" script=$(echo "$unpack"/*/install) diff --git a/scripts/nix-profile-daemon.sh.in b/scripts/nix-profile-daemon.sh.in index 6940969cc..4bc7c6fc5 100644 --- a/scripts/nix-profile-daemon.sh.in +++ b/scripts/nix-profile-daemon.sh.in @@ -2,54 +2,8 @@ if [ -n "${__ETC_PROFILE_NIX_SOURCED:-}" ]; then return; fi __ETC_PROFILE_NIX_SOURCED=1 -# Set up secure multi-user builds: non-root users build through the -# Nix daemon. -if [ "$USER" != root -o ! -w @localstatedir@/nix/db ]; then - export NIX_REMOTE=daemon -fi - -export NIX_USER_PROFILE_DIR="@localstatedir@/nix/profiles/per-user/$USER" export NIX_PROFILES="@localstatedir@/nix/profiles/default $HOME/.nix-profile" -# Set up the per-user profile. -mkdir -m 0755 -p $NIX_USER_PROFILE_DIR -if ! test -O "$NIX_USER_PROFILE_DIR"; then - echo "WARNING: bad ownership on $NIX_USER_PROFILE_DIR" >&2 -fi - -if test -w $HOME; then - if ! test -L $HOME/.nix-profile; then - if test "$USER" != root; then - ln -s $NIX_USER_PROFILE_DIR/profile $HOME/.nix-profile - else - # Root installs in the system-wide profile by default. - ln -s @localstatedir@/nix/profiles/default $HOME/.nix-profile - fi - fi - - # Subscribe the root user to the NixOS channel by default. - if [ "$USER" = root -a ! -e $HOME/.nix-channels ]; then - echo "https://nixos.org/channels/nixpkgs-unstable nixpkgs" > $HOME/.nix-channels - fi - - # Create the per-user garbage collector roots directory. - NIX_USER_GCROOTS_DIR=@localstatedir@/nix/gcroots/per-user/$USER - mkdir -m 0755 -p $NIX_USER_GCROOTS_DIR - if ! test -O "$NIX_USER_GCROOTS_DIR"; then - echo "WARNING: bad ownership on $NIX_USER_GCROOTS_DIR" >&2 - fi - - # Set up a default Nix expression from which to install stuff. - if [ ! -e $HOME/.nix-defexpr -o -L $HOME/.nix-defexpr ]; then - rm -f $HOME/.nix-defexpr - mkdir -p $HOME/.nix-defexpr - if [ "$USER" != root ]; then - ln -s @localstatedir@/nix/profiles/per-user/root/channels $HOME/.nix-defexpr/channels_root - fi - fi -fi - - # Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work. if [ ! -z "${NIX_SSL_CERT_FILE:-}" ]; then : # Allow users to override the NIX_SSL_CERT_FILE @@ -70,5 +24,4 @@ else done fi -export NIX_PATH="nixpkgs=@localstatedir@/nix/profiles/per-user/root/channels/nixpkgs:@localstatedir@/nix/profiles/per-user/root/channels" export PATH="$HOME/.nix-profile/bin:@localstatedir@/nix/profiles/default/bin:$PATH" diff --git a/scripts/nix-profile.sh.in b/scripts/nix-profile.sh.in index db03e16ba..8cba1c522 100644 --- a/scripts/nix-profile.sh.in +++ b/scripts/nix-profile.sh.in @@ -1,64 +1,13 @@ if [ -n "$HOME" ] && [ -n "$USER" ]; then - __savedpath="$PATH" - export PATH=@coreutils@ # Set up the per-user profile. 
# This part should be kept in sync with nixpkgs:nixos/modules/programs/shell.nix NIX_LINK=$HOME/.nix-profile - NIX_USER_PROFILE_DIR=@localstatedir@/nix/profiles/per-user/$USER - - mkdir -m 0755 -p "$NIX_USER_PROFILE_DIR" - - if [ "$(stat --printf '%u' "$NIX_USER_PROFILE_DIR")" != "$(id -u)" ]; then - echo "Nix: WARNING: bad ownership on "$NIX_USER_PROFILE_DIR", should be $(id -u)" >&2 - fi - - if [ -w "$HOME" ]; then - if ! [ -L "$NIX_LINK" ]; then - echo "Nix: creating $NIX_LINK" >&2 - if [ "$USER" != root ]; then - if ! ln -s "$NIX_USER_PROFILE_DIR"/profile "$NIX_LINK"; then - echo "Nix: WARNING: could not create $NIX_LINK -> $NIX_USER_PROFILE_DIR/profile" >&2 - fi - else - # Root installs in the system-wide profile by default. - ln -s @localstatedir@/nix/profiles/default "$NIX_LINK" - fi - fi - - # Subscribe the user to the unstable Nixpkgs channel by default. - if [ ! -e "$HOME/.nix-channels" ]; then - echo "https://nixos.org/channels/nixpkgs-unstable nixpkgs" > "$HOME/.nix-channels" - fi - - # Create the per-user garbage collector roots directory. - __user_gcroots=@localstatedir@/nix/gcroots/per-user/"$USER" - mkdir -m 0755 -p "$__user_gcroots" - if [ "$(stat --printf '%u' "$__user_gcroots")" != "$(id -u)" ]; then - echo "Nix: WARNING: bad ownership on $__user_gcroots, should be $(id -u)" >&2 - fi - unset __user_gcroots - - # Set up a default Nix expression from which to install stuff. - __nix_defexpr="$HOME"/.nix-defexpr - [ -L "$__nix_defexpr" ] && rm -f "$__nix_defexpr" - mkdir -m 0755 -p "$__nix_defexpr" - if [ "$USER" != root ] && [ ! -L "$__nix_defexpr"/channels_root ]; then - ln -s @localstatedir@/nix/profiles/per-user/root/channels "$__nix_defexpr"/channels_root - fi - unset __nix_defexpr - fi - - # Append ~/.nix-defexpr/channels/nixpkgs to $NIX_PATH so that - # paths work when the user has fetched the Nixpkgs - # channel. - export NIX_PATH="${NIX_PATH:+$NIX_PATH:}nixpkgs=$HOME/.nix-defexpr/channels/nixpkgs" - # Set up environment. # This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix - NIX_PROFILES="@localstatedir@/nix/profiles/default $NIX_USER_PROFILE_DIR" + export NIX_PROFILES="@localstatedir@/nix/profiles/default $HOME/.nix-profile" # Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work. if [ -e /etc/ssl/certs/ca-certificates.crt ]; then # NixOS, Ubuntu, Debian, Gentoo, Arch @@ -79,6 +28,6 @@ if [ -n "$HOME" ] && [ -n "$USER" ]; then export MANPATH="$NIX_LINK/share/man:$MANPATH" fi - export PATH="$NIX_LINK/bin:$__savedpath" - unset __savedpath NIX_LINK NIX_USER_PROFILE_DIR NIX_PROFILES + export PATH="$NIX_LINK/bin:$PATH" + unset NIX_LINK fi diff --git a/shell.nix b/shell.nix index 73e75fb29..e5a2b2c91 100644 --- a/shell.nix +++ b/shell.nix @@ -1,13 +1,13 @@ { useClang ? 
false }: -with import (builtins.fetchGit { url = https://github.com/NixOS/nixpkgs-channels.git; ref = "nixos-19.03"; }) {}; +with import (builtins.fetchTarball https://github.com/NixOS/nixpkgs/archive/nixos-19.09.tar.gz) {}; with import ./release-common.nix { inherit pkgs; }; (if useClang then clangStdenv else stdenv).mkDerivation { name = "nix"; - buildInputs = buildDeps ++ tarballDeps ++ perlDeps; + buildInputs = buildDeps ++ tarballDeps ++ perlDeps ++ [ pkgs.rustfmt ]; inherit configureFlags; diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 279ae62f6..69d1c6f7e 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -88,7 +88,7 @@ static int _main(int argc, char * * argv) return 0; } - string drvPath; + std::optional drvPath; string storeUri; while (true) { @@ -100,7 +100,7 @@ static int _main(int argc, char * * argv) auto amWilling = readInt(source); auto neededSystem = readString(source); - source >> drvPath; + drvPath = store->parseStorePath(readString(source)); auto requiredFeatures = readStrings>(source); auto canBuildLocally = amWilling @@ -188,7 +188,7 @@ static int _main(int argc, char * * argv) Store::Params storeParams; if (hasPrefix(bestMachine->storeUri, "ssh://")) { - storeParams["max-connections"] ="1"; + storeParams["max-connections"] = "1"; storeParams["log-fd"] = "4"; if (bestMachine->sshKey != "") storeParams["ssh-key"] = bestMachine->sshKey; @@ -236,26 +236,27 @@ connected: { Activity act(*logger, lvlTalkative, actUnknown, fmt("copying dependencies to '%s'", storeUri)); - copyPaths(store, ref(sshStore), inputs, NoRepair, NoCheckSigs, substitute); + copyPaths(store, ref(sshStore), store->parseStorePathSet(inputs), NoRepair, NoCheckSigs, substitute); } uploadLock = -1; - BasicDerivation drv(readDerivation(store->realStoreDir + "/" + baseNameOf(drvPath))); - drv.inputSrcs = inputs; + BasicDerivation drv(readDerivation(*store, store->realStoreDir + "/" + std::string(drvPath->to_string()))); + drv.inputSrcs = store->parseStorePathSet(inputs); - auto result = sshStore->buildDerivation(drvPath, drv); + auto result = sshStore->buildDerivation(*drvPath, drv); if (!result.success()) - throw Error("build of '%s' on '%s' failed: %s", drvPath, storeUri, result.errorMsg); + throw Error("build of '%s' on '%s' failed: %s", store->printStorePath(*drvPath), storeUri, result.errorMsg); - PathSet missing; + StorePathSet missing; for (auto & path : outputs) - if (!store->isValidPath(path)) missing.insert(path); + if (!store->isValidPath(store->parseStorePath(path))) missing.insert(store->parseStorePath(path)); if (!missing.empty()) { Activity act(*logger, lvlTalkative, actUnknown, fmt("copying outputs from '%s'", storeUri)); - store->locksHeld.insert(missing.begin(), missing.end()); /* FIXME: ugly */ + for (auto & i : missing) + store->locksHeld.insert(store->printStorePath(i)); /* FIXME: ugly */ copyPaths(ref(sshStore), store, missing, NoRepair, NoCheckSigs, NoSubstitute); } diff --git a/src/cpptoml/cpptoml.h b/src/cpptoml/cpptoml.h index 07de010b1..5a00da3b4 100644 --- a/src/cpptoml/cpptoml.h +++ b/src/cpptoml/cpptoml.h @@ -4,8 +4,8 @@ * @date May 2013 */ -#ifndef _CPPTOML_H_ -#define _CPPTOML_H_ +#ifndef CPPTOML_H +#define CPPTOML_H #include #include @@ -84,11 +84,12 @@ class option return &value_; } - const T& value_or(const T& alternative) const + template + T value_or(U&& alternative) const { if (!empty_) return value_; - return alternative; + return static_cast(std::forward(alternative)); } private: @@ -295,13 
+296,12 @@ struct valid_value_or_string_convertible }; template -struct value_traits:: - value>::type> +struct value_traits::value>::type> { - using value_type = typename std:: - conditional::type>::value, - typename std::decay::type, std::string>::type; + using value_type = typename std::conditional< + valid_value::type>::value, + typename std::decay::type, std::string>::type; using type = value; @@ -312,12 +312,11 @@ struct value_traits -struct value_traits::value - && std::is_floating_point< - typename std::decay::type>::value>:: - type> +struct value_traits< + T, + typename std::enable_if< + !valid_value_or_string_convertible::value + && std::is_floating_point::type>::value>::type> { using value_type = typename std::decay::type; @@ -330,11 +329,11 @@ struct value_traits -struct value_traits::value - && std::is_signed:: - type>::value>::type> +struct value_traits< + T, typename std::enable_if< + !valid_value_or_string_convertible::value + && !std::is_floating_point::type>::value + && std::is_signed::type>::value>::type> { using value_type = int64_t; @@ -356,11 +355,10 @@ struct value_traits -struct value_traits::value - && std::is_unsigned:: - type>::value>::type> +struct value_traits< + T, typename std::enable_if< + !valid_value_or_string_convertible::value + && std::is_unsigned::type>::value>::type> { using value_type = int64_t; @@ -395,10 +393,15 @@ struct array_of_trait template inline std::shared_ptr::type> make_value(T&& val); inline std::shared_ptr make_array(); + +namespace detail +{ template inline std::shared_ptr make_element(); +} + inline std::shared_ptr make_table(); -inline std::shared_ptr make_table_array(); +inline std::shared_ptr make_table_array(bool is_inline = false); #if defined(CPPTOML_NO_RTTI) /// Base type used to store underlying data type explicitly if RTTI is disabled @@ -576,7 +579,7 @@ class base : public std::enable_shared_from_this #if defined(CPPTOML_NO_RTTI) base_type type() const { - return type_; + return type_; } protected: @@ -698,7 +701,7 @@ inline std::shared_ptr> base::as() if (type() == base_type::INT) { auto v = std::static_pointer_cast>(shared_from_this()); - return make_value(static_cast(v->get()));; + return make_value(static_cast(v->get())); } #else if (auto v = std::dynamic_pointer_cast>(shared_from_this())) @@ -731,7 +734,8 @@ inline std::shared_ptr> base::as() const { #if defined(CPPTOML_NO_RTTI) if (type() == base_type::FLOAT) - return std::static_pointer_cast>(shared_from_this()); + return std::static_pointer_cast>( + shared_from_this()); if (type() == base_type::INT) { @@ -1027,11 +1031,14 @@ inline std::shared_ptr make_array() return std::make_shared(); } +namespace detail +{ template <> inline std::shared_ptr make_element() { return make_array(); } +} // namespace detail /** * Obtains a option>. The option will be empty if the array @@ -1060,7 +1067,7 @@ class table; class table_array : public base { friend class table; - friend std::shared_ptr make_table_array(); + friend std::shared_ptr make_table_array(bool); public: std::shared_ptr clone() const override; @@ -1152,14 +1159,25 @@ class table_array : public base array_.reserve(n); } + /** + * Whether or not the table array is declared inline. This mostly + * matters for parsing, where statically defined arrays cannot be + * appended to using the array-of-table syntax. 
+ */ + bool is_inline() const + { + return is_inline_; + } + private: #if defined(CPPTOML_NO_RTTI) - table_array() : base(base_type::TABLE_ARRAY) + table_array(bool is_inline = false) + : base(base_type::TABLE_ARRAY), is_inline_(is_inline) { // nothing } #else - table_array() + table_array(bool is_inline = false) : is_inline_(is_inline) { // nothing } @@ -1169,26 +1187,30 @@ class table_array : public base table_array& operator=(const table_array& rhs) = delete; std::vector> array_; + const bool is_inline_ = false; }; -inline std::shared_ptr make_table_array() +inline std::shared_ptr make_table_array(bool is_inline) { struct make_shared_enabler : public table_array { - make_shared_enabler() + make_shared_enabler(bool mse_is_inline) : table_array(mse_is_inline) { // nothing } }; - return std::make_shared(); + return std::make_shared(is_inline); } +namespace detail +{ template <> inline std::shared_ptr make_element() { - return make_table_array(); + return make_table_array(true); } +} // namespace detail // The below are overloads for fetching specific value types out of a value // where special casting behavior (like bounds checking) is desired @@ -1679,11 +1701,14 @@ std::shared_ptr
make_table() return std::make_shared(); } +namespace detail +{ template <> inline std::shared_ptr
make_element
() { return make_table(); } +} // namespace detail template std::shared_ptr value::clone() const @@ -1702,7 +1727,7 @@ inline std::shared_ptr array::clone() const inline std::shared_ptr table_array::clone() const { - auto result = make_table_array(); + auto result = make_table_array(is_inline()); result->reserve(array_.size()); for (const auto& ptr : array_) result->array_.push_back(ptr->clone()->as_table()); @@ -1738,6 +1763,11 @@ inline bool is_number(char c) return c >= '0' && c <= '9'; } +inline bool is_hex(char c) +{ + return is_number(c) || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F'); +} + /** * Helper object for consuming expected characters. */ @@ -1766,6 +1796,13 @@ class consumer [&](char c) { (*this)(c); }); } + void eat_or(char a, char b) + { + if (it_ == end_ || (*it_ != a && *it_ != b)) + on_error_(); + ++it_; + } + int eat_digits(int len) { int val = 0; @@ -1830,7 +1867,7 @@ inline std::istream& getline(std::istream& input, std::string& line) line.push_back(static_cast(c)); } } -} +} // namespace detail /** * The parser class. @@ -1914,21 +1951,25 @@ class parser std::string full_table_name; bool inserted = false; - while (it != end && *it != ']') - { - auto part = parse_key(it, end, - [](char c) { return c == '.' || c == ']'; }); + auto key_end = [](char c) { return c == ']'; }; + + auto key_part_handler = [&](const std::string& part) { if (part.empty()) throw_parse_exception("Empty component of table name"); if (!full_table_name.empty()) - full_table_name += "."; + full_table_name += '.'; full_table_name += part; if (curr_table->contains(part)) { +#if !defined(__PGI) auto b = curr_table->get(part); +#else + // Workaround for PGI compiler + std::shared_ptr b = curr_table->get(part); +#endif if (b->is_table()) curr_table = static_cast(b.get()); else if (b->is_table_array()) @@ -1946,16 +1987,23 @@ class parser curr_table->insert(part, make_table()); curr_table = static_cast(curr_table->get(part).get()); } - consume_whitespace(it, end); - if (it != end && *it == '.') - ++it; - consume_whitespace(it, end); - } + }; + + key_part_handler(parse_key(it, end, key_end, key_part_handler)); if (it == end) throw_parse_exception( "Unterminated table declaration; did you forget a ']'?"); + if (*it != ']') + { + std::string errmsg{"Unexpected character in table definition: "}; + errmsg += '"'; + errmsg += *it; + errmsg += '"'; + throw_parse_exception(errmsg); + } + // table already existed if (!inserted) { @@ -1969,8 +2017,9 @@ class parser // since it has already been defined. If there aren't any // values, then it was implicitly created by something like // [a.b] - if (curr_table->empty() || std::any_of(curr_table->begin(), - curr_table->end(), is_value)) + if (curr_table->empty() + || std::any_of(curr_table->begin(), curr_table->end(), + is_value)) { throw_parse_exception("Redefinition of table " + full_table_name); @@ -1989,36 +2038,45 @@ class parser if (it == end || *it == ']') throw_parse_exception("Table array name cannot be empty"); - std::string full_ta_name; - while (it != end && *it != ']') - { - auto part = parse_key(it, end, - [](char c) { return c == '.' 
|| c == ']'; }); + auto key_end = [](char c) { return c == ']'; }; + std::string full_ta_name; + auto key_part_handler = [&](const std::string& part) { if (part.empty()) throw_parse_exception("Empty component of table array name"); if (!full_ta_name.empty()) - full_ta_name += "."; + full_ta_name += '.'; full_ta_name += part; - consume_whitespace(it, end); - if (it != end && *it == '.') - ++it; - consume_whitespace(it, end); - if (curr_table->contains(part)) { +#if !defined(__PGI) auto b = curr_table->get(part); +#else + // Workaround for PGI compiler + std::shared_ptr b = curr_table->get(part); +#endif // if this is the end of the table array name, add an - // element to the table array that we just looked up + // element to the table array that we just looked up, + // provided it was not declared inline if (it != end && *it == ']') { if (!b->is_table_array()) + { throw_parse_exception("Key " + full_ta_name + " is not a table array"); + } + auto v = b->as_table_array(); + + if (v->is_inline()) + { + throw_parse_exception("Static array " + full_ta_name + + " cannot be appended to"); + } + v->get().push_back(make_table()); curr_table = v->get().back().get(); } @@ -2059,15 +2117,16 @@ class parser = static_cast(curr_table->get(part).get()); } } - } + }; + + key_part_handler(parse_key(it, end, key_end, key_part_handler)); // consume the last "]]" - if (it == end) + auto eat = make_consumer(it, end, [this]() { throw_parse_exception("Unterminated table array name"); - ++it; - if (it == end) - throw_parse_exception("Unterminated table array name"); - ++it; + }); + eat(']'); + eat(']'); consume_whitespace(it, end); eol_or_comment(it, end); @@ -2076,7 +2135,35 @@ class parser void parse_key_value(std::string::iterator& it, std::string::iterator& end, table* curr_table) { - auto key = parse_key(it, end, [](char c) { return c == '='; }); + auto key_end = [](char c) { return c == '='; }; + + auto key_part_handler = [&](const std::string& part) { + // two cases: this key part exists already, in which case it must + // be a table, or it doesn't exist in which case we must create + // an implicitly defined table + if (curr_table->contains(part)) + { + auto val = curr_table->get(part); + if (val->is_table()) + { + curr_table = static_cast(val.get()); + } + else + { + throw_parse_exception("Key " + part + + " already exists as a value"); + } + } + else + { + auto newtable = make_table(); + curr_table->insert(part, newtable); + curr_table = newtable.get(); + } + }; + + auto key = parse_key(it, end, key_end, key_part_handler); + if (curr_table->contains(key)) throw_parse_exception("Key " + key + " already present"); if (it == end || *it != '=') @@ -2087,18 +2174,57 @@ class parser consume_whitespace(it, end); } - template - std::string parse_key(std::string::iterator& it, - const std::string::iterator& end, Function&& fun) + template + std::string + parse_key(std::string::iterator& it, const std::string::iterator& end, + KeyEndFinder&& key_end, KeyPartHandler&& key_part_handler) + { + // parse the key as a series of one or more simple-keys joined with '.' 
+ while (it != end && !key_end(*it)) + { + auto part = parse_simple_key(it, end); + consume_whitespace(it, end); + + if (it == end || key_end(*it)) + { + return part; + } + + if (*it != '.') + { + std::string errmsg{"Unexpected character in key: "}; + errmsg += '"'; + errmsg += *it; + errmsg += '"'; + throw_parse_exception(errmsg); + } + + key_part_handler(part); + + // consume the dot + ++it; + } + + throw_parse_exception("Unexpected end of key"); + } + + std::string parse_simple_key(std::string::iterator& it, + const std::string::iterator& end) { consume_whitespace(it, end); - if (*it == '"') + + if (it == end) + throw_parse_exception("Unexpected end of key (blank key?)"); + + if (*it == '"' || *it == '\'') { - return parse_quoted_key(it, end); + return string_literal(it, end, *it); } else { - auto bke = std::find_if(it, end, std::forward(fun)); + auto bke = std::find_if(it, end, [](char c) { + return c == '.' || c == '=' || c == ']'; + }); return parse_bare_key(it, bke); } } @@ -2142,12 +2268,6 @@ class parser return key; } - std::string parse_quoted_key(std::string::iterator& it, - const std::string::iterator& end) - { - return string_literal(it, end, '"'); - } - enum class parse_type { STRING = 1, @@ -2193,7 +2313,7 @@ class parser parse_type determine_value_type(const std::string::iterator& it, const std::string::iterator& end) { - if(it == end) + if (it == end) { throw_parse_exception("Failed to parse value type"); } @@ -2209,7 +2329,11 @@ class parser { return *dtype; } - else if (is_number(*it) || *it == '-' || *it == '+') + else if (is_number(*it) || *it == '-' || *it == '+' + || (*it == 'i' && it + 1 != end && it[1] == 'n' + && it + 2 != end && it[2] == 'f') + || (*it == 'n' && it + 1 != end && it[1] == 'a' + && it + 2 != end && it[2] == 'n')) { return determine_number_type(it, end); } @@ -2235,6 +2359,13 @@ class parser auto check_it = it; if (*check_it == '-' || *check_it == '+') ++check_it; + + if (check_it == end) + throw_parse_exception("Malformed number"); + + if (*check_it == 'i' || *check_it == 'n') + return parse_type::FLOAT; + while (check_it != end && is_number(*check_it)) ++check_it; if (check_it != end && *check_it == '.') @@ -2283,57 +2414,56 @@ class parser bool consuming = false; std::shared_ptr> ret; - auto handle_line - = [&](std::string::iterator& local_it, - std::string::iterator& local_end) { - if (consuming) - { - local_it = std::find_if_not(local_it, local_end, is_ws); + auto handle_line = [&](std::string::iterator& local_it, + std::string::iterator& local_end) { + if (consuming) + { + local_it = std::find_if_not(local_it, local_end, is_ws); - // whole line is whitespace - if (local_it == local_end) - return; - } + // whole line is whitespace + if (local_it == local_end) + return; + } - consuming = false; + consuming = false; - while (local_it != local_end) - { - // handle escaped characters - if (delim == '"' && *local_it == '\\') - { - auto check = local_it; - // check if this is an actual escape sequence or a - // whitespace escaping backslash - ++check; - consume_whitespace(check, local_end); - if (check == local_end) - { - consuming = true; - break; - } + while (local_it != local_end) + { + // handle escaped characters + if (delim == '"' && *local_it == '\\') + { + auto check = local_it; + // check if this is an actual escape sequence or a + // whitespace escaping backslash + ++check; + consume_whitespace(check, local_end); + if (check == local_end) + { + consuming = true; + break; + } - ss << parse_escape_code(local_it, local_end); - continue; - } + 
ss << parse_escape_code(local_it, local_end); + continue; + } - // if we can end the string - if (std::distance(local_it, local_end) >= 3) - { - auto check = local_it; - // check for """ - if (*check++ == delim && *check++ == delim - && *check++ == delim) - { - local_it = check; - ret = make_value(ss.str()); - break; - } - } + // if we can end the string + if (std::distance(local_it, local_end) >= 3) + { + auto check = local_it; + // check for """ + if (*check++ == delim && *check++ == delim + && *check++ == delim) + { + local_it = check; + ret = make_value(ss.str()); + break; + } + } - ss << *local_it++; - } - }; + ss << *local_it++; + } + }; // handle the remainder of the current line handle_line(it, end); @@ -2514,17 +2644,13 @@ class parser return value; } - bool is_hex(char c) - { - return is_number(c) || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F'); - } - uint32_t hex_to_digit(char c) { if (is_number(c)) return static_cast(c - '0'); - return 10 + static_cast( - c - ((c >= 'a' && c <= 'f') ? 'a' : 'A')); + return 10 + + static_cast(c + - ((c >= 'a' && c <= 'f') ? 'a' : 'A')); } std::shared_ptr parse_number(std::string::iterator& it, @@ -2538,25 +2664,6 @@ class parser ++check_it; }; - eat_sign(); - - auto eat_numbers = [&]() { - auto beg = check_it; - while (check_it != end && is_number(*check_it)) - { - ++check_it; - if (check_it != end && *check_it == '_') - { - ++check_it; - if (check_it == end || !is_number(*check_it)) - throw_parse_exception("Malformed number"); - } - } - - if (check_it == beg) - throw_parse_exception("Malformed number"); - }; - auto check_no_leading_zero = [&]() { if (check_it != end && *check_it == '0' && check_it + 1 != check_end && check_it[1] != '.') @@ -2565,7 +2672,80 @@ class parser } }; + auto eat_digits = [&](bool (*check_char)(char)) { + auto beg = check_it; + while (check_it != end && check_char(*check_it)) + { + ++check_it; + if (check_it != end && *check_it == '_') + { + ++check_it; + if (check_it == end || !check_char(*check_it)) + throw_parse_exception("Malformed number"); + } + } + + if (check_it == beg) + throw_parse_exception("Malformed number"); + }; + + auto eat_hex = [&]() { eat_digits(&is_hex); }; + + auto eat_numbers = [&]() { eat_digits(&is_number); }; + + if (check_it != end && *check_it == '0' && check_it + 1 != check_end + && (check_it[1] == 'x' || check_it[1] == 'o' || check_it[1] == 'b')) + { + ++check_it; + char base = *check_it; + ++check_it; + if (base == 'x') + { + eat_hex(); + return parse_int(it, check_it, 16); + } + else if (base == 'o') + { + auto start = check_it; + eat_numbers(); + auto val = parse_int(start, check_it, 8, "0"); + it = start; + return val; + } + else // if (base == 'b') + { + auto start = check_it; + eat_numbers(); + auto val = parse_int(start, check_it, 2); + it = start; + return val; + } + } + + eat_sign(); check_no_leading_zero(); + + if (check_it != end && check_it + 1 != end && check_it + 2 != end) + { + if (check_it[0] == 'i' && check_it[1] == 'n' && check_it[2] == 'f') + { + auto val = std::numeric_limits::infinity(); + if (*it == '-') + val = -val; + it = check_it + 3; + return make_value(val); + } + else if (check_it[0] == 'n' && check_it[1] == 'a' + && check_it[2] == 'n') + { + auto val = std::numeric_limits::quiet_NaN(); + if (*it == '-') + val = -val; + it = check_it + 3; + return make_value(val); + } + } + eat_numbers(); if (check_it != end @@ -2604,14 +2784,17 @@ class parser } std::shared_ptr> parse_int(std::string::iterator& it, - const std::string::iterator& end) + const 
std::string::iterator& end, + int base = 10, + const char* prefix = "") { std::string v{it, end}; + v = prefix + v; v.erase(std::remove(v.begin(), v.end(), '_'), v.end()); it = end; try { - return make_value(std::stoll(v)); + return make_value(std::stoll(v, nullptr, base)); } catch (const std::invalid_argument& ex) { @@ -2674,18 +2857,33 @@ class parser std::string::iterator find_end_of_number(std::string::iterator it, std::string::iterator end) { - return std::find_if(it, end, [](char c) { + auto ret = std::find_if(it, end, [](char c) { return !is_number(c) && c != '_' && c != '.' && c != 'e' && c != 'E' - && c != '-' && c != '+'; + && c != '-' && c != '+' && c != 'x' && c != 'o' && c != 'b'; }); + if (ret != end && ret + 1 != end && ret + 2 != end) + { + if ((ret[0] == 'i' && ret[1] == 'n' && ret[2] == 'f') + || (ret[0] == 'n' && ret[1] == 'a' && ret[2] == 'n')) + { + ret = ret + 3; + } + } + return ret; } std::string::iterator find_end_of_date(std::string::iterator it, std::string::iterator end) { - return std::find_if(it, end, [](char c) { - return !is_number(c) && c != 'T' && c != 'Z' && c != ':' && c != '-' - && c != '+' && c != '.'; + auto end_of_date = std::find_if(it, end, [](char c) { + return !is_number(c) && c != '-'; + }); + if (end_of_date != end && *end_of_date == ' ' && end_of_date + 1 != end + && is_number(end_of_date[1])) + end_of_date++; + return std::find_if(end_of_date, end, [](char c) { + return !is_number(c) && c != 'T' && c != 'Z' && c != ':' + && c != '-' && c != '+' && c != '.'; }); } @@ -2754,7 +2952,7 @@ class parser if (it == date_end) return make_value(ldate); - eat('T'); + eat.eat_or('T', ' '); local_datetime ldt; static_cast(ldt) = ldate; @@ -2850,9 +3048,9 @@ class parser auto arr = make_array(); while (it != end && *it != ']') { - auto value = parse_value(it, end); - if (auto v = value->as()) - arr->get().push_back(value); + auto val = parse_value(it, end); + if (auto v = val->as()) + arr->get().push_back(val); else throw_parse_exception("Arrays must be homogeneous"); skip_whitespace_and_comments(it, end); @@ -2871,7 +3069,7 @@ class parser std::string::iterator& it, std::string::iterator& end) { - auto arr = make_element(); + auto arr = detail::make_element(); while (it != end && *it != ']') { @@ -2881,7 +3079,7 @@ class parser arr->get().push_back(((*this).*fun)(it, end)); skip_whitespace_and_comments(it, end); - if (*it != ',') + if (it == end || *it != ',') break; ++it; @@ -2906,8 +3104,11 @@ class parser throw_parse_exception("Unterminated inline table"); consume_whitespace(it, end); - parse_key_value(it, end, tbl.get()); - consume_whitespace(it, end); + if (it != end && *it != '}') + { + parse_key_value(it, end, tbl.get()); + consume_whitespace(it, end); + } } while (*it == ','); if (it == end || *it != '}') @@ -2987,7 +3188,8 @@ class parser if (it[4] != '-' || it[7] != '-') return {}; - if (len >= 19 && it[10] == 'T' && is_time(it + 11, date_end)) + if (len >= 19 && (it[10] == 'T' || it[10] == ' ') + && is_time(it + 11, date_end)) { // datetime type auto time_end = find_end_of_time(it + 11, date_end); @@ -3243,7 +3445,7 @@ class toml_writer { res += "\\\\"; } - else if ((const uint32_t)*it <= 0x001f) + else if (static_cast(*it) <= UINT32_C(0x001f)) { res += "\\u"; std::stringstream ss; @@ -3274,12 +3476,21 @@ class toml_writer */ void write(const value& v) { - std::ios::fmtflags flags{stream_.flags()}; + std::stringstream ss; + ss << std::showpoint + << std::setprecision(std::numeric_limits::max_digits10) + << v.get(); - stream_ << std::showpoint; 
- write(v.get()); + auto double_str = ss.str(); + auto pos = double_str.find("e0"); + if (pos != std::string::npos) + double_str.replace(pos, 2, "e"); + pos = double_str.find("e-0"); + if (pos != std::string::npos) + double_str.replace(pos, 3, "e-"); - stream_.flags(flags); + stream_ << double_str; + has_naked_endline_ = false; } /** @@ -3287,9 +3498,9 @@ class toml_writer * offset_datetime. */ template - typename std::enable_if::value>::type + typename std::enable_if< + is_one_of::value>::type write(const value& v) { write(v.get()); @@ -3453,5 +3664,5 @@ inline std::ostream& operator<<(std::ostream& stream, const array& a) a.accept(writer); return stream; } -} -#endif +} // namespace cpptoml +#endif // CPPTOML_H diff --git a/src/libexpr/attr-path.cc b/src/libexpr/attr-path.cc index fde7b76b9..1b6a60da7 100644 --- a/src/libexpr/attr-path.cc +++ b/src/libexpr/attr-path.cc @@ -93,4 +93,35 @@ Ptr findAlongAttrPath(EvalState & state, const string & attrPath, } +Pos findDerivationFilename(EvalState & state, Value & v, std::string what) +{ + Value * v2; + try { + v2 = findAlongAttrPath(state, "meta.position", *state.emptyBindings, v); + } catch (Error &) { + throw Error("package '%s' has no source location information", what); + } + + // FIXME: is it possible to extract the Pos object instead of doing this + // toString + parsing? + auto pos = state.forceString(*v2); + + auto colon = pos.rfind(':'); + if (colon == std::string::npos) + throw Error("cannot parse meta.position attribute '%s'", pos); + + std::string filename(pos, 0, colon); + unsigned int lineno; + try { + lineno = std::stoi(std::string(pos, colon + 1)); + } catch (std::invalid_argument & e) { + throw Error("cannot parse line number '%s'", pos); + } + + Symbol file = state.symbols.create(filename); + + return { file, lineno, 0 }; +} + + } diff --git a/src/libexpr/attr-path.hh b/src/libexpr/attr-path.hh index c82b3672d..8c4cc75d7 100644 --- a/src/libexpr/attr-path.hh +++ b/src/libexpr/attr-path.hh @@ -10,4 +10,7 @@ namespace nix { Ptr findAlongAttrPath(EvalState & state, const string & attrPath, Bindings & autoArgs, Value & vIn); +/* Heuristic to find the filename and lineno or a nix value. 
*/ +Pos findDerivationFilename(EvalState & state, Value & v, std::string what); + } diff --git a/src/libexpr/attr-set.hh b/src/libexpr/attr-set.hh index 0321cf5c2..ccb1ab9d1 100644 --- a/src/libexpr/attr-set.hh +++ b/src/libexpr/attr-set.hh @@ -77,6 +77,14 @@ public: return {}; } + Attr & need(const Symbol & name, const Pos & pos = noPos) + { + auto a = get(name); + if (!a) + throw Error("attribute '%s' missing, at %s", name, pos); + return **a; + } + iterator begin() { return &attrs[0]; } iterator end() { return &attrs[size_]; } diff --git a/src/libexpr/common-eval-args.cc b/src/libexpr/common-eval-args.cc index d042f8349..e804746de 100644 --- a/src/libexpr/common-eval-args.cc +++ b/src/libexpr/common-eval-args.cc @@ -45,9 +45,11 @@ Ptr MixEvalArgs::getAutoArgs(EvalState & state) Path lookupFileArg(EvalState & state, string s) { - if (isUri(s)) - return getDownloader()->downloadCached(state.store, s, true); - else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') { + if (isUri(s)) { + CachedDownloadRequest request(s); + request.unpack = true; + return getDownloader()->downloadCached(state.store, request).path; + } else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') { Path p = s.substr(1, s.size() - 2); return state.findFile(p); } else diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 7652662d3..1d70b01d1 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -7,8 +7,10 @@ #include "eval-inline.hh" #include "download.hh" #include "json.hh" +#include "function-trace.hh" #include +#include #include #include #include @@ -16,7 +18,6 @@ #include #include -#include #include namespace nix { @@ -28,11 +29,10 @@ static void printValue(std::ostream & str, std::set & active, con { checkInterrupt(); - if (active.find(&v) != active.end()) { + if (!active.insert(&v).second) { str << ""; return; } - active.insert(&v); switch (v.type) { case tInt: @@ -239,15 +239,14 @@ EvalState::EvalState(const Strings & _searchPath, ref store) , baseEnv(allocEnv(128)) , staticBaseEnv(false, 0) { - countCalls = getEnv("NIX_COUNT_CALLS", "0") != "0"; + countCalls = getEnv("NIX_COUNT_CALLS").value_or("0") != "0"; static_assert(sizeof(Env) <= 16, "environment must be <= 16 bytes"); /* Initialise the Nix expression search path. */ if (!evalSettings.pureEval) { - Strings paths = parseNixPath(getEnv("NIX_PATH", "")); for (auto & i : _searchPath) addToSearchPath(i); - for (auto & i : paths) addToSearchPath(i); + for (auto & i : evalSettings.nixPath.get()) addToSearchPath(i); } addToSearchPath("nix=" + canonPath(settings.nixDataDir + "/nix/corepkgs", true)); @@ -261,10 +260,10 @@ EvalState::EvalState(const Strings & _searchPath, ref store) auto path = r.second; if (store->isInStore(r.second)) { - PathSet closure; - store->computeFSClosure(store->toStorePath(r.second), closure); + StorePathSet closure; + store->computeFSClosure(store->parseStorePath(store->toStorePath(r.second)), closure); for (auto & path : closure) - allowedPaths->insert(path); + allowedPaths->insert(store->printStorePath(path)); } else allowedPaths->insert(r.second); } @@ -361,7 +360,7 @@ Path EvalState::toRealPath(const Path & path, const PathSet & context) !context.empty() && store->isInStore(path) ? 
store->toRealPath(path) : path; -}; +} Value * EvalState::addConstant(const string & name, Value & v) @@ -447,9 +446,9 @@ LocalNoInlineNoReturn(void throwTypeError(const char * s, const ExprLambda & fun throw TypeError(format(s) % fun.showNamePos() % s2 % pos); } -LocalNoInlineNoReturn(void throwAssertionError(const char * s, const Pos & pos)) +LocalNoInlineNoReturn(void throwAssertionError(const char * s, const string & s1, const Pos & pos)) { - throw AssertionError(format(s) % pos); + throw AssertionError(format(s) % s1 % pos); } LocalNoInlineNoReturn(void throwUndefinedVarError(const char * s, const string & s1, const Pos & pos)) @@ -473,13 +472,13 @@ LocalNoInline(void addErrorPrefix(Error & e, const char * s, const string & s2, } -Value & mkString(Value & v, const string & s, const PathSet & context) +Value & mkString(Value & v, std::string_view s, const PathSet & context) { if (context.empty()) - mkString(v, s.c_str()); + mkString(v, s); else { - v.string.s = String::alloc(s.c_str()); - v.string.context = nullptr; + v.string.s = gc.alloc(String::wordsFor(s.size()), s.size(), s.data()); + v.string.context = 0; v.type = tLongString; v.setContext(context); } @@ -780,7 +779,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) if (hasOverrides) { Value * vOverrides = (*v.attrs)[overrides->second.displ].value; state.forceAttrs(*vOverrides); - Bindings * newBnds = Bindings::allocBindings(v.attrs->size() + vOverrides->attrs->size()); + auto newBnds = Bindings::allocBindings(v.attrs->capacity() + vOverrides->attrs->size()); for (auto & i : *v.attrs) newBnds->push_back(i); for (auto & i : *vOverrides->attrs) { @@ -996,9 +995,10 @@ void EvalState::callPrimOp(Value & fun, Value & arg, Value & v, const Pos & pos) } } - void EvalState::callFunction(Value & fun, Value & arg, Value & v, const Pos & pos) { + auto trace = evalSettings.traceFunctionCalls ? 
std::make_unique(pos) : nullptr; + forceValue(fun, pos); if (fun.type == tPrimOp || fun.type == tPrimOpApp) { @@ -1150,8 +1150,11 @@ void ExprIf::eval(EvalState & state, Env & env, Value & v) void ExprAssert::eval(EvalState & state, Env & env, Value & v) { - if (!state.evalBool(env, cond, pos)) - throwAssertionError("assertion failed at %1%", pos); + if (!state.evalBool(env, cond, pos)) { + std::ostringstream out; + cond->show(out); + throwAssertionError("assertion '%1%' failed at %2%", out.str(), pos); + } body->eval(state, env, v); } @@ -1343,8 +1346,7 @@ void EvalState::forceValueDeep(Value & v) std::function recurse; recurse = [&](Value & v) { - if (seen.find(&v) != seen.end()) return; - seen.insert(&v); + if (!seen.insert(&v).second) return; forceValue(v); @@ -1460,6 +1462,19 @@ bool EvalState::isDerivation(Value & v) } +std::optional EvalState::tryAttrsToString(const Pos & pos, Value & v, + PathSet & context, bool coerceMore, bool copyToStore) +{ + auto i = v.attrs->find(sToString); + if (i != v.attrs->end()) { + auto v1 = allocValue(); + callFunction(*i->value, v, v1, pos); + return coerceToString(pos, v1, context, coerceMore, copyToStore); + } + + return {}; +} + string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context, bool coerceMore, bool copyToStore) { @@ -1478,13 +1493,11 @@ string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context, } if (v.type == tAttrs) { - auto i = v.attrs->find(sToString); - if (i != v.attrs->end()) { - auto v1 = allocValue(); - callFunction(*i->value, v, v1, pos); - return coerceToString(pos, v1, context, coerceMore, copyToStore); + auto maybeString = tryAttrsToString(pos, v, context, coerceMore, copyToStore); + if (maybeString) { + return *maybeString; } - i = v.attrs->find(sOutPath); + auto i = v.attrs->find(sOutPath); if (i == v.attrs->end()) throwTypeError("cannot coerce a set to a string, at %1%", pos); return coerceToString(pos, *i->value, context, coerceMore, copyToStore); } @@ -1528,15 +1541,16 @@ string EvalState::copyPathToStore(PathSet & context, const Path & path) throwEvalError("file names are not allowed to end in '%1%'", drvExtension); Path dstPath; - if (srcToStore[path] != "") - dstPath = srcToStore[path]; + auto i = srcToStore.find(path); + if (i != srcToStore.end()) + dstPath = store->printStorePath(i->second); else { - dstPath = settings.readOnlyMode - ? store->computeStorePathForPath(baseNameOf(path), checkSourcePath(path)).first - : store->addToStore(baseNameOf(path), checkSourcePath(path), true, htSHA256, defaultPathFilter, repair); - srcToStore[path] = dstPath; - printMsg(lvlChatty, format("copied source '%1%' -> '%2%'") - % path % dstPath); + auto p = settings.readOnlyMode + ? 
store->computeStorePathForPath(std::string(baseNameOf(path)), checkSourcePath(path)).first + : store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), true, htSHA256, defaultPathFilter, repair); + dstPath = store->printStorePath(p); + srcToStore.insert_or_assign(path, std::move(p)); + printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, dstPath); } context.insert(dstPath); @@ -1640,7 +1654,7 @@ bool EvalState::eqValues(Value & v1, Value & v2) void EvalState::printStats() { - bool showStats = getEnv("NIX_SHOW_STATS", "0") != "0"; + bool showStats = getEnv("NIX_SHOW_STATS").value_or("0") != "0"; struct rusage buf; getrusage(RUSAGE_SELF, &buf); @@ -1652,7 +1666,7 @@ void EvalState::printStats() uint64_t bAttrsets = nrAttrsets * sizeof(Bindings) + nrAttrsInAttrsets * sizeof(Attr); if (showStats) { - auto outPath = getEnv("NIX_SHOW_STATS_PATH","-"); + auto outPath = getEnv("NIX_SHOW_STATS_PATH").value_or("-"); std::fstream fs; if (outPath != "-") fs.open(outPath, std::fstream::out); @@ -1744,7 +1758,7 @@ void EvalState::printStats() } } - if (getEnv("NIX_SHOW_SYMBOLS", "0") != "0") { + if (getEnv("NIX_SHOW_SYMBOLS").value_or("0") != "0") { auto list = topObj.list("symbols"); symbols.dump([&](const std::string & s) { list.elem(s); }); } @@ -1752,94 +1766,6 @@ void EvalState::printStats() } -// FIXME: move to gc.cc and make generic. -size_t valueSize(Value & v) -{ - std::set seen; - - std::function doValue; - std::function doEnv; - - doValue = [&](Value & v) -> size_t { - if (seen.find(&v) != seen.end()) return 0; - seen.insert(&v); - - size_t sz = sizeof(Value); - - switch (v.type) { - case tLongString: - sz += v.string.s->words() * WORD_SIZE; - break; - case tPath: - sz += v.path->words() * WORD_SIZE; - break; - case tAttrs: - if (seen.find(v.attrs) == seen.end()) { - seen.insert(v.attrs); - sz += sizeof(Bindings) + sizeof(Attr) * v.attrs->capacity(); - for (auto & i : *v.attrs) - sz += doValue(*i.value); - } - break; - case tList0: - case tList1: - case tList2: - case tListN: - if (seen.find(v.listElems()) == seen.end()) { - seen.insert(v.listElems()); - sz += v.listSize() * sizeof(Value *); - for (size_t n = 0; n < v.listSize(); ++n) - sz += doValue(*v.listElems()[n]); - } - break; - case tThunk: - sz += doEnv(*v.thunk.env); - break; - case tApp: - sz += doValue(*v.app.left); - sz += doValue(*v.app.right); - break; - case tLambda: - sz += doEnv(*v.lambda.env); - break; - case tPrimOpApp: - sz += doValue(*v.app.left); - sz += doValue(*v.app.right); - break; -#if 0 - case tExternal: - if (seen.find(v.external) != seen.end()) break; - seen.insert(v.external); - sz += v.external->valueSize(seen); - break; -#endif - default: - ; - } - - return sz; - }; - - doEnv = [&](Env & env) -> size_t { - if (seen.find(&env) != seen.end()) return 0; - seen.insert(&env); - - size_t sz = sizeof(Env) + sizeof(Value *) * env.getSize(); - - if (env.type != tWithExprEnv) - for (size_t i = 0; i < env.getSize(); ++i) - if (env.values[i]) - sz += doValue(*env.values[i]); - - if (env.up) sz += doEnv(*env.up); - - return sz; - }; - - return doValue(v); -} - - #if 0 string ExternalValueBase::coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore) const { @@ -1860,6 +1786,22 @@ std::ostream & operator << (std::ostream & str, const ExternalValueBase & v) { #endif +EvalSettings::EvalSettings() +{ + auto var = getEnv("NIX_PATH"); + if (var) nixPath = parseNixPath(*var); +} + +Strings EvalSettings::getDefaultNixPath() +{ + Strings res; + auto add = [&](const Path & p) { if 
(pathExists(p)) { res.push_back(p); } }; + add(getHome() + "/.nix-defexpr/channels"); + add("nixpkgs=" + settings.nixStateDir + "/nix/profiles/per-user/root/channels/nixpkgs"); + add(settings.nixStateDir + "/nix/profiles/per-user/root/channels"); + return res; +} + EvalSettings evalSettings; static GlobalConfig::Register r1(&evalSettings); diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 864c1bec4..6bdedb228 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -9,6 +9,7 @@ #include "gc.hh" #include +#include #include @@ -17,7 +18,8 @@ namespace nix { class Store; class EvalState; -struct Derivation; +struct Derivation; // FIXME: remove +struct StorePath; enum RepairFlag : bool; @@ -78,17 +80,17 @@ public: static size_t wordsFor(unsigned short size) { - return 2 + size; + return 1 + size; } }; -Value & mkString(Value & v, const string & s, const PathSet & context = PathSet()); +Value & mkString(Value & v, std::string_view s, const PathSet & context); /* Cache for calls to addToStore(); maps source paths to the store paths. */ -typedef std::map SrcToStore; +typedef std::map SrcToStore; std::ostream & operator << (std::ostream & str, const Value & v); @@ -222,6 +224,9 @@ public: set with attribute `type = "derivation"'). */ bool isDerivation(Value & v); + std::optional tryAttrsToString(const Pos & pos, Value & v, + PathSet & context, bool coerceMore = false, bool copyToStore = true); + /* String coercion. Converts strings, paths and derivations to a string. If `coerceMore' is set, also converts nulls, integers, booleans and lists to a string. If `copyToStore' is set, @@ -370,9 +375,16 @@ struct InvalidPathError : EvalError struct EvalSettings : Config { + EvalSettings(); + + static Strings getDefaultNixPath(); + Setting enableNativeCode{this, false, "allow-unsafe-native-code-during-evaluation", "Whether builtin functions that allow executing native code should be enabled."}; + Setting nixPath{this, getDefaultNixPath(), "nix-path", + "List of directories to be searched for <...> file references."}; + Setting restrictEval{this, false, "restrict-eval", "Whether to restrict file system access to paths in $NIX_PATH, " "and network access to the URI prefixes listed in 'allowed-uris'."}; @@ -385,6 +397,9 @@ struct EvalSettings : Config Setting allowedUris{this, {}, "allowed-uris", "Prefixes of URIs that builtin functions such as fetchurl and fetchGit are allowed to fetch."}; + + Setting traceFunctionCalls{this, false, "trace-function-calls", + "Emit log messages for each function entry and exit at the 'vomit' log level (-vvvv)"}; }; extern EvalSettings evalSettings; diff --git a/src/libexpr/function-trace.cc b/src/libexpr/function-trace.cc new file mode 100644 index 000000000..af1486f78 --- /dev/null +++ b/src/libexpr/function-trace.cc @@ -0,0 +1,17 @@ +#include "function-trace.hh" + +namespace nix { + +FunctionCallTrace::FunctionCallTrace(const Pos & pos) : pos(pos) { + auto duration = std::chrono::high_resolution_clock::now().time_since_epoch(); + auto ns = std::chrono::duration_cast(duration); + printMsg(lvlInfo, "function-trace entered %1% at %2%", pos, ns.count()); +} + +FunctionCallTrace::~FunctionCallTrace() { + auto duration = std::chrono::high_resolution_clock::now().time_since_epoch(); + auto ns = std::chrono::duration_cast(duration); + printMsg(lvlInfo, "function-trace exited %1% at %2%", pos, ns.count()); +} + +} diff --git a/src/libexpr/function-trace.hh b/src/libexpr/function-trace.hh new file mode 100644 index 000000000..472f2045e --- /dev/null +++ 
b/src/libexpr/function-trace.hh @@ -0,0 +1,15 @@ +#pragma once + +#include "eval.hh" + +#include + +namespace nix { + +struct FunctionCallTrace +{ + const Pos & pos; + FunctionCallTrace(const Pos & pos); + ~FunctionCallTrace(); +}; +} diff --git a/src/libexpr/gc.cc b/src/libexpr/gc.cc index a768dd55a..bf5302ca4 100644 --- a/src/libexpr/gc.cc +++ b/src/libexpr/gc.cc @@ -11,7 +11,7 @@ GC gc; GC::GC() { - nextSize = std::max((size_t) 2, parseSize(getEnv("GC_INITIAL_HEAP_SIZE", "131072")) / WORD_SIZE); + nextSize = std::max((size_t) 2, parseSize(getEnv("GC_INITIAL_HEAP_SIZE").value_or("131072")) / WORD_SIZE); // FIXME: placement new frontPtrSentinel = (Ptr *) malloc(sizeof(Ptr)); diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index c7fe26298..688bb6637 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -19,27 +19,27 @@ DrvInfo::DrvInfo(EvalState & state, const string & attrPath, Bindings * attrs) DrvInfo::DrvInfo(EvalState & state, ref store, const std::string & drvPathWithOutputs) : state(&state), attrs(nullptr), attrPath("") { - auto spec = parseDrvPathWithOutputs(drvPathWithOutputs); + auto [drvPath, selectedOutputs] = store->parsePathWithOutputs(drvPathWithOutputs); - drvPath = spec.first; + this->drvPath = store->printStorePath(drvPath); auto drv = store->derivationFromPath(drvPath); - name = storePathToName(drvPath); + name = drvPath.name(); - if (spec.second.size() > 1) + if (selectedOutputs.size() > 1) throw Error("building more than one derivation output is not supported, in '%s'", drvPathWithOutputs); outputName = - spec.second.empty() - ? get(drv.env, "outputName", "out") - : *spec.second.begin(); + selectedOutputs.empty() + ? get(drv.env, "outputName").value_or("out") + : *selectedOutputs.begin(); auto i = drv.outputs.find(outputName); if (i == drv.outputs.end()) - throw Error("derivation '%s' does not have output '%s'", drvPath, outputName); + throw Error("derivation '%s' does not have output '%s'", store->printStorePath(drvPath), outputName); - outPath = i->second.path; + outPath = store->printStorePath(i->second.path); } @@ -278,8 +278,7 @@ static bool getDerivation(EvalState & state, Value & v, /* Remove spurious duplicates (e.g., a set like `rec { x = derivation {...}; y = x;}'. 
*/ - if (done.find(v.attrs) != done.end()) return false; - done.insert(v.attrs); + if (!done.insert(v.attrs).second) return false; DrvInfo drv(state, attrPath, v.attrs); diff --git a/src/libexpr/json-to-value.cc b/src/libexpr/json-to-value.cc index 50e824c30..a09592cf0 100644 --- a/src/libexpr/json-to-value.cc +++ b/src/libexpr/json-to-value.cc @@ -1,149 +1,162 @@ #include "json-to-value.hh" -#include +#include +#include + +using json = nlohmann::json; +using std::unique_ptr; namespace nix { +// for more information, refer to +// https://github.com/nlohmann/json/blob/master/include/nlohmann/detail/input/json_sax.hpp +class JSONSax : nlohmann::json_sax { -static void skipWhitespace(const char * & s) -{ - while (*s == ' ' || *s == '\t' || *s == '\n' || *s == '\r') s++; -} + class JSONState { + protected: + unique_ptr parent; + Ptr v; + public: + virtual unique_ptr resolve(EvalState &) + { + throw std::logic_error("tried to close toplevel json parser state"); + }; + explicit JSONState(unique_ptr && p) : parent(std::move(p)), v(nullptr) {}; + explicit JSONState(Value * v) : v(v) {}; + JSONState(JSONState & p) = delete; + Value & value(EvalState & state) + { + if (!v) + v = state.allocValue(); + return *v; + }; + virtual ~JSONState() {}; + virtual void add() {}; + }; - -static string parseJSONString(const char * & s) -{ - string res; - if (*s++ != '"') throw JSONParseError("expected JSON string"); - while (*s != '"') { - if (!*s) throw JSONParseError("got end-of-string in JSON string"); - if (*s == '\\') { - s++; - if (*s == '"') res += '"'; - else if (*s == '\\') res += '\\'; - else if (*s == '/') res += '/'; - else if (*s == '/') res += '/'; - else if (*s == 'b') res += '\b'; - else if (*s == 'f') res += '\f'; - else if (*s == 'n') res += '\n'; - else if (*s == 'r') res += '\r'; - else if (*s == 't') res += '\t'; - else if (*s == 'u') throw JSONParseError("\\u characters in JSON strings are currently not supported"); - else throw JSONParseError("invalid escaped character in JSON string"); - s++; - } else - res += *s++; - } - s++; - return res; -} - - -static void parseJSON(EvalState & state, const char * & s, Value & v) -{ - skipWhitespace(s); - - if (!*s) throw JSONParseError("expected JSON value"); - - if (*s == '[') { - s++; - ValueVector values; - values.reserve(128); - skipWhitespace(s); - while (1) { - if (values.empty() && *s == ']') break; - Ptr v2 = state.allocValue(); - parseJSON(state, s, *v2); - values.push_back(std::move(v2)); - skipWhitespace(s); - if (*s == ']') break; - if (*s != ',') throw JSONParseError("expected ',' or ']' after JSON array element"); - s++; + class JSONObjectState : public JSONState { + using JSONState::JSONState; + ValueMap attrs = ValueMap(); + virtual unique_ptr resolve(EvalState & state) override + { + Value & v = parent->value(state); + state.mkAttrs(v, attrs.size()); + for (auto & i : attrs) + v.attrs->push_back(Attr(i.first, i.second)); + return std::move(parent); } - s++; - state.mkList(v, values.size()); - for (size_t n = 0; n < values.size(); ++n) - v.listElems()[n] = values[n]; - } - - else if (*s == '{') { - s++; - ValueMap attrs; - while (1) { - skipWhitespace(s); - if (attrs.empty() && *s == '}') break; - string name = parseJSONString(s); - skipWhitespace(s); - if (*s != ':') throw JSONParseError("expected ':' in JSON object"); - s++; - Ptr v2 = state.allocValue(); - parseJSON(state, s, *v2); - attrs.emplace(state.symbols.create(name), std::move(v2)); - skipWhitespace(s); - if (*s == '}') break; - if (*s != ',') throw JSONParseError("expected 
',' or '}' after JSON member"); - s++; + virtual void add() override { v = nullptr; }; + public: + void key(string_t & name, EvalState & state) + { + attrs[state.symbols.create(name)] = &value(state); } - state.mkAttrs(v, attrs.size()); - for (auto & i : attrs) - v.attrs->push_back(Attr(i.first, i.second)); - v.attrs->sort(); - s++; - } + }; - else if (*s == '"') { - mkString(v, parseJSONString(s)); - } - - else if (isdigit(*s) || *s == '-' || *s == '.' ) { - // Buffer into a string first, then use built-in C++ conversions - std::string tmp_number; - Tag number_type = tInt; - - while (isdigit(*s) || *s == '-' || *s == '.' || *s == 'e' || *s == 'E') { - if (*s == '.' || *s == 'e' || *s == 'E') - number_type = tFloat; - tmp_number += *s++; + class JSONListState : public JSONState { + ValueVector values = ValueVector(); + virtual unique_ptr resolve(EvalState & state) override + { + Value & v = parent->value(state); + state.mkList(v, values.size()); + for (size_t n = 0; n < values.size(); ++n) { + v.listElems()[n] = values[n]; + } + return std::move(parent); } - - try { - if (number_type == tFloat) - mkFloat(v, stod(tmp_number)); - else - mkInt(v, stol(tmp_number)); - } catch (std::invalid_argument e) { - throw JSONParseError("invalid JSON number"); - } catch (std::out_of_range e) { - throw JSONParseError("out-of-range JSON number"); + virtual void add() override { + values.push_back(v); + v = nullptr; + }; + public: + JSONListState(unique_ptr && p, std::size_t reserve) : JSONState(std::move(p)) + { + values.reserve(reserve); } + }; + + EvalState & state; + unique_ptr rs; + + template inline bool handle_value(T f, Args... args) + { + f(rs->value(state), args...); + rs->add(); + return true; } - else if (strncmp(s, "true", 4) == 0) { - s += 4; - mkBool(v, true); +public: + JSONSax(EvalState & state, Value & v) : state(state), rs(new JSONState(&v)) {}; + + bool null() + { + return handle_value(mkNull); } - else if (strncmp(s, "false", 5) == 0) { - s += 5; - mkBool(v, false); + bool boolean(bool val) + { + return handle_value(mkBool, val); } - else if (strncmp(s, "null", 4) == 0) { - s += 4; - mkNull(v); + bool number_integer(number_integer_t val) + { + return handle_value(mkInt, val); } - else throw JSONParseError("unrecognised JSON value"); -} + bool number_unsigned(number_unsigned_t val) + { + return handle_value(mkInt, val); + } + bool number_float(number_float_t val, const string_t& s) + { + return handle_value(mkFloat, val); + } + + bool string(string_t& val) + { + return handle_value(mkString, (std::string_view) val); + } + + bool start_object(std::size_t len) + { + rs = std::make_unique(std::move(rs)); + return true; + } + + bool key(string_t& name) + { + dynamic_cast(rs.get())->key(name, state); + return true; + } + + bool end_object() { + rs = rs->resolve(state); + rs->add(); + return true; + } + + bool end_array() { + return end_object(); + } + + bool start_array(size_t len) { + rs = std::make_unique(std::move(rs), + len != std::numeric_limits::max() ? 
len : 128); + return true; + } + + bool parse_error(std::size_t, const std::string&, const nlohmann::detail::exception& ex) { + throw JSONParseError(ex.what()); + } +}; void parseJSON(EvalState & state, const string & s_, Value & v) { - const char * s = s_.c_str(); - parseJSON(state, s, v); - skipWhitespace(s); - if (*s) throw JSONParseError(format("expected end-of-string while parsing JSON value: %1%") % s); + JSONSax parser(state, v); + bool res = json::sax_parse(s_, &parser); + if (!res) + throw JSONParseError("Invalid JSON Value"); } - } diff --git a/src/libexpr/json-to-value.hh b/src/libexpr/json-to-value.hh index 33f35b16c..3b0fdae11 100644 --- a/src/libexpr/json-to-value.hh +++ b/src/libexpr/json-to-value.hh @@ -6,7 +6,7 @@ namespace nix { -MakeError(JSONParseError, EvalError) +MakeError(JSONParseError, EvalError); void parseJSON(EvalState & state, const string & s, Value & v); diff --git a/src/libexpr/local.mk b/src/libexpr/local.mk index 59c60e4c9..fec705d28 100644 --- a/src/libexpr/local.mk +++ b/src/libexpr/local.mk @@ -6,7 +6,7 @@ libexpr_DIR := $(d) libexpr_SOURCES := $(wildcard $(d)/*.cc) $(wildcard $(d)/primops/*.cc) $(d)/lexer-tab.cc $(d)/parser-tab.cc -libexpr_LIBS = libutil libstore +libexpr_LIBS = libutil libstore libnixrust libexpr_LDFLAGS = ifneq ($(OS), FreeBSD) diff --git a/src/libexpr/names.cc b/src/libexpr/names.cc index 382088c78..d1c8a6101 100644 --- a/src/libexpr/names.cc +++ b/src/libexpr/names.cc @@ -16,14 +16,14 @@ DrvName::DrvName() a letter. The `version' part is the rest (excluding the separating dash). E.g., `apache-httpd-2.0.48' is parsed to (`apache-httpd', '2.0.48'). */ -DrvName::DrvName(const string & s) : hits(0) +DrvName::DrvName(std::string_view s) : hits(0) { - name = fullName = s; + name = fullName = std::string(s); for (unsigned int i = 0; i < s.size(); ++i) { /* !!! isalpha/isdigit are affected by the locale. */ if (s[i] == '-' && i + 1 < s.size() && !isalpha(s[i + 1])) { - name = string(s, 0, i); - version = string(s, i + 1); + name = s.substr(0, i); + version = s.substr(i + 1); break; } } diff --git a/src/libexpr/names.hh b/src/libexpr/names.hh index 13c3093e7..00e14b8c7 100644 --- a/src/libexpr/names.hh +++ b/src/libexpr/names.hh @@ -15,7 +15,7 @@ struct DrvName unsigned int hits; DrvName(); - DrvName(const string & s); + DrvName(std::string_view s); bool matches(DrvName & n); private: diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index f3a754663..6e0fdf77b 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -9,14 +9,14 @@ namespace nix { -MakeError(EvalError, Error) -MakeError(ParseError, Error) -MakeError(AssertionError, EvalError) -MakeError(ThrownError, AssertionError) -MakeError(Abort, EvalError) -MakeError(TypeError, EvalError) -MakeError(UndefinedVarError, Error) -MakeError(RestrictedPathError, Error) +MakeError(EvalError, Error); +MakeError(ParseError, Error); +MakeError(AssertionError, EvalError); +MakeError(ThrownError, AssertionError); +MakeError(Abort, EvalError); +MakeError(TypeError, EvalError); +MakeError(UndefinedVarError, Error); +MakeError(RestrictedPathError, Error); /* Position objects. 
*/ diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 787039307..afa1fd439 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -1,5 +1,5 @@ %glr-parser -%pure-parser +%define api.pure %locations %define parse.error verbose %defines @@ -20,6 +20,7 @@ #include "nixexpr.hh" #include "eval.hh" +#include "globals.hh" namespace nix { @@ -81,6 +82,8 @@ static void addAttr(ExprAttrs * attrs, AttrPath & attrPath, AttrPath::iterator i; // All attrpaths have at least one attr assert(!attrPath.empty()); + // Checking attrPath validity. + // =========================== for (i = attrPath.begin(); i + 1 < attrPath.end(); i++) { if (i->symbol.set()) { ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol); @@ -102,11 +105,29 @@ static void addAttr(ExprAttrs * attrs, AttrPath & attrPath, attrs = nested; } } + // Expr insertion. + // ========================== if (i->symbol.set()) { ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol); if (j != attrs->attrs.end()) { - dupAttr(attrPath, pos, j->second.pos); + // This attr path is already defined. However, if both + // e and the expr pointed by the attr path are two attribute sets, + // we want to merge them. + // Otherwise, throw an error. + auto ae = dynamic_cast(e); + auto jAttrs = dynamic_cast(j->second.e); + if (jAttrs && ae) { + for (auto & ad : ae->attrs) { + auto j2 = jAttrs->attrs.find(ad.first); + if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error. + dupAttr(ad.first, j2->second.pos, ad.second.pos); + jAttrs->attrs[ad.first] = ad.second; + } + } else { + dupAttr(attrPath, pos, j->second.pos); + } } else { + // This attr path is not defined. Let's create it. attrs->attrs[i->symbol] = ExprAttrs::AttrDef(e, pos); e->setName(i->symbol); } @@ -118,11 +139,10 @@ static void addAttr(ExprAttrs * attrs, AttrPath & attrPath, static void addFormal(const Pos & pos, Formals * formals, const Formal & formal) { - if (formals->argNames.find(formal.name) != formals->argNames.end()) + if (!formals->argNames.insert(formal.name).second) throw ParseError(format("duplicate formal function argument '%1%' at %2%") % formal.name % pos); formals->formals.push_front(formal); - formals->argNames.insert(formal.name); } @@ -382,7 +402,12 @@ expr_simple new ExprVar(data->symbols.create("__nixPath"))), new ExprString(data->symbols.create(path))); } - | URI { $$ = new ExprString(data->symbols.create($1)); } + | URI { + static bool noURLLiterals = settings.isExperimentalFeatureEnabled("no-url-literals"); + if (noURLLiterals) + throw ParseError("URL literals are disabled, at %s", CUR_POS); + $$ = new ExprString(data->symbols.create($1)); + } | '(' expr ')' { $$ = $2; } /* Let expressions `let {..., body = ...}' are just desugared into `(rec {..., body = ...}).body'. */ @@ -551,12 +576,17 @@ Path resolveExprPath(Path path) { assert(path[0] == '/'); + unsigned int followCount = 0, maxFollow = 1024; + /* If `path' is a symlink, follow it. This is so that relative path references work. */ struct stat st; while (true) { + // Basic cycle/depth limit to avoid infinite loops. 
+ if (++followCount >= maxFollow) + throw Error("too many symbolic links encountered while traversing the path '%s'", path); if (lstat(path.c_str(), &st)) - throw SysError(format("getting status of '%1%'") % path); + throw SysError("getting status of '%s'", path); if (!S_ISLNK(st.st_mode)) break; path = absPath(readLink(path), dirOf(path)); } @@ -657,7 +687,9 @@ std::pair EvalState::resolveSearchPathElem(const SearchPathEl if (isUri(elem.second)) { try { - res = { true, getDownloader()->downloadCached(store, elem.second, true) }; + CachedDownloadRequest request(elem.second); + request.unpack = true; + res = { true, getDownloader()->downloadCached(store, request).path }; } catch (DownloadError & e) { printError(format("warning: Nix search path entry '%1%' cannot be downloaded, ignoring") % elem.second); res = { false, "" }; diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 3c21f3b4b..84cd7f377 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -44,29 +44,28 @@ std::pair decodeContext(const string & s) InvalidPathError::InvalidPathError(const Path & path) : - EvalError(format("path '%1%' is not valid") % path), path(path) {} + EvalError("path '%s' is not valid", path), path(path) {} void EvalState::realiseContext(const PathSet & context) { - PathSet drvs; + std::vector drvs; for (auto & i : context) { std::pair decoded = decodeContext(i); - Path ctx = decoded.first; - assert(store->isStorePath(ctx)); + auto ctx = store->parseStorePath(decoded.first); if (!store->isValidPath(ctx)) - throw InvalidPathError(ctx); - if (!decoded.second.empty() && nix::isDerivation(ctx)) { - drvs.insert(decoded.first + "!" + decoded.second); + throw InvalidPathError(store->printStorePath(ctx)); + if (!decoded.second.empty() && ctx.isDerivation()) { + drvs.push_back(StorePathWithOutputs{ctx.clone(), {decoded.second}}); /* Add the output of this derivation to the allowed paths. */ if (allowedPaths) { - auto drv = store->derivationFromPath(decoded.first); + auto drv = store->derivationFromPath(store->parseStorePath(decoded.first)); DerivationOutputs::iterator i = drv.outputs.find(decoded.second); if (i == drv.outputs.end()) throw Error("derivation '%s' does not have an output named '%s'", decoded.first, decoded.second); - allowedPaths->insert(i->second.path); + allowedPaths->insert(store->printStorePath(i->second.path)); } } } @@ -74,10 +73,11 @@ void EvalState::realiseContext(const PathSet & context) if (drvs.empty()) return; if (!evalSettings.enableImportFromDerivation) - throw EvalError(format("attempted to realize '%1%' during evaluation but 'allow-import-from-derivation' is false") % *(drvs.begin())); + throw EvalError("attempted to realize '%1%' during evaluation but 'allow-import-from-derivation' is false", + store->printStorePath(drvs.begin()->path)); /* For performance, prefetch all substitute info. 
*/ - PathSet willBuild, willSubstitute, unknown; + StorePathSet willBuild, willSubstitute, unknown; unsigned long long downloadSize, narSize; store->queryMissing(drvs, willBuild, willSubstitute, unknown, downloadSize, narSize); store->buildPaths(drvs); @@ -100,8 +100,9 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args Path realPath = state.checkSourcePath(state.toRealPath(path, context)); - if (state.store->isStorePath(path) && state.store->isValidPath(path) && isDerivation(path)) { - Derivation drv = readDerivation(realPath); + // FIXME + if (state.store->isStorePath(path) && state.store->isValidPath(state.store->parseStorePath(path)) && isDerivation(path)) { + Derivation drv = readDerivation(*state.store, realPath); auto w = state.allocValue(); state.mkAttrs(w, 3 + drv.outputs.size()); auto v2 = state.allocAttr(w, state.sDrvPath); @@ -114,7 +115,7 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args for (const auto & o : drv.outputs) { v2 = state.allocAttr(w, state.symbols.create(o.first)); - mkString(*v2, o.second.path, {"!" + o.first + "!" + path}); + mkString(*v2, state.store->printStorePath(o.second.path), {"!" + o.first + "!" + path}); outputsVal->listElems()[outputs_index] = state.allocValue(); mkString(*(outputsVal->listElems()[outputs_index++]), o.first); } @@ -396,8 +397,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar throw EvalError(format("attribute 'key' required, at %1%") % pos); state.forceValue(*key->value); - if (doneKeys.count(key->value)) continue; - doneKeys.insert(key->value); + if (!doneKeys.insert(key->value).second) continue; res.push_back(e); /* Call the `operator' function with `e' as argument. */ @@ -470,7 +470,7 @@ static void prim_tryEval(EvalState & state, const Pos & pos, Value * * args, Val static void prim_getEnv(EvalState & state, const Pos & pos, Value * * args, Value & v) { string name = state.forceStringNoCtx(*args[0], pos); - mkString(v, evalSettings.restrictEval || evalSettings.pureEval ? "" : getEnv(name)); + mkString(v, evalSettings.restrictEval || evalSettings.pureEval ? "" : getEnv(name).value_or("")); } @@ -507,13 +507,6 @@ static void prim_trace(EvalState & state, const Pos & pos, Value * * args, Value } -void prim_valueSize(EvalState & state, const Pos & pos, Value * * args, Value & v) -{ - /* We're not forcing the argument on purpose. */ - mkInt(v, valueSize(*args[0])); -} - - /************************************************************* * Derivations *************************************************************/ @@ -684,24 +677,24 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * runs. */ if (path.at(0) == '=') { /* !!! This doesn't work if readOnlyMode is set. */ - PathSet refs; - state.store->computeFSClosure(string(path, 1), refs); + StorePathSet refs; + state.store->computeFSClosure(state.store->parseStorePath(std::string_view(path).substr(1)), refs); for (auto & j : refs) { - drv.inputSrcs.insert(j); - if (isDerivation(j)) - drv.inputDrvs[j] = state.store->queryDerivationOutputNames(j); + drv.inputSrcs.insert(j.clone()); + if (j.isDerivation()) + drv.inputDrvs[j.clone()] = state.store->queryDerivationOutputNames(j); } } /* Handle derivation outputs of the form ‘!!’. */ else if (path.at(0) == '!') { std::pair ctx = decodeContext(path); - drv.inputDrvs[ctx.first].insert(ctx.second); + drv.inputDrvs[state.store->parseStorePath(ctx.first)].insert(ctx.second); } /* Otherwise it's a source file. 
*/ else - drv.inputSrcs.insert(path); + drv.inputSrcs.insert(state.store->parseStorePath(path)); } /* Do we have all required attributes? */ @@ -711,10 +704,8 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * throw EvalError(format("required attribute 'system' missing, at %1%") % posDrvName); /* Check whether the derivation name is valid. */ - checkStoreName(drvName); if (isDerivation(drvName)) - throw EvalError(format("derivation names are not allowed to end in '%1%', at %2%") - % drvExtension % posDrvName); + throw EvalError("derivation names are not allowed to end in '%s', at %s", drvExtension, posDrvName); if (outputHash) { /* Handle fixed-output derivations. */ @@ -724,54 +715,55 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * HashType ht = outputHashAlgo.empty() ? htUnknown : parseHashType(outputHashAlgo); Hash h(*outputHash, ht); - Path outPath = state.store->makeFixedOutputPath(outputHashRecursive, h, drvName); - if (!jsonObject) drv.env["out"] = outPath; - drv.outputs["out"] = DerivationOutput(outPath, - (outputHashRecursive ? "r:" : "") + printHashType(h.type), - h.to_string(Base16, false)); + auto outPath = state.store->makeFixedOutputPath(outputHashRecursive, h, drvName); + if (!jsonObject) drv.env["out"] = state.store->printStorePath(outPath); + drv.outputs.insert_or_assign("out", DerivationOutput(std::move(outPath), + (outputHashRecursive ? "r:" : "") + printHashType(h.type), + h.to_string(Base16, false))); } else { - /* Construct the "masked" store derivation, which is the final - one except that in the list of outputs, the output paths - are empty, and the corresponding environment variables have - an empty value. This ensures that changes in the set of - output names do get reflected in the hash. */ + /* Compute a hash over the "masked" store derivation, which is + the final one except that in the list of outputs, the + output paths are empty strings, and the corresponding + environment variables have an empty value. This ensures + that changes in the set of output names do get reflected in + the hash. */ for (auto & i : outputs) { if (!jsonObject) drv.env[i] = ""; - drv.outputs[i] = DerivationOutput("", "", ""); + drv.outputs.insert_or_assign(i, + DerivationOutput(StorePath::dummy.clone(), "", "")); } - /* Use the masked derivation expression to compute the output - path. */ - Hash h = hashDerivationModulo(*state.store, drv); + Hash h = hashDerivationModulo(*state.store, Derivation(drv), true); - for (auto & i : drv.outputs) - if (i.second.path == "") { - Path outPath = state.store->makeOutputPath(i.first, h, drvName); - if (!jsonObject) drv.env[i.first] = outPath; - i.second.path = outPath; - } + for (auto & i : outputs) { + auto outPath = state.store->makeOutputPath(i, h, drvName); + if (!jsonObject) drv.env[i] = state.store->printStorePath(outPath); + drv.outputs.insert_or_assign(i, + DerivationOutput(std::move(outPath), "", "")); + } } /* Write the resulting term into the Nix store directory. 
*/ - Path drvPath = writeDerivation(state.store, drv, drvName, state.repair); + auto drvPath = writeDerivation(state.store, drv, drvName, state.repair); + auto drvPathS = state.store->printStorePath(drvPath); - if (state.derivationHook) state.derivationHook(drvPath, drv); + if (state.derivationHook) state.derivationHook(drvPathS, drv); - printMsg(lvlChatty, format("instantiated '%1%' -> '%2%'") - % drvName % drvPath); + printMsg(lvlChatty, "instantiated '%1%' -> '%2%'", drvName, drvPathS); /* Optimisation, but required in read-only mode! because in that case we don't actually write store derivations, so we can't read them later. */ - drvHashes[drvPath] = hashDerivationModulo(*state.store, drv); + drvHashes.insert_or_assign(drvPath.clone(), + hashDerivationModulo(*state.store, Derivation(drv), false)); state.mkAttrs(v, 1 + drv.outputs.size()); - mkString(*state.allocAttr(v, state.sDrvPath), drvPath, {"=" + drvPath}); + mkString(*state.allocAttr(v, state.sDrvPath), drvPathS, {"=" + drvPathS}); for (auto & i : drv.outputs) { mkString(*state.allocAttr(v, state.symbols.create(i.first)), - i.second.path, {"!" + i.first + "!" + drvPath}); + state.store->printStorePath(i.second.path), {"!" + i.first + "!" + drvPathS}); } v.attrs->sort(); } @@ -824,7 +816,7 @@ static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, V throw EvalError(format("path '%1%' is not in the Nix store, at %2%") % path % pos); Path path2 = state.store->toStorePath(path); if (!settings.readOnlyMode) - state.store->ensurePath(path2); + state.store->ensurePath(state.store->parseStorePath(path2)); context.insert(path2); mkString(v, path, context); } @@ -834,8 +826,14 @@ static void prim_pathExists(EvalState & state, const Pos & pos, Value * * args, { PathSet context; Path path = state.coerceToPath(pos, *args[0], context); - if (!context.empty()) - throw EvalError(format("string '%1%' cannot refer to other paths, at %2%") % path % pos); + try { + state.realiseContext(context); + } catch (InvalidPathError & e) { + throw EvalError(format( + "cannot check the existence of '%1%', since path '%2%' is not valid, at %3%") + % path % e.path % pos); + } + try { mkBool(v, pathExists(state.checkSourcePath(path))); } catch (SysError & e) { @@ -925,6 +923,20 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va mkPath(v, state.checkSourcePath(state.findFile(searchPath, path, pos)).c_str()); } +/* Return the cryptographic hash of a file in base-16. */ +static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Value & v) +{ + string type = state.forceStringNoCtx(*args[0], pos); + HashType ht = parseHashType(type); + if (ht == htUnknown) + throw Error(format("unknown hash type '%1%', at %2%") % type % pos); + + PathSet context; // discarded + Path p = state.coerceToPath(pos, *args[1], context); + + mkString(v, hashFile(ht, state.checkSourcePath(p)).to_string(Base16, false), context); +} + /* Read a directory (without . or ..) 
*/ static void prim_readDir(EvalState & state, const Pos & pos, Value * * args, Value & v) { @@ -1005,17 +1017,17 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu string name = state.forceStringNoCtx(*args[0], pos); string contents = state.forceString(*args[1], context, pos); - PathSet refs; + StorePathSet refs; for (auto path : context) { if (path.at(0) != '/') throw EvalError(format("in 'toFile': the file '%1%' cannot refer to derivation outputs, at %2%") % name % pos); - refs.insert(path); + refs.insert(state.store->parseStorePath(path)); } - Path storePath = settings.readOnlyMode + auto storePath = state.store->printStorePath(settings.readOnlyMode ? state.store->computeStorePathForText(name, contents, refs) - : state.store->addTextToStore(name, contents, refs, state.repair); + : state.store->addTextToStore(name, contents, refs, state.repair)); /* Note: we don't need to add `context' to the context of the result, since `storePath' itself has references to the paths @@ -1055,21 +1067,18 @@ static void addPath(EvalState & state, const Pos & pos, const string & name, con return state.forceBool(res, pos); }) : defaultPathFilter; - Path expectedStorePath; - if (expectedHash) { - expectedStorePath = - state.store->makeFixedOutputPath(recursive, expectedHash, name); - } + std::optional expectedStorePath; + if (expectedHash) + expectedStorePath = state.store->makeFixedOutputPath(recursive, expectedHash, name); Path dstPath; - if (!expectedHash || !state.store->isValidPath(expectedStorePath)) { - dstPath = settings.readOnlyMode + if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) { + dstPath = state.store->printStorePath(settings.readOnlyMode ? state.store->computeStorePathForPath(name, path, recursive, htSHA256, filter).first - : state.store->addToStore(name, path, recursive, htSHA256, filter, state.repair); - if (expectedHash && expectedStorePath != dstPath) { - throw Error(format("store path mismatch in (possibly filtered) path added from '%1%'") % path); - } + : state.store->addToStore(name, path, recursive, htSHA256, filter, state.repair)); + if (expectedHash && expectedStorePath != state.store->parseStorePath(dstPath)) + throw Error("store path mismatch in (possibly filtered) path added from '%s'", path); } else - dstPath = expectedStorePath; + dstPath = state.store->printStorePath(*expectedStorePath); mkString(v, dstPath, {dstPath}); } @@ -1086,7 +1095,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args if (args[0]->type != tLambda) throw TypeError(format("first argument in call to 'filterSource' is not a function but %1%, at %2%") % showType(*args[0]) % pos); - addPath(state, pos, baseNameOf(path), path, args[0], true, Hash(), v); + addPath(state, pos, std::string(baseNameOf(path)), path, args[0], true, Hash(), v); } static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value & v) @@ -1259,13 +1268,12 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args, string name = state.forceStringNoCtx(*j->value, pos); Symbol sym = state.symbols.create(name); - if (seen.find(sym) == seen.end()) { + if (seen.insert(sym).second) { Bindings::iterator j2 = v2.attrs->find(state.symbols.create(state.sValue)); if (j2 == v2.attrs->end()) throw TypeError(format("'value' attribute missing in a call to 'listToAttrs', at %1%") % pos); v.attrs->push_back(Attr(sym, j2->value, j2->pos)); - seen.insert(sym); } } @@ -2044,9 +2052,9 @@ static void prim_splitVersion(EvalState & state, 
const Pos & pos, Value * * args void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v, const string & who, bool unpack, const std::string & defaultName) { - string url; - Hash expectedHash; - string name = defaultName; + CachedDownloadRequest request(""); + request.unpack = unpack; + request.name = defaultName; state.forceValue(*args[0]); @@ -2057,32 +2065,32 @@ void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v, for (auto & attr : *args[0]->attrs) { string n(attr.name); if (n == "url") - url = state.forceStringNoCtx(*attr.value, *attr.pos); + request.uri = state.forceStringNoCtx(*attr.value, *attr.pos); else if (n == "sha256") - expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256); + request.expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256); else if (n == "name") - name = state.forceStringNoCtx(*attr.value, *attr.pos); + request.name = state.forceStringNoCtx(*attr.value, *attr.pos); else throw EvalError(format("unsupported argument '%1%' to '%2%', at %3%") % attr.name % who % attr.pos); } - if (url.empty()) + if (request.uri.empty()) throw EvalError(format("'url' argument required, at %1%") % pos); } else - url = state.forceStringNoCtx(*args[0], pos); + request.uri = state.forceStringNoCtx(*args[0], pos); - state.checkURI(url); + state.checkURI(request.uri); - if (evalSettings.pureEval && !expectedHash) + if (evalSettings.pureEval && !request.expectedHash) throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who); - Path res = getDownloader()->downloadCached(state.store, url, unpack, name, expectedHash); + auto res = getDownloader()->downloadCached(state.store, request); if (state.allowedPaths) - state.allowedPaths->insert(res); + state.allowedPaths->insert(res.path); - mkString(v, res, PathSet({res})); + mkString(v, res.storePath, PathSet({res.storePath})); } @@ -2148,7 +2156,7 @@ void EvalState::createBaseEnv() } if (!evalSettings.pureEval) { - mkString(v, settings.thisSystem); + mkString(v, settings.thisSystem.get()); addConstant("__currentSystem", v); } @@ -2196,7 +2204,6 @@ void EvalState::createBaseEnv() // Debugging addPrimOp("__trace", 2, prim_trace); - addPrimOp("__valueSize", 1, prim_valueSize); // Paths addPrimOp("__toPath", 1, prim_toPath); @@ -2210,6 +2217,7 @@ void EvalState::createBaseEnv() addPrimOp("__readFile", 1, prim_readFile); addPrimOp("__readDir", 1, prim_readDir); addPrimOp("__findFile", 2, prim_findFile); + addPrimOp("__hashFile", 2, prim_hashFile); // Creating files addPrimOp("__toXML", 1, prim_toXML); diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc index e0cee1f0d..46d67af07 100644 --- a/src/libexpr/primops/context.cc +++ b/src/libexpr/primops/context.cc @@ -148,7 +148,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg if (!state.store->isStorePath(i.name)) throw EvalError("Context key '%s' is not a store path, at %s", i.name, i.pos); if (!settings.readOnlyMode) - state.store->ensurePath(i.name); + state.store->ensurePath(state.store->parseStorePath(i.name)); state.forceAttrs(*i.value, *i.pos); auto iter = i.value->attrs->find(sPath); if (iter != i.value->attrs->end()) { diff --git a/src/libexpr/primops/fetchGit.cc b/src/libexpr/primops/fetchGit.cc index aaf02c856..4aee1073e 100644 --- a/src/libexpr/primops/fetchGit.cc +++ b/src/libexpr/primops/fetchGit.cc @@ -4,6 +4,7 @@ #include "store-api.hh" #include "pathlocks.hh" #include "hash.hh" +#include "tarfile.hh" #include @@ -38,16 +39,14 
@@ GitInfo exportGit(ref store, const std::string & uri, try { runProgram("git", true, { "-C", uri, "diff-index", "--quiet", "HEAD", "--" }); - } catch (ExecError e) { + } catch (ExecError & e) { if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw; clean = false; } if (!clean) { - /* This is an unclean working tree. So copy all tracked - files. */ - + /* This is an unclean working tree. So copy all tracked files. */ GitInfo gitInfo; gitInfo.rev = "0000000000000000000000000000000000000000"; gitInfo.shortRev = std::string(gitInfo.rev, 0, 7); @@ -70,7 +69,7 @@ GitInfo exportGit(ref store, const std::string & uri, return files.count(file); }; - gitInfo.storePath = store->addToStore("source", uri, true, htSHA256, filter); + gitInfo.storePath = store->printStorePath(store->addToStore("source", uri, true, htSHA256, filter)); return gitInfo; } @@ -94,7 +93,11 @@ GitInfo exportGit(ref store, const std::string & uri, runProgram("git", true, { "init", "--bare", cacheDir }); } - Path localRefFile = cacheDir + "/refs/heads/" + *ref; + Path localRefFile; + if (ref->compare(0, 5, "refs/") == 0) + localRefFile = cacheDir + "/" + *ref; + else + localRefFile = cacheDir + "/refs/heads/" + *ref; bool doFetch; time_t now = time(0); @@ -116,7 +119,7 @@ GitInfo exportGit(ref store, const std::string & uri, git fetch to update the local ref to the remote ref. */ struct stat st; doFetch = stat(localRefFile.c_str(), &st) != 0 || - st.st_mtime + settings.tarballTtl <= now; + (uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now; } if (doFetch) { @@ -153,7 +156,7 @@ GitInfo exportGit(ref store, const std::string & uri, gitInfo.storePath = json["storePath"]; - if (store->isValidPath(gitInfo.storePath)) { + if (store->isValidPath(store->parseStorePath(gitInfo.storePath))) { gitInfo.revCount = json["revCount"]; return gitInfo; } @@ -162,16 +165,18 @@ GitInfo exportGit(ref store, const std::string & uri, if (e.errNo != ENOENT) throw; } - // FIXME: should pipe this, or find some better way to extract a - // revision. 
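[Editor's note] The hunk below replaces "buffer the whole `git archive` tarball in a string, then shell out to `tar`" with a streaming pipeline: `sinkToSource` exposes the archive output as a Source that `unpackTarfile` consumes incrementally, so the tarball is never held in memory as one blob. A minimal self-contained sketch of the same streaming idea using only the standard library and POSIX `popen()`; `streamGitArchive` and `ChunkConsumer` are illustrative names, not part of Nix, and the command string is left unquoted for brevity:

    #include <cstddef>
    #include <cstdio>
    #include <functional>
    #include <stdexcept>
    #include <string>
    #include <vector>

    using ChunkConsumer = std::function<void(const char *, std::size_t)>;

    // Run `git archive` and hand its stdout to the consumer chunk by chunk,
    // e.g. to feed a tar extractor, instead of collecting the full output.
    void streamGitArchive(const std::string & repo, const std::string & rev,
        const ChunkConsumer & consume)
    {
        std::string cmd = "git -C " + repo + " archive " + rev;
        FILE * pipe = popen(cmd.c_str(), "r");
        if (!pipe) throw std::runtime_error("cannot start: " + cmd);
        std::vector<char> buf(64 * 1024);
        std::size_t n;
        while ((n = fread(buf.data(), 1, buf.size(), pipe)) > 0)
            consume(buf.data(), n);          // incremental consumption here
        if (pclose(pipe) != 0)
            throw std::runtime_error("command failed: " + cmd);
    }
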
- auto tar = runProgram("git", true, { "-C", cacheDir, "archive", gitInfo.rev }); + auto source = sinkToSource([&](Sink & sink) { + RunOptions gitOptions("git", { "-C", cacheDir, "archive", gitInfo.rev }); + gitOptions.standardOut = &sink; + runProgram2(gitOptions); + }); Path tmpDir = createTempDir(); AutoDelete delTmpDir(tmpDir, true); - runProgram("tar", true, { "x", "-C", tmpDir }, tar); + unpackTarfile(*source, tmpDir); - gitInfo.storePath = store->addToStore(name, tmpDir); + gitInfo.storePath = store->printStorePath(store->addToStore(name, tmpDir)); gitInfo.revCount = std::stoull(runProgram("git", true, { "-C", cacheDir, "rev-list", "--count", gitInfo.rev })); @@ -235,7 +240,7 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va v.attrs->sort(); if (state.allowedPaths) - state.allowedPaths->insert(gitInfo.storePath); + state.allowedPaths->insert(state.store->toRealPath(gitInfo.storePath)); } static RegisterPrimOp r("fetchGit", 1, prim_fetchGit); diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index 66f49f374..db274fa4f 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -63,7 +63,7 @@ HgInfo exportMercurial(ref store, const std::string & uri, return files.count(file); }; - hgInfo.storePath = store->addToStore("source", uri, true, htSHA256, filter); + hgInfo.storePath = store->printStorePath(store->addToStore("source", uri, true, htSHA256, filter)); return hgInfo; } @@ -80,7 +80,7 @@ HgInfo exportMercurial(ref store, const std::string & uri, time_t now = time(0); struct stat st; if (stat(stampFile.c_str(), &st) != 0 || - st.st_mtime + settings.tarballTtl <= now) + (uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now) { /* Except that if this is a commit hash that we already have, we don't have to pull again. 
*/ @@ -96,17 +96,14 @@ HgInfo exportMercurial(ref store, const std::string & uri, try { runProgram("hg", true, { "pull", "-R", cacheDir, "--", uri }); } - catch (ExecError & e){ + catch (ExecError & e) { string transJournal = cacheDir + "/.hg/store/journal"; /* hg throws "abandoned transaction" error only if this file exists */ - if (pathExists(transJournal)) - { + if (pathExists(transJournal)) { runProgram("hg", true, { "recover", "-R", cacheDir }); runProgram("hg", true, { "pull", "-R", cacheDir, "--", uri }); - } - else - { - throw ExecError(e.status, fmt("program hg '%1%' ", statusToString(e.status))); + } else { + throw ExecError(e.status, fmt("'hg pull' %s", statusToString(e.status))); } } } else { @@ -137,7 +134,7 @@ HgInfo exportMercurial(ref store, const std::string & uri, hgInfo.storePath = json["storePath"]; - if (store->isValidPath(hgInfo.storePath)) { + if (store->isValidPath(store->parseStorePath(hgInfo.storePath))) { printTalkative("using cached Mercurial store path '%s'", hgInfo.storePath); return hgInfo; } @@ -153,7 +150,7 @@ HgInfo exportMercurial(ref store, const std::string & uri, deletePath(tmpDir + "/.hg_archival.txt"); - hgInfo.storePath = store->addToStore(name, tmpDir); + hgInfo.storePath = store->printStorePath(store->addToStore(name, tmpDir)); nlohmann::json json; json["storePath"] = hgInfo.storePath; @@ -214,7 +211,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar v.attrs->sort(); if (state.allowedPaths) - state.allowedPaths->insert(hgInfo.storePath); + state.allowedPaths->insert(state.store->toRealPath(hgInfo.storePath)); } static RegisterPrimOp r("fetchMercurial", 1, prim_fetchMercurial); diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc index 4128de05d..a84e569e9 100644 --- a/src/libexpr/primops/fromTOML.cc +++ b/src/libexpr/primops/fromTOML.cc @@ -49,6 +49,19 @@ static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Va visit(*(v.listElems()[i] = state.allocValue()), t2->get()[i]); } + // Handle cases like 'a = [[{ a = true }]]', which IMHO should be + // parsed as a array containing an array containing a table, + // but instead are parsed as an array containing a table array + // containing a table. 
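[Editor's note] The new `as_table_array()` branch below makes `builtins.fromTOML` return a plain nested list for inputs such as 'a = [[{ a = true }]]', as the comment above describes. To make the intended nesting concrete, here is a small sketch of the expected shape expressed with nlohmann::json (a library this diff already uses elsewhere); it only illustrates the desired result, it is not part of the parser:

    #include <iostream>
    #include <nlohmann/json.hpp>

    int main()
    {
        using nlohmann::json;
        // For the TOML fragment 'a = [[{ a = true }]]' we want an array
        // containing an array containing a table, not a bare table array:
        json table = json::object({ { "a", true } });  // innermost table
        json inner = json::array({ table });           // ...wrapped in an array
        json outer = json::array({ inner });           // ...wrapped once more
        json expected = json::object({ { "a", outer } });
        std::cout << expected.dump() << std::endl;     // {"a":[[{"a":true}]]}
    }
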
+ else if (auto t2 = t->as_table_array()) { + size_t size = t2->get().size(); + + state.mkList(v, size); + + for (size_t j = 0; j < size; ++j) + visit(*(v.listElems()[j] = state.allocValue()), t2->get()[j]); + } + else if (t->is_value()) { if (auto val = t->as()) mkInt(v, val->get()); diff --git a/src/libexpr/symbol-table.hh b/src/libexpr/symbol-table.hh index f3b07ff2a..f0e639070 100644 --- a/src/libexpr/symbol-table.hh +++ b/src/libexpr/symbol-table.hh @@ -39,7 +39,12 @@ public: return s < s2.s; } - operator const string & () const + operator const std::string & () const + { + return *s; + } + + operator const std::string_view () const { return *s; } diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc index 97389ee4a..03202ee17 100644 --- a/src/libexpr/value-to-json.cc +++ b/src/libexpr/value-to-json.cc @@ -42,7 +42,12 @@ void printValueAsJSON(EvalState & state, bool strict, break; case tAttrs: { - Bindings::iterator i = v.attrs->find(state.sOutPath); + auto maybeString = state.tryAttrsToString(noPos, v, context, false, false); + if (maybeString) { + out.write(*maybeString); + break; + } + auto i = v.attrs->find(state.sOutPath); if (i == v.attrs->end()) { auto obj(out.object()); StringSet names; diff --git a/src/libexpr/value-to-xml.cc b/src/libexpr/value-to-xml.cc index 79ff10b79..28e08e099 100644 --- a/src/libexpr/value-to-xml.cc +++ b/src/libexpr/value-to-xml.cc @@ -107,10 +107,9 @@ static void printValueAsXML(EvalState & state, bool strict, bool location, XMLOpenElement _(doc, "derivation", xmlAttrs); - if (drvPath != "" && drvsSeen.find(drvPath) == drvsSeen.end()) { - drvsSeen.insert(drvPath); + if (drvPath != "" && drvsSeen.insert(drvPath).second) showAttrs(state, strict, location, *v.attrs, doc, context, drvsSeen); - } else + else doc.writeEmptyElement("repeated"); } diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index 5c26aabbb..ff9e14327 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -12,7 +12,6 @@ struct Env; struct Expr; struct ExprLambda; struct PrimOp; -struct Context; class Symbol; struct Pos; class EvalState; @@ -42,9 +41,6 @@ protected: /* Return a string to be used in builtins.typeOf */ virtual string typeOf() const = 0; - /* How much space does this value take up */ - virtual size_t valueSize(std::set & seen) const = 0; - /* Coerce the value to a string. Defaults to uncoercable, i.e. throws an * error */ @@ -205,11 +201,13 @@ public: return type == tShortString || type == tStaticString || type == tLongString; } - void setShortString(const char * s) + void setShortString(std::string_view s) { // FIXME: can't use strcpy here because gcc flags it as a // buffer overflow on 'misc'. 
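[Editor's note] The `setShortString` change below copies a `std::string_view` into the value's inline buffer using an explicit length plus terminator, since a view need not be NUL-terminated and `strcpy` trips gcc's overflow warning. A minimal sketch of that pattern in standard C++; `ShortString` and its fixed capacity are illustrative stand-ins, not the actual Value layout:

    #include <cassert>
    #include <cstddef>
    #include <cstring>
    #include <string_view>

    struct ShortString
    {
        static constexpr std::size_t capacity = 32;  // stand-in for the space
                                                     // derived from WORD_SIZE etc.
        char data[capacity];

        void set(std::string_view s)
        {
            assert(s.size() < capacity);           // caller checks the length
            std::memcpy(data, s.data(), s.size()); // copy exactly s.size() bytes
            data[s.size()] = '\0';                 // then terminate explicitly
        }
    };
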
- memcpy(getMiscData(), s, strlen(s) + 1); + auto p = getMiscData(); + memcpy(p, s.data(), s.size()); + p[s.size()] = 0; type = tShortString; } @@ -268,16 +266,16 @@ static inline void mkPrimOpApp(Value & v, Value & left, Value & right) } -static inline void mkString(Value & v, const char * s) +static inline Value & mkString(Value & v, std::string_view s) { - auto len = strlen(s); // FIXME: only need to know if > short - if (len < WORD_SIZE * 2 + Object::miscBytes) + if (s.size() < WORD_SIZE * 2 + Object::miscBytes) v.setShortString(s); else { - v.string.s = gc.alloc(String::wordsFor(len), len, s); + v.string.s = gc.alloc(String::wordsFor(s.size()), s.size(), s.data()); v.string.context = 0; v.type = tLongString; } + return v; } @@ -295,12 +293,6 @@ static inline void mkPath(Value & v, const std::string & s) } -/* Compute the size in bytes of the given value, including all values - and environments reachable from it. Static expressions (Exprs) are - not included. */ -size_t valueSize(Value & v); - - typedef std::vector> ValueVector; // FIXME: make more efficient typedef std::map> ValueMap; // FIXME: use Bindings? diff --git a/src/libmain/common-args.cc b/src/libmain/common-args.cc index 4c35a4199..9e1d7cee6 100644 --- a/src/libmain/common-args.cc +++ b/src/libmain/common-args.cc @@ -35,6 +35,15 @@ MixCommonArgs::MixCommonArgs(const string & programName) } }); + mkFlag() + .longName("max-jobs") + .shortName('j') + .label("jobs") + .description("maximum number of parallel builds") + .handler([=](std::string s) { + settings.set("max-jobs", s); + }); + std::string cat = "config"; globalConfig.convertToArgs(*this, cat); diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 60dadfab7..2903a6963 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -33,25 +33,25 @@ void printGCWarning() } -void printMissing(ref store, const PathSet & paths, Verbosity lvl) +void printMissing(ref store, const std::vector & paths, Verbosity lvl) { unsigned long long downloadSize, narSize; - PathSet willBuild, willSubstitute, unknown; + StorePathSet willBuild, willSubstitute, unknown; store->queryMissing(paths, willBuild, willSubstitute, unknown, downloadSize, narSize); printMissing(store, willBuild, willSubstitute, unknown, downloadSize, narSize, lvl); } -void printMissing(ref store, const PathSet & willBuild, - const PathSet & willSubstitute, const PathSet & unknown, +void printMissing(ref store, const StorePathSet & willBuild, + const StorePathSet & willSubstitute, const StorePathSet & unknown, unsigned long long downloadSize, unsigned long long narSize, Verbosity lvl) { if (!willBuild.empty()) { printMsg(lvl, "these derivations will be built:"); - Paths sorted = store->topoSortPaths(willBuild); + auto sorted = store->topoSortPaths(willBuild); reverse(sorted.begin(), sorted.end()); for (auto & i : sorted) - printMsg(lvl, fmt(" %s", i)); + printMsg(lvl, fmt(" %s", store->printStorePath(i))); } if (!willSubstitute.empty()) { @@ -59,14 +59,14 @@ void printMissing(ref store, const PathSet & willBuild, downloadSize / (1024.0 * 1024.0), narSize / (1024.0 * 1024.0))); for (auto & i : willSubstitute) - printMsg(lvl, fmt(" %s", i)); + printMsg(lvl, fmt(" %s", store->printStorePath(i))); } if (!unknown.empty()) { printMsg(lvl, fmt("don't know how to build these paths%s:", (settings.readOnlyMode ? 
" (may be caused by read-only store access)" : ""))); for (auto & i : unknown) - printMsg(lvl, fmt(" %s", i)); + printMsg(lvl, fmt(" %s", store->printStorePath(i))); } } @@ -80,6 +80,7 @@ string getArg(const string & opt, } +#if OPENSSL_VERSION_NUMBER < 0x10101000L /* OpenSSL is not thread-safe by default - it will randomly crash unless the user supplies a mutex locking function. So let's do that. */ @@ -92,6 +93,7 @@ static void opensslLockCallback(int mode, int type, const char * file, int line) else opensslLocks[type].unlock(); } +#endif static void sigHandler(int signo) { } @@ -105,9 +107,11 @@ void initNix() std::cerr.rdbuf()->pubsetbuf(buf, sizeof(buf)); #endif +#if OPENSSL_VERSION_NUMBER < 0x10101000L /* Initialise OpenSSL locking. */ opensslLocks = std::vector(CRYPTO_num_locks()); CRYPTO_set_locking_callback(opensslLockCallback); +#endif loadConfFile(); @@ -125,6 +129,15 @@ void initNix() act.sa_handler = sigHandler; if (sigaction(SIGUSR1, &act, 0)) throw SysError("handling SIGUSR1"); +#if __APPLE__ + /* HACK: on darwin, we need can’t use sigprocmask with SIGWINCH. + * Instead, add a dummy sigaction handler, and signalHandlerThread + * can handle the rest. */ + struct sigaction sa; + sa.sa_handler = sigHandler; + if (sigaction(SIGWINCH, &sa, 0)) throw SysError("handling SIGWINCH"); +#endif + /* Register a SIGSEGV handler to detect stack overflows. */ detectStackOverflow(); @@ -142,7 +155,7 @@ void initNix() sshd). This breaks build users because they don't have access to the TMPDIR, in particular in ‘nix-store --serve’. */ #if __APPLE__ - if (getuid() == 0 && hasPrefix(getEnv("TMPDIR"), "/var/folders/")) + if (getuid() == 0 && hasPrefix(getEnv("TMPDIR").value_or("/tmp"), "/var/folders/")) unsetenv("TMPDIR"); #endif } @@ -175,10 +188,6 @@ LegacyArgs::LegacyArgs(const std::string & programName, .description("build from source if substitution fails") .set(&(bool&) settings.tryFallback, true); - mkFlag1('j', "max-jobs", "jobs", "maximum number of parallel builds", [=](std::string s) { - settings.set("max-jobs", s); - }); - auto intSettingAlias = [&](char shortName, const std::string & longName, const std::string & description, const std::string & dest) { mkFlag(shortName, longName, description, [=](unsigned int n) { @@ -228,7 +237,7 @@ bool LegacyArgs::processArgs(const Strings & args, bool finish) void parseCmdLine(int argc, char * * argv, std::function parseArg) { - parseCmdLine(baseNameOf(argv[0]), argvToStrings(argc, argv), parseArg); + parseCmdLine(std::string(baseNameOf(argv[0])), argvToStrings(argc, argv), parseArg); } diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh index ab36b7c05..f047dd05c 100644 --- a/src/libmain/shared.hh +++ b/src/libmain/shared.hh @@ -3,6 +3,7 @@ #include "util.hh" #include "args.hh" #include "common-args.hh" +#include "path.hh" #include @@ -37,11 +38,15 @@ void printVersion(const string & programName); void printGCWarning(); class Store; +struct StorePathWithOutputs; -void printMissing(ref store, const PathSet & paths, Verbosity lvl = lvlInfo); +void printMissing( + ref store, + const std::vector & paths, + Verbosity lvl = lvlInfo); -void printMissing(ref store, const PathSet & willBuild, - const PathSet & willSubstitute, const PathSet & unknown, +void printMissing(ref store, const StorePathSet & willBuild, + const StorePathSet & willSubstitute, const StorePathSet & unknown, unsigned long long downloadSize, unsigned long long narSize, Verbosity lvl = lvlInfo); string getArg(const string & opt, diff --git a/src/libstore/binary-cache-store.cc 
b/src/libstore/binary-cache-store.cc index 4527ee6ba..717faec92 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -10,10 +10,13 @@ #include "nar-info-disk-cache.hh" #include "nar-accessor.hh" #include "json.hh" +#include "thread-pool.hh" #include - #include +#include + +#include namespace nix { @@ -46,16 +49,16 @@ void BinaryCacheStore::init() throw Error(format("binary cache '%s' is for Nix stores with prefix '%s', not '%s'") % getUri() % value % storeDir); } else if (name == "WantMassQuery") { - wantMassQuery_ = value == "1"; + wantMassQuery.setDefault(value == "1" ? "true" : "false"); } else if (name == "Priority") { - string2Int(value, priority); + priority.setDefault(fmt("%d", std::stoi(value))); } } } } void BinaryCacheStore::getFile(const std::string & path, - Callback> callback) + Callback> callback) noexcept { try { callback(getFile(path)); @@ -88,19 +91,18 @@ std::shared_ptr BinaryCacheStore::getFile(const std::string & path) return sink.s; } -Path BinaryCacheStore::narInfoFileFor(const Path & storePath) +std::string BinaryCacheStore::narInfoFileFor(const StorePath & storePath) { - assertStorePath(storePath); - return storePathToHash(storePath) + ".narinfo"; + return storePathToHash(printStorePath(storePath)) + ".narinfo"; } void BinaryCacheStore::writeNarInfo(ref narInfo) { auto narInfoFile = narInfoFileFor(narInfo->path); - upsertFile(narInfoFile, narInfo->to_string(), "text/x-nix-narinfo"); + upsertFile(narInfoFile, narInfo->to_string(*this), "text/x-nix-narinfo"); - auto hashPart = storePathToHash(narInfo->path); + auto hashPart = storePathToHash(printStorePath(narInfo->path)); { auto state_(state.lock()); @@ -123,8 +125,8 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const refcompare(0, narMagic.size(), narMagic) == 0); @@ -135,10 +137,15 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const refnarHash = hashString(htSHA256, *nar); if (info.narHash && info.narHash != narInfo->narHash) - throw Error(format("refusing to copy corrupted path '%1%' to binary cache") % info.path); + throw Error("refusing to copy corrupted path '%1%' to binary cache", printStorePath(info.path)); auto accessor_ = std::dynamic_pointer_cast(accessor); + auto narAccessor = makeNarAccessor(nar); + + if (accessor_) + accessor_->addToCache(printStorePath(info.path), *nar, narAccessor); + /* Optionally write a JSON file containing a listing of the contents of the NAR. */ if (writeNARListing) { @@ -148,23 +155,13 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const refaddToCache(info.path, *nar, narAccessor); - { auto res = jsonRoot.placeholder("root"); listNar(res, narAccessor, "", true); } } - upsertFile(storePathToHash(info.path) + ".ls", jsonOut.str(), "application/json"); - } - - else { - if (accessor_) - accessor_->addToCache(info.path, *nar, makeNarAccessor(nar)); + upsertFile(storePathToHash(printStorePath(info.path)) + ".ls", jsonOut.str(), "application/json"); } /* Compress the NAR. 
*/ @@ -176,17 +173,75 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const reffileSize = narCompressed->size(); auto duration = std::chrono::duration_cast(now2 - now1).count(); - printMsg(lvlTalkative, format("copying path '%1%' (%2% bytes, compressed %3$.1f%% in %4% ms) to binary cache") - % narInfo->path % narInfo->narSize - % ((1.0 - (double) narCompressed->size() / nar->size()) * 100.0) - % duration); + printMsg(lvlTalkative, "copying path '%1%' (%2% bytes, compressed %3$.1f%% in %4% ms) to binary cache", + printStorePath(narInfo->path), narInfo->narSize, + ((1.0 - (double) narCompressed->size() / nar->size()) * 100.0), + duration); - /* Atomically write the NAR file. */ narInfo->url = "nar/" + narInfo->fileHash.to_string(Base32, false) + ".nar" + (compression == "xz" ? ".xz" : compression == "bzip2" ? ".bz2" : compression == "br" ? ".br" : ""); + + /* Optionally maintain an index of DWARF debug info files + consisting of JSON files named 'debuginfo/' that + specify the NAR file and member containing the debug info. */ + if (writeDebugInfo) { + + std::string buildIdDir = "/lib/debug/.build-id"; + + if (narAccessor->stat(buildIdDir).type == FSAccessor::tDirectory) { + + ThreadPool threadPool(25); + + auto doFile = [&](std::string member, std::string key, std::string target) { + checkInterrupt(); + + nlohmann::json json; + json["archive"] = target; + json["member"] = member; + + // FIXME: or should we overwrite? The previous link may point + // to a GC'ed file, so overwriting might be useful... + if (fileExists(key)) return; + + printMsg(lvlTalkative, "creating debuginfo link from '%s' to '%s'", key, target); + + upsertFile(key, json.dump(), "application/json"); + }; + + std::regex regex1("^[0-9a-f]{2}$"); + std::regex regex2("^[0-9a-f]{38}\\.debug$"); + + for (auto & s1 : narAccessor->readDirectory(buildIdDir)) { + auto dir = buildIdDir + "/" + s1; + + if (narAccessor->stat(dir).type != FSAccessor::tDirectory + || !std::regex_match(s1, regex1)) + continue; + + for (auto & s2 : narAccessor->readDirectory(dir)) { + auto debugPath = dir + "/" + s2; + + if (narAccessor->stat(debugPath).type != FSAccessor::tRegular + || !std::regex_match(s2, regex2)) + continue; + + auto buildId = s1 + s2; + + std::string key = "debuginfo/" + buildId; + std::string target = "../" + narInfo->url; + + threadPool.enqueue(std::bind(doFile, std::string(debugPath, 1), key, target)); + } + } + + threadPool.process(); + } + } + + /* Atomically write the NAR file. */ if (repair || !fileExists(narInfo->url)) { stats.narWrite++; upsertFile(narInfo->url, *narCompressed, "application/x-nix-nar"); @@ -198,14 +253,14 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const refsign(*secretKey); + if (secretKey) narInfo->sign(*this, *secretKey); writeNarInfo(narInfo); stats.narInfoWrite++; } -bool BinaryCacheStore::isValidPathUncached(const Path & storePath) +bool BinaryCacheStore::isValidPathUncached(const StorePath & storePath) { // FIXME: this only checks whether a .narinfo with a matching hash // part exists. 
So ‘f4kb...-foo’ matches ‘f4kb...-bar’, even @@ -213,7 +268,7 @@ bool BinaryCacheStore::isValidPathUncached(const Path & storePath) return fileExists(narInfoFileFor(storePath)); } -void BinaryCacheStore::narFromPath(const Path & storePath, Sink & sink) +void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink) { auto info = queryPathInfo(storePath).cast(); @@ -239,36 +294,39 @@ void BinaryCacheStore::narFromPath(const Path & storePath, Sink & sink) stats.narReadBytes += narSize; } -void BinaryCacheStore::queryPathInfoUncached(const Path & storePath, - Callback> callback) +void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath, + Callback> callback) noexcept { auto uri = getUri(); + auto storePathS = printStorePath(storePath); auto act = std::make_shared(*logger, lvlTalkative, actQueryPathInfo, - fmt("querying info about '%s' on '%s'", storePath, uri), Logger::Fields{storePath, uri}); + fmt("querying info about '%s' on '%s'", storePathS, uri), Logger::Fields{storePathS, uri}); PushActivity pact(act->id); auto narInfoFile = narInfoFileFor(storePath); + auto callbackPtr = std::make_shared(std::move(callback)); + getFile(narInfoFile, {[=](std::future> fut) { try { auto data = fut.get(); - if (!data) return callback(nullptr); + if (!data) return (*callbackPtr)(nullptr); stats.narInfoRead++; - callback((std::shared_ptr) + (*callbackPtr)((std::shared_ptr) std::make_shared(*this, *data, narInfoFile)); (void) act; // force Activity into this lambda to ensure it stays alive } catch (...) { - callback.rethrow(); + callbackPtr->rethrow(); } }}); } -Path BinaryCacheStore::addToStore(const string & name, const Path & srcPath, +StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath, bool recursive, HashType hashAlgo, PathFilter & filter, RepairFlag repair) { // FIXME: some cut&paste from LocalStore::addToStore(). @@ -287,20 +345,18 @@ Path BinaryCacheStore::addToStore(const string & name, const Path & srcPath, h = hashString(hashAlgo, s); } - ValidPathInfo info; - info.path = makeFixedOutputPath(recursive, h, name); + ValidPathInfo info(makeFixedOutputPath(recursive, h, name)); addToStore(info, sink.s, repair, CheckSigs, nullptr); - return info.path; + return std::move(info.path); } -Path BinaryCacheStore::addTextToStore(const string & name, const string & s, - const PathSet & references, RepairFlag repair) +StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s, + const StorePathSet & references, RepairFlag repair) { - ValidPathInfo info; - info.path = computeStorePathForText(name, s, references); - info.references = references; + ValidPathInfo info(computeStorePathForText(name, s, references)); + info.references = cloneStorePathSet(references); if (repair || !isValidPath(info.path)) { StringSink sink; @@ -308,7 +364,7 @@ Path BinaryCacheStore::addTextToStore(const string & name, const string & s, addToStore(info, sink.s, repair, CheckSigs, nullptr); } - return info.path; + return std::move(info.path); } ref BinaryCacheStore::getFSAccessor() @@ -316,7 +372,7 @@ ref BinaryCacheStore::getFSAccessor() return make_ref(ref(shared_from_this()), localNarCache); } -void BinaryCacheStore::addSignatures(const Path & storePath, const StringSet & sigs) +void BinaryCacheStore::addSignatures(const StorePath & storePath, const StringSet & sigs) { /* Note: this is inherently racy since there is no locking on binary caches. 
In particular, with S3 this unreliable, even @@ -332,24 +388,22 @@ void BinaryCacheStore::addSignatures(const Path & storePath, const StringSet & s writeNarInfo(narInfo); } -std::shared_ptr BinaryCacheStore::getBuildLog(const Path & path) +std::shared_ptr BinaryCacheStore::getBuildLog(const StorePath & path) { - Path drvPath; + auto drvPath = path.clone(); - if (isDerivation(path)) - drvPath = path; - else { + if (!path.isDerivation()) { try { auto info = queryPathInfo(path); // FIXME: add a "Log" field to .narinfo - if (info->deriver == "") return nullptr; - drvPath = info->deriver; + if (!info->deriver) return nullptr; + drvPath = info->deriver->clone(); } catch (InvalidPath &) { return nullptr; } } - auto logPath = "log/" + baseNameOf(drvPath); + auto logPath = "log/" + std::string(baseNameOf(printStorePath(drvPath))); debug("fetching build log from binary cache '%s/%s'", getUri(), logPath); diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index 953f3b90a..aa13c1cb4 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -17,6 +17,7 @@ public: const Setting compression{this, "xz", "compression", "NAR compression method ('xz', 'bzip2', or 'none')"}; const Setting writeNARListing{this, false, "write-nar-listing", "whether to write a JSON file listing the files in each NAR"}; + const Setting writeDebugInfo{this, false, "index-debug-info", "whether to index DWARF debug info files by build ID"}; const Setting secretKeyFile{this, "", "secret-key", "path to secret key used to sign the binary cache"}; const Setting localNarCache{this, "", "local-nar-cache", "path to a local cache of NARs"}; const Setting parallelCompression{this, false, "parallel-compression", @@ -47,15 +48,10 @@ public: /* Fetch the specified file and call the specified callback with the result. A subclass may implement this asynchronously. 
*/ virtual void getFile(const std::string & path, - Callback> callback); + Callback> callback) noexcept; std::shared_ptr getFile(const std::string & path); -protected: - - bool wantMassQuery_ = false; - int priority = 50; - public: virtual void init(); @@ -64,49 +60,45 @@ private: std::string narMagic; - std::string narInfoFileFor(const Path & storePath); + std::string narInfoFileFor(const StorePath & storePath); void writeNarInfo(ref narInfo); public: - bool isValidPathUncached(const Path & path) override; + bool isValidPathUncached(const StorePath & path) override; - void queryPathInfoUncached(const Path & path, - Callback> callback) override; + void queryPathInfoUncached(const StorePath & path, + Callback> callback) noexcept override; - Path queryPathFromHashPart(const string & hashPart) override + std::optional queryPathFromHashPart(const std::string & hashPart) override { unsupported("queryPathFromHashPart"); } - bool wantMassQuery() override { return wantMassQuery_; } - void addToStore(const ValidPathInfo & info, const ref & nar, RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr accessor) override; - Path addToStore(const string & name, const Path & srcPath, + StorePath addToStore(const string & name, const Path & srcPath, bool recursive, HashType hashAlgo, PathFilter & filter, RepairFlag repair) override; - Path addTextToStore(const string & name, const string & s, - const PathSet & references, RepairFlag repair) override; + StorePath addTextToStore(const string & name, const string & s, + const StorePathSet & references, RepairFlag repair) override; - void narFromPath(const Path & path, Sink & sink) override; + void narFromPath(const StorePath & path, Sink & sink) override; - BuildResult buildDerivation(const Path & drvPath, const BasicDerivation & drv, + BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode) override { unsupported("buildDerivation"); } - void ensurePath(const Path & path) override + void ensurePath(const StorePath & path) override { unsupported("ensurePath"); } ref getFSAccessor() override; - void addSignatures(const Path & storePath, const StringSet & sigs) override; + void addSignatures(const StorePath & storePath, const StringSet & sigs) override; - std::shared_ptr getBuildLog(const Path & path) override; - - int getPriority() override { return priority; } + std::shared_ptr getBuildLog(const StorePath & path) override; }; diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 7fa82ae74..68a440556 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -13,6 +13,8 @@ #include "nar-info.hh" #include "parsed-derivations.hh" #include "machines.hh" +#include "daemon.hh" +#include "worker-protocol.hh" #include #include @@ -23,8 +25,8 @@ #include #include #include +#include -#include #include #include #include @@ -33,11 +35,13 @@ #include #include #include +#include #include #include #include #include #include +#include #include #include @@ -95,7 +99,7 @@ typedef set Goals; typedef list WeakGoals; /* A map of paths to goals (and the other way around). */ -typedef map WeakGoalMap; +typedef std::map WeakGoalMap; @@ -250,7 +254,7 @@ private: steady_time_point lastWokenUp; /* Cache for pathContentsGood(). */ - std::map pathContentsGoodCache; + std::map pathContentsGoodCache; public: @@ -265,6 +269,12 @@ public: /* Set if at least one derivation had a timeout. */ bool timedOut; + /* Set if at least one derivation fails with a hash mismatch. 
*/ + bool hashMismatch; + + /* Set if at least one derivation is not deterministic in check mode. */ + bool checkMismatch; + LocalStore & store; std::unique_ptr hook; @@ -291,10 +301,10 @@ public: ~Worker(); /* Make a goal (with caching). */ - GoalPtr makeDerivationGoal(const Path & drvPath, const StringSet & wantedOutputs, BuildMode buildMode = bmNormal); - std::shared_ptr makeBasicDerivationGoal(const Path & drvPath, + GoalPtr makeDerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs, BuildMode buildMode = bmNormal); + std::shared_ptr makeBasicDerivationGoal(StorePath && drvPath, const BasicDerivation & drv, BuildMode buildMode = bmNormal); - GoalPtr makeSubstitutionGoal(const Path & storePath, RepairFlag repair = NoRepair); + GoalPtr makeSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair); /* Remove a dead goal. */ void removeGoal(GoalPtr goal); @@ -342,9 +352,9 @@ public: /* Check whether the given valid path exists and has the right contents. */ - bool pathContentsGood(const Path & path); + bool pathContentsGood(const StorePath & path); - void markContentsGood(const Path & path); + void markContentsGood(StorePath && path); void updateProgress() { @@ -461,6 +471,31 @@ static void commonChildInit(Pipe & logPipe) close(fdDevNull); } +void handleDiffHook( + uid_t uid, uid_t gid, + const Path & tryA, const Path & tryB, + const Path & drvPath, const Path & tmpDir) +{ + auto diffHook = settings.diffHook; + if (diffHook != "" && settings.runDiffHook) { + try { + RunOptions diffHookOptions(diffHook,{tryA, tryB, drvPath, tmpDir}); + diffHookOptions.searchPath = true; + diffHookOptions.uid = uid; + diffHookOptions.gid = gid; + diffHookOptions.chdir = "/"; + + auto diffRes = runProgram(diffHookOptions); + if (!statusOk(diffRes.first)) + throw ExecError(diffRes.first, fmt("diff-hook program '%1%' %2%", diffHook, statusToString(diffRes.first))); + + if (diffRes.second != "") + printError(chomp(diffRes.second)); + } catch (Error & error) { + printError("diff hook execution failed: %s", error.what()); + } + } +} ////////////////////////////////////////////////////////////////////// @@ -468,12 +503,6 @@ static void commonChildInit(Pipe & logPipe) class UserLock { private: - /* POSIX locks suck. If we have a lock on a file, and we open and - close that file again (without closing the original file - descriptor), we lose the lock. So we have to be *very* careful - not to open a lock file on which we are holding a lock. */ - static Sync lockedPaths_; - Path fnUserLock; AutoCloseFD fdUserLock; @@ -484,7 +513,6 @@ private: public: UserLock(); - ~UserLock(); void kill(); @@ -498,9 +526,6 @@ public: }; -Sync UserLock::lockedPaths_; - - UserLock::UserLock() { assert(settings.buildUsersGroup != ""); @@ -537,48 +562,34 @@ UserLock::UserLock() fnUserLock = (format("%1%/userpool/%2%") % settings.nixStateDir % pw->pw_uid).str(); - { - auto lockedPaths(lockedPaths_.lock()); - if (lockedPaths->count(fnUserLock)) - /* We already have a lock on this one. 
*/ - continue; - lockedPaths->insert(fnUserLock); - } + AutoCloseFD fd = open(fnUserLock.c_str(), O_RDWR | O_CREAT | O_CLOEXEC, 0600); + if (!fd) + throw SysError(format("opening user lock '%1%'") % fnUserLock); - try { + if (lockFile(fd.get(), ltWrite, false)) { + fdUserLock = std::move(fd); + user = i; + uid = pw->pw_uid; - AutoCloseFD fd = open(fnUserLock.c_str(), O_RDWR | O_CREAT | O_CLOEXEC, 0600); - if (!fd) - throw SysError(format("opening user lock '%1%'") % fnUserLock); - - if (lockFile(fd.get(), ltWrite, false)) { - fdUserLock = std::move(fd); - user = i; - uid = pw->pw_uid; - - /* Sanity check... */ - if (uid == getuid() || uid == geteuid()) - throw Error(format("the Nix user should not be a member of '%1%'") - % settings.buildUsersGroup); + /* Sanity check... */ + if (uid == getuid() || uid == geteuid()) + throw Error(format("the Nix user should not be a member of '%1%'") + % settings.buildUsersGroup); #if __linux__ - /* Get the list of supplementary groups of this build user. This - is usually either empty or contains a group such as "kvm". */ - supplementaryGIDs.resize(10); - int ngroups = supplementaryGIDs.size(); - int err = getgrouplist(pw->pw_name, pw->pw_gid, - supplementaryGIDs.data(), &ngroups); - if (err == -1) - throw Error(format("failed to get list of supplementary groups for '%1%'") % pw->pw_name); + /* Get the list of supplementary groups of this build user. This + is usually either empty or contains a group such as "kvm". */ + supplementaryGIDs.resize(10); + int ngroups = supplementaryGIDs.size(); + int err = getgrouplist(pw->pw_name, pw->pw_gid, + supplementaryGIDs.data(), &ngroups); + if (err == -1) + throw Error(format("failed to get list of supplementary groups for '%1%'") % pw->pw_name); - supplementaryGIDs.resize(ngroups); + supplementaryGIDs.resize(ngroups); #endif - return; - } - - } catch (...) { - lockedPaths_.lock()->erase(fnUserLock); + return; } } @@ -588,14 +599,6 @@ UserLock::UserLock() } -UserLock::~UserLock() -{ - auto lockedPaths(lockedPaths_.lock()); - assert(lockedPaths->count(fnUserLock)); - lockedPaths->erase(fnUserLock); -} - - void UserLock::kill() { killUser(uid); @@ -663,7 +666,7 @@ HookInstance::HookInstance() throw SysError("dupping builder's stdout/stderr"); Strings args = { - baseNameOf(settings.buildHook), + std::string(baseNameOf(settings.buildHook.get())), std::to_string(verbosity), }; @@ -699,23 +702,6 @@ HookInstance::~HookInstance() ////////////////////////////////////////////////////////////////////// -typedef map StringRewrites; - - -std::string rewriteStrings(std::string s, const StringRewrites & rewrites) -{ - for (auto & i : rewrites) { - size_t j = 0; - while ((j = s.find(i.first, j)) != string::npos) - s.replace(j, i.first.size(), i.second); - } - return s; -} - - -////////////////////////////////////////////////////////////////////// - - typedef enum {rpAccept, rpDecline, rpPostpone} HookReply; class SubstitutionGoal; @@ -727,7 +713,7 @@ private: bool useDerivation; /* The path of the derivation. */ - Path drvPath; + StorePath drvPath; /* The specific outputs that we need to build. Empty means all of them. */ @@ -752,17 +738,14 @@ private: /* All input paths (that is, the union of FS closures of the immediate input paths). */ - PathSet inputPaths; - - /* Referenceable paths (i.e., input and output paths). */ - PathSet allPaths; + StorePathSet inputPaths; /* Outputs that are already valid. If we're repairing, these are the outputs that are valid *and* not corrupt. 
*/ - PathSet validPaths; + StorePathSet validPaths; /* Outputs that are corrupt or not valid. */ - PathSet missingPaths; + StorePathSet missingPaths; /* User selected for running the builder. */ std::unique_ptr buildUser; @@ -794,9 +777,13 @@ private: /* Pipe for the builder's standard output/error. */ Pipe builderOut; - /* Pipe for synchronising updates to the builder user namespace. */ + /* Pipe for synchronising updates to the builder namespaces. */ Pipe userNamespaceSync; + /* The mount namespace of the builder, used to add additional + paths to the sandbox as a result of recursive Nix calls. */ + AutoCloseFD sandboxMountNamespace; + /* The build hook. */ std::unique_ptr hook; @@ -837,8 +824,8 @@ private: #endif /* Hash rewriting. */ - StringRewrites inputRewrites, outputRewrites; - typedef map RedirectedOutputs; + StringMap inputRewrites, outputRewrites; + typedef map RedirectedOutputs; RedirectedOutputs redirectedOutputs; BuildMode buildMode; @@ -846,7 +833,7 @@ private: /* If we're repairing without a chroot, there may be outputs that are valid but corrupt. So we redirect these outputs to temporary paths. */ - PathSet redirectedBadOutputs; + StorePathSet redirectedBadOutputs; BuildResult result; @@ -875,13 +862,39 @@ private: /* The remote machine on which we're building. */ std::string machineName; + /* The recursive Nix daemon socket. */ + AutoCloseFD daemonSocket; + + /* The daemon main thread. */ + std::thread daemonThread; + + /* The daemon worker threads. */ + std::vector daemonWorkerThreads; + + /* Paths that were added via recursive Nix calls. */ + StorePathSet addedPaths; + + /* Recursive Nix calls are only allowed to build or realize paths + in the original input closure or added via a recursive Nix call + (so e.g. you can't do 'nix-store -r /nix/store/' where + /nix/store/ is some arbitrary path in a binary cache). */ + bool isAllowed(const StorePath & path) + { + return inputPaths.count(path) || addedPaths.count(path); + } + + friend struct RestrictedStore; + public: - DerivationGoal(const Path & drvPath, const StringSet & wantedOutputs, + DerivationGoal(StorePath && drvPath, const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode = bmNormal); - DerivationGoal(const Path & drvPath, const BasicDerivation & drv, + DerivationGoal(StorePath && drvPath, const BasicDerivation & drv, Worker & worker, BuildMode buildMode = bmNormal); ~DerivationGoal(); + /* Whether we need to perform hash rewriting if there are valid output paths. */ + bool needsHashRewrite(); + void timedOut() override; string key() override @@ -890,14 +903,14 @@ public: i.e. a derivation named "aardvark" always comes before "baboon". And substitution goals always happen before derivation goals (due to "b$"). */ - return "b$" + storePathToName(drvPath) + "$" + drvPath; + return "b$" + std::string(drvPath.name()) + "$" + worker.store.printStorePath(drvPath); } void work() override; - Path getDrvPath() + StorePath getDrvPath() { - return drvPath; + return drvPath.clone(); } /* Add wanted outputs to an already existing derivation goal. */ @@ -925,9 +938,20 @@ private: /* Fill in the environment for the builder. */ void initEnv(); + /* Setup tmp dir location. */ + void initTmpDir(); + /* Write a JSON file containing the derivation attributes. */ void writeStructuredAttrs(); + void startDaemon(); + + void stopDaemon(); + + /* Add 'path' to the set of paths that may be referenced by the + outputs, and make it appear in the sandbox. 
*/ + void addDependency(const StorePath & path); + /* Make a file owned by the builder. */ void chownToBuilder(const Path & path); @@ -960,15 +984,12 @@ private: void flushLine(); /* Return the set of (in)valid paths. */ - PathSet checkPathValidity(bool returnValid, bool checkHash); - - /* Abort the goal if `path' failed to build. */ - bool pathFailed(const Path & path); + StorePathSet checkPathValidity(bool returnValid, bool checkHash); /* Forcibly kill the child process, if any. */ void killChild(); - Path addHashRewrite(const Path & path); + void addHashRewrite(const StorePath & path); void repairClosure(); @@ -979,23 +1000,23 @@ private: void done(BuildResult::Status status, const string & msg = ""); - PathSet exportReferences(PathSet storePaths); + StorePathSet exportReferences(const StorePathSet & storePaths); }; const Path DerivationGoal::homeDir = "/homeless-shelter"; -DerivationGoal::DerivationGoal(const Path & drvPath, const StringSet & wantedOutputs, +DerivationGoal::DerivationGoal(StorePath && drvPath, const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode) : Goal(worker) , useDerivation(true) - , drvPath(drvPath) + , drvPath(std::move(drvPath)) , wantedOutputs(wantedOutputs) , buildMode(buildMode) { state = &DerivationGoal::getDerivation; - name = (format("building of '%1%'") % drvPath).str(); + name = fmt("building of '%s'", worker.store.printStorePath(this->drvPath)); trace("created"); mcExpectedBuilds = std::make_unique>(worker.expectedBuilds); @@ -1003,16 +1024,16 @@ DerivationGoal::DerivationGoal(const Path & drvPath, const StringSet & wantedOut } -DerivationGoal::DerivationGoal(const Path & drvPath, const BasicDerivation & drv, +DerivationGoal::DerivationGoal(StorePath && drvPath, const BasicDerivation & drv, Worker & worker, BuildMode buildMode) : Goal(worker) , useDerivation(false) - , drvPath(drvPath) + , drvPath(std::move(drvPath)) , buildMode(buildMode) { - this->drv = std::unique_ptr(new BasicDerivation(drv)); + this->drv = std::make_unique(BasicDerivation(drv)); state = &DerivationGoal::haveDerivation; - name = (format("building of %1%") % showPaths(drv.outputPaths())).str(); + name = fmt("building of %s", worker.store.showPaths(drv.outputPaths())); trace("created"); mcExpectedBuilds = std::make_unique>(worker.expectedBuilds); @@ -1020,7 +1041,7 @@ DerivationGoal::DerivationGoal(const Path & drvPath, const BasicDerivation & drv /* Prevent the .chroot directory from being garbage-collected. (See isActiveTempFile() in gc.cc.) */ - worker.store.addTempRoot(drvPath); + worker.store.addTempRoot(this->drvPath); } @@ -1029,11 +1050,23 @@ DerivationGoal::~DerivationGoal() /* Careful: we should never ever throw an exception from a destructor. */ try { killChild(); } catch (...) { ignoreException(); } + try { stopDaemon(); } catch (...) { ignoreException(); } try { deleteTmpDir(false); } catch (...) { ignoreException(); } try { closeLogFile(); } catch (...) { ignoreException(); } } +inline bool DerivationGoal::needsHashRewrite() +{ +#if __linux__ + return !useChroot; +#else + /* Darwin requires hash rewriting even when sandboxing is enabled. 
*/ + return true; +#endif +} + + void DerivationGoal::killChild() { if (pid != -1) { @@ -1082,10 +1115,8 @@ void DerivationGoal::addWantedOutputs(const StringSet & outputs) needRestart = true; } else for (auto & i : outputs) - if (wantedOutputs.find(i) == wantedOutputs.end()) { - wantedOutputs.insert(i); + if (wantedOutputs.insert(i).second) needRestart = true; - } } @@ -1112,7 +1143,7 @@ void DerivationGoal::loadDerivation() trace("loading derivation"); if (nrFailed != 0) { - printError(format("cannot build missing derivation '%1%'") % drvPath); + printError("cannot build missing derivation '%s'", worker.store.printStorePath(drvPath)); done(BuildResult::MiscFailure); return; } @@ -1141,7 +1172,7 @@ void DerivationGoal::haveDerivation() worker.store.addTempRoot(i.second.path); /* Check what outputs paths are not already valid. */ - PathSet invalidOutputs = checkPathValidity(false, buildMode == bmRepair); + auto invalidOutputs = checkPathValidity(false, buildMode == bmRepair); /* If they are all valid, then we're done. */ if (invalidOutputs.size() == 0 && buildMode == bmNormal) { @@ -1149,12 +1180,12 @@ void DerivationGoal::haveDerivation() return; } - parsedDrv = std::make_unique(drvPath, *drv); + parsedDrv = std::make_unique(drvPath.clone(), *drv); /* We are first going to try to create the invalid output paths through substitutes. If that doesn't work, we'll build them. */ - if (settings.useSubstitutes && drv->substitutesAllowed()) + if (settings.useSubstitutes && parsedDrv->substitutesAllowed()) for (auto & i : invalidOutputs) addWaitee(worker.makeSubstitutionGoal(i, buildMode == bmRepair ? Repair : NoRepair)); @@ -1170,7 +1201,9 @@ void DerivationGoal::outputsSubstituted() trace("all outputs substituted (maybe)"); if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback) { - done(BuildResult::TransientFailure, (format("some substitutes for the outputs of derivation '%1%' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ") % drvPath).str()); + done(BuildResult::TransientFailure, + fmt("some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ", + worker.store.printStorePath(drvPath))); return; } @@ -1197,14 +1230,15 @@ void DerivationGoal::outputsSubstituted() return; } if (buildMode == bmCheck && nrInvalid > 0) - throw Error(format("some outputs of '%1%' are not valid, so checking is not possible") % drvPath); + throw Error("some outputs of '%s' are not valid, so checking is not possible", + worker.store.printStorePath(drvPath)); /* Otherwise, at least one of the output paths could not be produced using a substitute. So we have to build instead. */ /* Make sure checkPathValidity() from now on checks all outputs. */ - wantedOutputs = PathSet(); + wantedOutputs.clear(); /* The inputs must be built before we can build this goal. 
*/ if (useDerivation) @@ -1214,8 +1248,8 @@ void DerivationGoal::outputsSubstituted() for (auto & i : drv->inputSrcs) { if (worker.store.isValidPath(i)) continue; if (!settings.useSubstitutes) - throw Error(format("dependency '%1%' of '%2%' does not exist, and substitution is disabled") - % i % drvPath); + throw Error("dependency '%s' of '%s' does not exist, and substitution is disabled", + worker.store.printStorePath(i), worker.store.printStorePath(drvPath)); addWaitee(worker.makeSubstitutionGoal(i)); } @@ -1234,7 +1268,7 @@ void DerivationGoal::repairClosure() that produced those outputs. */ /* Get the output closure. */ - PathSet outputClosure; + StorePathSet outputClosure; for (auto & i : drv->outputs) { if (!wantOutput(i.first, wantedOutputs)) continue; worker.store.computeFSClosure(i.second.path, outputClosure); @@ -1247,26 +1281,26 @@ void DerivationGoal::repairClosure() /* Get all dependencies of this derivation so that we know which derivation is responsible for which path in the output closure. */ - PathSet inputClosure; + StorePathSet inputClosure; if (useDerivation) worker.store.computeFSClosure(drvPath, inputClosure); - std::map outputsToDrv; + std::map outputsToDrv; for (auto & i : inputClosure) - if (isDerivation(i)) { + if (i.isDerivation()) { Derivation drv = worker.store.derivationFromPath(i); for (auto & j : drv.outputs) - outputsToDrv[j.second.path] = i; + outputsToDrv.insert_or_assign(j.second.path.clone(), i.clone()); } /* Check each path (slow!). */ - PathSet broken; for (auto & i : outputClosure) { if (worker.pathContentsGood(i)) continue; - printError(format("found corrupted or missing path '%1%' in the output closure of '%2%'") % i % drvPath); - Path drvPath2 = outputsToDrv[i]; - if (drvPath2 == "") + printError("found corrupted or missing path '%s' in the output closure of '%s'", + worker.store.printStorePath(i), worker.store.printStorePath(drvPath)); + auto drvPath2 = outputsToDrv.find(i); + if (drvPath2 == outputsToDrv.end()) addWaitee(worker.makeSubstitutionGoal(i, Repair)); else - addWaitee(worker.makeDerivationGoal(drvPath2, PathSet(), bmRepair)); + addWaitee(worker.makeDerivationGoal(drvPath2->second, StringSet(), bmRepair)); } if (waitees.empty()) { @@ -1282,7 +1316,8 @@ void DerivationGoal::closureRepaired() { trace("closure repaired"); if (nrFailed > 0) - throw Error(format("some paths in the output closure of derivation '%1%' could not be repaired") % drvPath); + throw Error("some paths in the output closure of derivation '%s' could not be repaired", + worker.store.printStorePath(drvPath)); done(BuildResult::AlreadyValid); } @@ -1293,10 +1328,9 @@ void DerivationGoal::inputsRealised() if (nrFailed != 0) { if (!useDerivation) - throw Error(format("some dependencies of '%1%' are missing") % drvPath); - printError( - format("cannot build derivation '%1%': %2% dependencies couldn't be built") - % drvPath % nrFailed); + throw Error("some dependencies of '%s' are missing", worker.store.printStorePath(drvPath)); + printError("cannot build derivation '%s': %s dependencies couldn't be built", + worker.store.printStorePath(drvPath), nrFailed); done(BuildResult::DependencyFailed); return; } @@ -1309,12 +1343,6 @@ void DerivationGoal::inputsRealised() /* Gather information necessary for computing the closure and/or running the build hook. */ - /* The outputs are referenceable paths. */ - for (auto & i : drv->outputs) { - debug(format("building path '%1%'") % i.second.path); - allPaths.insert(i.second.path); - } - /* Determine the full set of input paths. 
*/ /* First, the input derivations. */ @@ -1325,21 +1353,21 @@ void DerivationGoal::inputsRealised() that are specified as inputs. */ assert(worker.store.isValidPath(i.first)); Derivation inDrv = worker.store.derivationFromPath(i.first); - for (auto & j : i.second) - if (inDrv.outputs.find(j) != inDrv.outputs.end()) - worker.store.computeFSClosure(inDrv.outputs[j].path, inputPaths); + for (auto & j : i.second) { + auto k = inDrv.outputs.find(j); + if (k != inDrv.outputs.end()) + worker.store.computeFSClosure(k->second.path, inputPaths); else throw Error( - format("derivation '%1%' requires non-existent output '%2%' from input derivation '%3%'") - % drvPath % j % i.first); + "derivation '%s' requires non-existent output '%s' from input derivation '%s'", + worker.store.printStorePath(drvPath), j, worker.store.printStorePath(i.first)); + } } /* Second, the input sources. */ worker.store.computeFSClosure(drv->inputSrcs, inputPaths); - debug(format("added input paths %1%") % showPaths(inputPaths)); - - allPaths.insert(inputPaths.begin(), inputPaths.end()); + debug("added input paths %s", worker.store.showPaths(inputPaths)); /* Is this a fixed-output derivation? */ fixedOutput = drv->isFixedOutput(); @@ -1369,7 +1397,7 @@ void DerivationGoal::tryToBuild() few seconds and then retry this goal. */ PathSet lockFiles; for (auto & outPath : drv->outputPaths()) - lockFiles.insert(worker.store.toRealPath(outPath)); + lockFiles.insert(worker.store.toRealPath(worker.store.printStorePath(outPath))); if (!outputLocks.lockPaths(lockFiles, "", false)) { worker.waitForAWhile(shared_from_this()); @@ -1385,23 +1413,22 @@ void DerivationGoal::tryToBuild() build this derivation, so no further checks are necessary. */ validPaths = checkPathValidity(true, buildMode == bmRepair); if (buildMode != bmCheck && validPaths.size() == drv->outputs.size()) { - debug(format("skipping build of derivation '%1%', someone beat us to it") % drvPath); + debug("skipping build of derivation '%s', someone beat us to it", worker.store.printStorePath(drvPath)); outputLocks.setDeletion(true); done(BuildResult::AlreadyValid); return; } - missingPaths = drv->outputPaths(); + missingPaths = cloneStorePathSet(drv->outputPaths()); if (buildMode != bmCheck) for (auto & i : validPaths) missingPaths.erase(i); /* If any of the outputs already exist but are not valid, delete them. */ for (auto & i : drv->outputs) { - Path path = i.second.path; - if (worker.store.isValidPath(path)) continue; - debug(format("removing invalid path '%1%'") % path); - deletePath(worker.store.toRealPath(path)); + if (worker.store.isValidPath(i.second.path)) continue; + debug("removing invalid path '%s'", worker.store.printStorePath(i.second.path)); + deletePath(worker.store.toRealPath(worker.store.printStorePath(i.second.path))); } /* Don't do a remote build if the derivation has the attribute @@ -1414,11 +1441,11 @@ void DerivationGoal::tryToBuild() buildMode == bmRepair ? "repairing outputs of '%s'" : buildMode == bmCheck ? "checking outputs of '%s'" : nrRounds > 1 ? "building '%s' (round %d/%d)" : - "building '%s'", drvPath, curRound, nrRounds); - fmt("building '%s'", drvPath); + "building '%s'", worker.store.printStorePath(drvPath), curRound, nrRounds); + fmt("building '%s'", worker.store.printStorePath(drvPath)); if (hook) msg += fmt(" on '%s'", machineName); act = std::make_unique(*logger, lvlInfo, actBuild, msg, - Logger::Fields{drvPath, hook ? machineName : "", curRound, nrRounds}); + Logger::Fields{worker.store.printStorePath(drvPath), hook ? 
machineName : "", curRound, nrRounds}); mcRunningBuilds = std::make_unique>(worker.runningBuilds); worker.updateProgress(); }; @@ -1486,13 +1513,15 @@ void replaceValidPath(const Path & storePath, const Path tmpPath) Path oldPath = (format("%1%.old-%2%-%3%") % storePath % getpid() % random()).str(); if (pathExists(storePath)) rename(storePath.c_str(), oldPath.c_str()); - if (rename(tmpPath.c_str(), storePath.c_str()) == -1) - throw SysError(format("moving '%1%' to '%2%'") % tmpPath % storePath); + if (rename(tmpPath.c_str(), storePath.c_str()) == -1) { + rename(oldPath.c_str(), storePath.c_str()); // attempt to recover + throw SysError("moving '%s' to '%s'", tmpPath, storePath); + } deletePath(oldPath); } -MakeError(NotDeterministic, BuildError) +MakeError(NotDeterministic, BuildError); void DerivationGoal::buildDone() @@ -1504,13 +1533,15 @@ void DerivationGoal::buildDone() uid and then messing around with our output. */ Finally releaseBuildUser([&]() { buildUser.reset(); }); + sandboxMountNamespace = -1; + /* Since we got an EOF on the logger pipe, the builder is presumed to have terminated. In fact, the builder could also have simply have closed its end of the pipe, so just to be sure, kill it. */ int status = hook ? hook->pid.kill() : pid.kill(); - debug(format("builder process for '%1%' finished") % drvPath); + debug("builder process for '%s' finished", worker.store.printStorePath(drvPath)); result.timesBuilt++; result.stopTime = time(0); @@ -1522,8 +1553,8 @@ void DerivationGoal::buildDone() if (hook) { hook->builderOut.readSide = -1; hook->fromHook.readSide = -1; - } - else builderOut.readSide = -1; + } else + builderOut.readSide = -1; /* Close the log file. */ closeLogFile(); @@ -1535,6 +1566,9 @@ void DerivationGoal::buildDone() root. */ if (buildUser) buildUser->kill(); + /* Terminate the recursive Nix daemon. */ + stopDaemon(); + bool diskFull = false; try { @@ -1563,12 +1597,15 @@ void DerivationGoal::buildDone() /* Move paths out of the chroot for easier debugging of build failures. */ if (useChroot && buildMode == bmNormal) - for (auto & i : missingPaths) - if (pathExists(chrootRootDir + i)) - rename((chrootRootDir + i).c_str(), i.c_str()); + for (auto & i : missingPaths) { + auto p = worker.store.printStorePath(i); + if (pathExists(chrootRootDir + p)) + rename((chrootRootDir + p).c_str(), p.c_str()); + } - std::string msg = (format("builder for '%1%' %2%") - % drvPath % statusToString(status)).str(); + auto msg = fmt("builder for '%s' %s", + worker.store.printStorePath(drvPath), + statusToString(status)); if (!settings.verboseBuild && !logTail.empty()) { msg += (format("; last %d log lines:") % logTail.size()).str(); @@ -1586,6 +1623,61 @@ void DerivationGoal::buildDone() being valid. 
*/ registerOutputs(); + if (settings.postBuildHook != "") { + Activity act(*logger, lvlInfo, actPostBuildHook, + fmt("running post-build-hook '%s'", settings.postBuildHook), + Logger::Fields{worker.store.printStorePath(drvPath)}); + PushActivity pact(act.id); + auto outputPaths = drv->outputPaths(); + std::map hookEnvironment = getEnv(); + + hookEnvironment.emplace("DRV_PATH", worker.store.printStorePath(drvPath)); + hookEnvironment.emplace("OUT_PATHS", chomp(concatStringsSep(" ", worker.store.printStorePathSet(outputPaths)))); + + RunOptions opts(settings.postBuildHook, {}); + opts.environment = hookEnvironment; + + struct LogSink : Sink { + Activity & act; + std::string currentLine; + + LogSink(Activity & act) : act(act) { } + + void operator() (const unsigned char * data, size_t len) override { + for (size_t i = 0; i < len; i++) { + auto c = data[i]; + + if (c == '\n') { + flushLine(); + } else { + currentLine += c; + } + } + } + + void flushLine() { + if (settings.verboseBuild) { + printError("post-build-hook: " + currentLine); + } else { + act.result(resPostBuildLogLine, currentLine); + } + currentLine.clear(); + } + + ~LogSink() { + if (currentLine != "") { + currentLine += '\n'; + flushLine(); + } + } + }; + LogSink sink(act); + + opts.standardOut = &sink; + opts.mergeStderrToStdout = true; + runProgram2(opts); + } + if (buildMode == bmCheck) { done(BuildResult::Built); return; @@ -1593,7 +1685,7 @@ void DerivationGoal::buildDone() /* Delete unused redirected outputs (when doing hash rewriting). */ for (auto & i : redirectedOutputs) - deletePath(i.second); + deletePath(worker.store.toRealPath(worker.store.printStorePath(i.second))); /* Delete the chroot (if we were using one). */ autoDelChroot.reset(); /* this runs the destructor */ @@ -1658,7 +1750,7 @@ HookReply DerivationGoal::tryBuildHook() << "try" << (worker.getNrLocalBuilds() < settings.maxBuildJobs ? 1 : 0) << drv->platform - << drvPath + << worker.store.printStorePath(drvPath) << parsedDrv->getRequiredSystemFeatures(); worker.hook->sink.flush(); @@ -1691,7 +1783,7 @@ HookReply DerivationGoal::tryBuildHook() else if (reply == "postpone") return rpPostpone; else if (reply != "accept") - throw Error(format("bad hook reply '%1%'") % reply); + throw Error("bad hook reply '%s'", reply); } catch (SysError & e) { if (e.errNo == EPIPE) { @@ -1709,11 +1801,11 @@ HookReply DerivationGoal::tryBuildHook() /* Tell the hook all the inputs that have to be copied to the remote system. */ - hook->sink << inputPaths; + writeStorePaths(worker.store, hook->sink, inputPaths); /* Tell the hooks the missing outputs that have to be copied back from the remote system. */ - hook->sink << missingPaths; + writeStorePaths(worker.store, hook->sink, missingPaths); hook->sink = FdSink(); hook->toHook.writeSide = -1; @@ -1730,10 +1822,10 @@ HookReply DerivationGoal::tryBuildHook() } -void chmod_(const Path & path, mode_t mode) +static void chmod_(const Path & path, mode_t mode) { if (chmod(path.c_str(), mode) == -1) - throw SysError(format("setting permissions on '%1%'") % path); + throw SysError("setting permissions on '%s'", path); } @@ -1744,33 +1836,25 @@ int childEntry(void * arg) } -PathSet DerivationGoal::exportReferences(PathSet storePaths) +StorePathSet DerivationGoal::exportReferences(const StorePathSet & storePaths) { - PathSet paths; - - for (auto storePath : storePaths) { - - /* Check that the store path is valid. 
*/ - if (!worker.store.isInStore(storePath)) - throw BuildError(format("'exportReferencesGraph' contains a non-store path '%1%'") - % storePath); - - storePath = worker.store.toStorePath(storePath); + StorePathSet paths; + for (auto & storePath : storePaths) { if (!inputPaths.count(storePath)) - throw BuildError("cannot export references of path '%s' because it is not in the input closure of the derivation", storePath); + throw BuildError("cannot export references of path '%s' because it is not in the input closure of the derivation", worker.store.printStorePath(storePath)); - worker.store.computeFSClosure(storePath, paths); + worker.store.computeFSClosure(singleton(storePath), paths); } /* If there are derivations in the graph, then include their outputs as well. This is useful if you want to do things like passing all build-time dependencies of some path to a derivation that builds a NixOS DVD image. */ - PathSet paths2(paths); + auto paths2 = cloneStorePathSet(paths); for (auto & j : paths2) { - if (isDerivation(j)) { + if (j.isDerivation()) { Derivation drv = worker.store.derivationFromPath(j); for (auto & k : drv.outputs) worker.store.computeFSClosure(k.second.path, paths); @@ -1796,6 +1880,21 @@ static void preloadNSS() { }); } + +void linkOrCopy(const Path & from, const Path & to) +{ + if (link(from.c_str(), to.c_str()) == -1) { + /* Hard-linking fails if we exceed the maximum link count on a + file (e.g. 32000 of ext3), which is quite possible after a + 'nix-store --optimise'. FIXME: actually, why don't we just + bind-mount in this case? */ + if (errno != EMLINK) + throw SysError("linking '%s' to '%s'", to, from); + copyPath(from, to); + } +} + + void DerivationGoal::startBuilder() { /* Right platform? */ @@ -1803,7 +1902,7 @@ void DerivationGoal::startBuilder() throw Error("a '%s' with features {%s} is required to build '%s', but I am a '%s' with features {%s}", drv->platform, concatStringsSep(", ", parsedDrv->getRequiredSystemFeatures()), - drvPath, + worker.store.printStorePath(drvPath), settings.thisSystem, concatStringsSep(", ", settings.systemFeatures)); @@ -1819,12 +1918,12 @@ void DerivationGoal::startBuilder() auto noChroot = parsedDrv->getBoolAttr("__noChroot"); if (settings.sandboxMode == smEnabled) { if (noChroot) - throw Error(format("derivation '%1%' has '__noChroot' set, " - "but that's not allowed when 'sandbox' is 'true'") % drvPath); + throw Error("derivation '%s' has '__noChroot' set, " + "but that's not allowed when 'sandbox' is 'true'", worker.store.printStorePath(drvPath)); #if __APPLE__ if (additionalSandboxProfile != "") - throw Error(format("derivation '%1%' specifies a sandbox profile, " - "but this is only allowed when 'sandbox' is 'relaxed'") % drvPath); + throw Error("derivation '%s' specifies a sandbox profile, " + "but this is only allowed when 'sandbox' is 'relaxed'", worker.store.printStorePath(drvPath)); #endif useChroot = true; } @@ -1860,21 +1959,13 @@ void DerivationGoal::startBuilder() /* Create a temporary directory where the build will take place. */ - auto drvName = storePathToName(drvPath); - tmpDir = createTempDir("", "nix-build-" + drvName, false, false, 0700); + tmpDir = createTempDir("", "nix-build-" + std::string(drvPath.name()), false, false, 0700); - /* In a sandbox, for determinism, always use the same temporary - directory. */ -#if __linux__ - tmpDirInSandbox = useChroot ? 
settings.sandboxBuildDir : tmpDir; -#else - tmpDirInSandbox = tmpDir; -#endif chownToBuilder(tmpDir); /* Substitute output placeholders with the actual output paths. */ for (auto & output : drv->outputs) - inputRewrites[hashPlaceholder(output.first)] = output.second.path; + inputRewrites[hashPlaceholder(output.first)] = worker.store.printStorePath(output.second.path); /* Construct the environment passed to the builder. */ initEnv(); @@ -1890,19 +1981,22 @@ void DerivationGoal::startBuilder() temporary build directory. The text files have the format used by `nix-store --register-validity'. However, the deriver fields are left empty. */ - string s = get(drv->env, "exportReferencesGraph"); + string s = get(drv->env, "exportReferencesGraph").value_or(""); Strings ss = tokenizeString(s); if (ss.size() % 2 != 0) throw BuildError(format("odd number of tokens in 'exportReferencesGraph': '%1%'") % s); for (Strings::iterator i = ss.begin(); i != ss.end(); ) { string fileName = *i++; - checkStoreName(fileName); /* !!! abuse of this function */ - Path storePath = *i++; + static std::regex regex("[A-Za-z_][A-Za-z0-9_.-]*"); + if (!std::regex_match(fileName, regex)) + throw Error("invalid file name '%s' in 'exportReferencesGraph'", fileName); + + auto storePath = worker.store.parseStorePath(*i++); /* Write closure info to . */ writeFile(tmpDir + "/" + fileName, worker.store.makeValidityRegistration( - exportReferences({storePath}), false, false)); + exportReferences(singleton(storePath)), false, false)); } } @@ -1932,17 +2026,19 @@ void DerivationGoal::startBuilder() dirsInChroot[tmpDirInSandbox] = tmpDir; /* Add the closure of store paths to the chroot. */ - PathSet closure; + StorePathSet closure; for (auto & i : dirsInChroot) try { if (worker.store.isInStore(i.second.source)) - worker.store.computeFSClosure(worker.store.toStorePath(i.second.source), closure); + worker.store.computeFSClosure(worker.store.parseStorePath(worker.store.toStorePath(i.second.source)), closure); } catch (InvalidPath & e) { } catch (Error & e) { throw Error(format("while processing 'sandbox-paths': %s") % e.what()); } - for (auto & i : closure) - dirsInChroot[i] = i; + for (auto & i : closure) { + auto p = worker.store.printStorePath(i); + dirsInChroot.insert_or_assign(p, p); + } PathSet allowedPaths = settings.allowedImpureHostPrefixes; @@ -1964,7 +2060,8 @@ void DerivationGoal::startBuilder() } } if (!found) - throw Error(format("derivation '%1%' requested impure path '%2%', but it was not in allowed-impure-host-deps") % drvPath % i); + throw Error("derivation '%s' requested impure path '%s', but it was not in allowed-impure-host-deps", + worker.store.printStorePath(drvPath), i); dirsInChroot[i] = i; } @@ -1974,7 +2071,7 @@ void DerivationGoal::startBuilder() environment using bind-mounts. We put it in the Nix store to ensure that we can create hard-links to non-directory inputs in the fake Nix store in the chroot (see below). */ - chrootRootDir = worker.store.toRealPath(drvPath) + ".chroot"; + chrootRootDir = worker.store.toRealPath(worker.store.printStorePath(drvPath)) + ".chroot"; deletePath(chrootRootDir); /* Clean up the chroot directory automatically. 
*/ @@ -2033,28 +2130,15 @@ void DerivationGoal::startBuilder() throw SysError(format("cannot change ownership of '%1%'") % chrootStoreDir); for (auto & i : inputPaths) { - Path r = worker.store.toRealPath(i); + auto p = worker.store.printStorePath(i); + Path r = worker.store.toRealPath(p); struct stat st; if (lstat(r.c_str(), &st)) - throw SysError(format("getting attributes of path '%1%'") % i); + throw SysError("getting attributes of path '%s'", p); if (S_ISDIR(st.st_mode)) - dirsInChroot[i] = r; - else { - Path p = chrootRootDir + i; - debug("linking '%1%' to '%2%'", p, r); - if (link(r.c_str(), p.c_str()) == -1) { - /* Hard-linking fails if we exceed the maximum - link count on a file (e.g. 32000 of ext3), - which is quite possible after a `nix-store - --optimise'. */ - if (errno != EMLINK) - throw SysError(format("linking '%1%' to '%2%'") % p % i); - StringSink sink; - dumpPath(r, sink); - StringSource source(*sink.s); - restorePath(p, source); - } - } + dirsInChroot.insert_or_assign(p, r); + else + linkOrCopy(r, chrootRootDir + p); } /* If we're repairing, checking or rebuilding part of a @@ -2063,7 +2147,7 @@ void DerivationGoal::startBuilder() (typically the dependencies of /bin/sh). Throw them out. */ for (auto & i : drv->outputs) - dirsInChroot.erase(i.second.path); + dirsInChroot.erase(worker.store.printStorePath(i.second.path)); #elif __APPLE__ /* We don't really have any parent prep work to do (yet?) @@ -2073,7 +2157,7 @@ void DerivationGoal::startBuilder() #endif } - else { + if (needsHashRewrite()) { if (pathExists(homeDir)) throw Error(format("directory '%1%' exists; please remove it") % homeDir); @@ -2095,17 +2179,17 @@ void DerivationGoal::startBuilder() corrupt outputs in advance. So rewrite them as well. */ if (buildMode == bmRepair) for (auto & i : missingPaths) - if (worker.store.isValidPath(i) && pathExists(i)) { + if (worker.store.isValidPath(i) && pathExists(worker.store.printStorePath(i))) { addHashRewrite(i); - redirectedBadOutputs.insert(i); + redirectedBadOutputs.insert(i.clone()); } } if (useChroot && settings.preBuildHook != "" && dynamic_cast(drv.get())) { printMsg(lvlChatty, format("executing pre-build hook '%1%'") % settings.preBuildHook); - auto args = useChroot ? Strings({drvPath, chrootRootDir}) : - Strings({ drvPath }); + auto args = useChroot ? Strings({worker.store.printStorePath(drvPath), chrootRootDir}) : + Strings({ worker.store.printStorePath(drvPath) }); enum BuildHookState { stBegin, stExtraChrootDirs @@ -2138,6 +2222,11 @@ void DerivationGoal::startBuilder() } } + /* Fire up a Nix daemon to process recursive Nix calls from the + builder. */ + if (parsedDrv->getRequiredSystemFeatures().count("recursive-nix")) + startDaemon(); + /* Run the builder. */ printMsg(lvlChatty, format("executing builder '%1%'") % drv->builder); @@ -2145,7 +2234,48 @@ void DerivationGoal::startBuilder() Path logFile = openLogFile(); /* Create a pipe to get the output of the builder. 
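       This patch replaces the plain pipe with a pseudoterminal, so that the
       builder's stdout/stderr appear to be connected to a terminal; together
       with the TERM setting in initEnv() this lets build tools emit colored
       output. The slave side is put into raw mode below so that the build log
       is captured without newline translation.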
*/ - builderOut.create(); + //builderOut.create(); + + builderOut.readSide = posix_openpt(O_RDWR | O_NOCTTY); + if (!builderOut.readSide) + throw SysError("opening pseudoterminal master"); + + std::string slaveName(ptsname(builderOut.readSide.get())); + + if (buildUser) { + if (chmod(slaveName.c_str(), 0600)) + throw SysError("changing mode of pseudoterminal slave"); + + if (chown(slaveName.c_str(), buildUser->getUID(), 0)) + throw SysError("changing owner of pseudoterminal slave"); + } else { + if (grantpt(builderOut.readSide.get())) + throw SysError("granting access to pseudoterminal slave"); + } + + #if 0 + // Mount the pt in the sandbox so that the "tty" command works. + // FIXME: this doesn't work with the new devpts in the sandbox. + if (useChroot) + dirsInChroot[slaveName] = {slaveName, false}; + #endif + + if (unlockpt(builderOut.readSide.get())) + throw SysError("unlocking pseudoterminal"); + + builderOut.writeSide = open(slaveName.c_str(), O_RDWR | O_NOCTTY); + if (!builderOut.writeSide) + throw SysError("opening pseudoterminal slave"); + + // Put the pt into raw mode to prevent \n -> \r\n translation. + struct termios term; + if (tcgetattr(builderOut.writeSide.get(), &term)) + throw SysError("getting pseudoterminal attributes"); + + cfmakeraw(&term); + + if (tcsetattr(builderOut.writeSide.get(), TCSANOW, &term)) + throw SysError("putting pseudoterminal into raw mode"); result.startTime = time(0); @@ -2218,17 +2348,37 @@ void DerivationGoal::startBuilder() flags |= CLONE_NEWNET; pid_t child = clone(childEntry, stack + stackSize, flags, this); - if (child == -1 && errno == EINVAL) + if (child == -1 && errno == EINVAL) { /* Fallback for Linux < 2.13 where CLONE_NEWPID and CLONE_PARENT are not allowed together. */ - child = clone(childEntry, stack + stackSize, flags & ~CLONE_NEWPID, this); + flags &= ~CLONE_NEWPID; + child = clone(childEntry, stack + stackSize, flags, this); + } + if (child == -1 && (errno == EPERM || errno == EINVAL)) { + /* Some distros patch Linux to not allow unprivileged + * user namespaces. If we get EPERM or EINVAL, try + * without CLONE_NEWUSER and see if that works. + */ + flags &= ~CLONE_NEWUSER; + child = clone(childEntry, stack + stackSize, flags, this); + } + /* Otherwise exit with EPERM so we can handle this in the + parent. This is only done when sandbox-fallback is set + to true (the default). */ + if (child == -1 && (errno == EPERM || errno == EINVAL) && settings.sandboxFallback) + _exit(1); if (child == -1) throw SysError("cloning builder process"); writeFull(builderOut.writeSide.get(), std::to_string(child) + "\n"); _exit(0); }, options); - if (helper.wait() != 0) + int res = helper.wait(); + if (res != 0 && settings.sandboxFallback) { + useChroot = false; + initTmpDir(); + goto fallback; + } else if (res != 0) throw Error("unable to start build process"); userNamespaceSync.readSide = -1; @@ -2251,14 +2401,20 @@ void DerivationGoal::startBuilder() writeFile("/proc/" + std::to_string(pid) + "/gid_map", (format("%d %d 1") % sandboxGid % hostGid).str()); - /* Signal the builder that we've updated its user - namespace. */ + /* Save the mount namespace of the child. We have to do this + *before* the child does a chroot. */ + sandboxMountNamespace = open(fmt("/proc/%d/ns/mnt", (pid_t) pid).c_str(), O_RDONLY); + if (sandboxMountNamespace.get() == -1) + throw SysError("getting sandbox mount namespace"); + + /* Signal the builder that we've updated its user namespace. 
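            The builder waits on this pipe before continuing with its sandbox
            setup, which guarantees the ordering above: the uid/gid maps are in
            place before the build runs as the sandbox user, and the mount
            namespace saved in sandboxMountNamespace still refers to the
            pre-pivot_root namespace, since that is the descriptor that
            addDependency() later passes to setns().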
*/ writeFull(userNamespaceSync.writeSide.get(), "1"); userNamespaceSync.writeSide = -1; } else #endif { + fallback: options.allowVfork = !buildUser && !drv->isBuiltin(); pid = startProcess([&]() { runChild(); @@ -2282,6 +2438,55 @@ void DerivationGoal::startBuilder() } +void DerivationGoal::initTmpDir() { + /* In a sandbox, for determinism, always use the same temporary + directory. */ +#if __linux__ + tmpDirInSandbox = useChroot ? settings.sandboxBuildDir : tmpDir; +#else + tmpDirInSandbox = tmpDir; +#endif + + /* In non-structured mode, add all bindings specified in the + derivation via the environment, except those listed in the + passAsFile attribute. Those are passed as file names pointing + to temporary files containing the contents. Note that + passAsFile is ignored in structure mode because it's not + needed (attributes are not passed through the environment, so + there is no size constraint). */ + if (!parsedDrv->getStructuredAttrs()) { + + StringSet passAsFile = tokenizeString(get(drv->env, "passAsFile").value_or("")); + for (auto & i : drv->env) { + if (passAsFile.find(i.first) == passAsFile.end()) { + env[i.first] = i.second; + } else { + auto hash = hashString(htSHA256, i.first); + string fn = ".attr-" + hash.to_string(Base32, false); + Path p = tmpDir + "/" + fn; + writeFile(p, i.second); + chownToBuilder(p); + env[i.first + "Path"] = tmpDirInSandbox + "/" + fn; + } + } + + } + + /* For convenience, set an environment pointing to the top build + directory. */ + env["NIX_BUILD_TOP"] = tmpDirInSandbox; + + /* Also set TMPDIR and variants to point to this directory. */ + env["TMPDIR"] = env["TEMPDIR"] = env["TMP"] = env["TEMP"] = tmpDirInSandbox; + + /* Explicitly set PWD to prevent problems with chroot builds. In + particular, dietlibc cannot figure out the cwd because the + inode of the current directory doesn't appear in .. (because + getdents returns the inode of the mount point). */ + env["PWD"] = tmpDirInSandbox; +} + + void DerivationGoal::initEnv() { env.clear(); @@ -2308,43 +2513,7 @@ void DerivationGoal::initEnv() /* The maximum number of cores to utilize for parallel building. */ env["NIX_BUILD_CORES"] = (format("%d") % settings.buildCores).str(); - /* In non-structured mode, add all bindings specified in the - derivation via the environment, except those listed in the - passAsFile attribute. Those are passed as file names pointing - to temporary files containing the contents. Note that - passAsFile is ignored in structure mode because it's not - needed (attributes are not passed through the environment, so - there is no size constraint). */ - if (!parsedDrv->getStructuredAttrs()) { - - StringSet passAsFile = tokenizeString(get(drv->env, "passAsFile")); - int fileNr = 0; - for (auto & i : drv->env) { - if (passAsFile.find(i.first) == passAsFile.end()) { - env[i.first] = i.second; - } else { - string fn = ".attr-" + std::to_string(fileNr++); - Path p = tmpDir + "/" + fn; - writeFile(p, i.second); - chownToBuilder(p); - env[i.first + "Path"] = tmpDirInSandbox + "/" + fn; - } - } - - } - - /* For convenience, set an environment pointing to the top build - directory. */ - env["NIX_BUILD_TOP"] = tmpDirInSandbox; - - /* Also set TMPDIR and variants to point to this directory. */ - env["TMPDIR"] = env["TEMPDIR"] = env["TMP"] = env["TEMP"] = tmpDirInSandbox; - - /* Explicitly set PWD to prevent problems with chroot builds. In - particular, dietlibc cannot figure out the cwd because the - inode of the current directory doesn't appear in .. 
(because - getdents returns the inode of the mount point). */ - env["PWD"] = tmpDirInSandbox; + initTmpDir(); /* Compatibility hack with Nix <= 0.7: if this is a fixed-output derivation, tell the builder, so that for instance `fetchurl' @@ -2363,13 +2532,16 @@ void DerivationGoal::initEnv() already know the cryptographic hash of the output). */ if (fixedOutput) { for (auto & i : parsedDrv->getStringsAttr("impureEnvVars").value_or(Strings())) - env[i] = getEnv(i); + env[i] = getEnv(i).value_or(""); } /* Currently structured log messages piggyback on stderr, but we may change that in the future. So tell the builder which file descriptor to use for that. */ env["NIX_LOG_FD"] = "2"; + + /* Trigger colored output in various tools. */ + env["TERM"] = "xterm-256color"; } @@ -2386,7 +2558,7 @@ void DerivationGoal::writeStructuredAttrs() /* Add an "outputs" object containing the output paths. */ nlohmann::json outputs; for (auto & i : drv->outputs) - outputs[i.first] = rewriteStrings(i.second.path, inputRewrites); + outputs[i.first] = rewriteStrings(worker.store.printStorePath(i.second.path), inputRewrites); json["outputs"] = outputs; /* Handle exportReferencesGraph. */ @@ -2396,9 +2568,9 @@ void DerivationGoal::writeStructuredAttrs() std::ostringstream str; { JSONPlaceholder jsonRoot(str, true); - PathSet storePaths; + StorePathSet storePaths; for (auto & p : *i) - storePaths.insert(p.get()); + storePaths.insert(worker.store.parseStorePath(p.get())); worker.store.pathInfoToJSON(jsonRoot, exportReferences(storePaths), false, true); } @@ -2407,6 +2579,7 @@ void DerivationGoal::writeStructuredAttrs() } writeFile(tmpDir + "/.attrs.json", rewriteStrings(json.dump(), inputRewrites)); + chownToBuilder(tmpDir + "/.attrs.json"); /* As a convenience to bash scripts, write a shell file that maps all attributes that are representable in bash - @@ -2475,6 +2648,317 @@ void DerivationGoal::writeStructuredAttrs() } writeFile(tmpDir + "/.attrs.sh", rewriteStrings(jsonSh, inputRewrites)); + chownToBuilder(tmpDir + "/.attrs.sh"); +} + + +/* A wrapper around LocalStore that only allows building/querying of + paths that are in the input closures of the build or were added via + recursive Nix calls. */ +struct RestrictedStore : public LocalFSStore +{ + ref next; + + DerivationGoal & goal; + + RestrictedStore(const Params & params, ref next, DerivationGoal & goal) + : Store(params), LocalFSStore(params), next(next), goal(goal) + { } + + Path getRealStoreDir() override + { return next->realStoreDir; } + + std::string getUri() override + { return next->getUri(); } + + StorePathSet queryAllValidPaths() override + { + StorePathSet paths; + for (auto & p : goal.inputPaths) paths.insert(p.clone()); + for (auto & p : goal.addedPaths) paths.insert(p.clone()); + return paths; + } + + void queryPathInfoUncached(const StorePath & path, + Callback> callback) noexcept override + { + if (goal.isAllowed(path)) { + try { + /* Censor impure information. 
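               The deriver, registration time, ultimate flag and signatures
               depend on the particular store this build happens to run
               against, so they are stripped before the path info is handed
               back to the builder; otherwise a recursive Nix call could leak
               that impure information into the build.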
*/ + auto info = std::make_shared(*next->queryPathInfo(path)); + info->deriver.reset(); + info->registrationTime = 0; + info->ultimate = false; + info->sigs.clear(); + callback(info); + } catch (InvalidPath &) { + callback(nullptr); + } + } else + callback(nullptr); + }; + + void queryReferrers(const StorePath & path, StorePathSet & referrers) override + { } + + StorePathSet queryDerivationOutputs(const StorePath & path) override + { throw Error("queryDerivationOutputs"); } + + StringSet queryDerivationOutputNames(const StorePath & path) override + { throw Error("queryDerivationOutputNames"); } + + std::optional queryPathFromHashPart(const std::string & hashPart) override + { throw Error("queryPathFromHashPart"); } + + StorePath addToStore(const string & name, const Path & srcPath, + bool recursive = true, HashType hashAlgo = htSHA256, + PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override + { throw Error("addToStore"); } + + void addToStore(const ValidPathInfo & info, Source & narSource, + RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs, + std::shared_ptr accessor = 0) override + { + next->addToStore(info, narSource, repair, checkSigs, accessor); + goal.addDependency(info.path); + } + + StorePath addToStoreFromDump(const string & dump, const string & name, + bool recursive = true, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override + { + auto path = next->addToStoreFromDump(dump, name, recursive, hashAlgo, repair); + goal.addDependency(path); + return path; + } + + StorePath addTextToStore(const string & name, const string & s, + const StorePathSet & references, RepairFlag repair = NoRepair) override + { + auto path = next->addTextToStore(name, s, references, repair); + goal.addDependency(path); + return path; + } + + void narFromPath(const StorePath & path, Sink & sink) override + { + if (!goal.isAllowed(path)) + throw InvalidPath("cannot dump unknown path '%s' in recursive Nix", printStorePath(path)); + LocalFSStore::narFromPath(path, sink); + } + + void ensurePath(const StorePath & path) override + { + if (!goal.isAllowed(path)) + throw InvalidPath("cannot substitute unknown path '%s' in recursive Nix", printStorePath(path)); + /* Nothing to be done; 'path' must already be valid. 
*/ + } + + void buildPaths(const std::vector & paths, BuildMode buildMode) override + { + if (buildMode != bmNormal) throw Error("unsupported build mode"); + + StorePathSet newPaths; + + for (auto & path : paths) { + if (path.path.isDerivation()) { + if (!goal.isAllowed(path.path)) + throw InvalidPath("cannot build unknown path '%s' in recursive Nix", printStorePath(path.path)); + auto drv = derivationFromPath(path.path); + for (auto & output : drv.outputs) + if (wantOutput(output.first, path.outputs)) + newPaths.insert(output.second.path.clone()); + } else if (!goal.isAllowed(path.path)) + throw InvalidPath("cannot build unknown path '%s' in recursive Nix", printStorePath(path.path)); + } + + next->buildPaths(paths, buildMode); + + StorePathSet closure; + next->computeFSClosure(newPaths, closure); + for (auto & path : closure) + goal.addDependency(path); + } + + BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, + BuildMode buildMode = bmNormal) override + { unsupported("buildDerivation"); } + + void addTempRoot(const StorePath & path) override + { } + + void addIndirectRoot(const Path & path) override + { } + + Roots findRoots(bool censor) override + { return Roots(); } + + void collectGarbage(const GCOptions & options, GCResults & results) override + { } + + void addSignatures(const StorePath & storePath, const StringSet & sigs) override + { unsupported("addSignatures"); } + + void queryMissing(const std::vector & targets, + StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown, + unsigned long long & downloadSize, unsigned long long & narSize) override + { + /* This is slightly impure since it leaks information to the + client about what paths will be built/substituted or are + already present. Probably not a big deal. */ + + std::vector allowed; + for (auto & path : targets) { + if (goal.isAllowed(path.path)) + allowed.emplace_back(path); + else + unknown.insert(path.path.clone()); + } + + next->queryMissing(allowed, willBuild, willSubstitute, + unknown, downloadSize, narSize); + } +}; + + +void DerivationGoal::startDaemon() +{ + settings.requireExperimentalFeature("recursive-nix"); + + Store::Params params; + params["path-info-cache-size"] = "0"; + params["store"] = worker.store.storeDir; + params["root"] = worker.store.rootDir; + params["state"] = "/no-such-path"; + params["log"] = "/no-such-path"; + auto store = make_ref(params, + ref(std::dynamic_pointer_cast(worker.store.shared_from_this())), + *this); + + addedPaths.clear(); + + auto socketName = ".nix-socket"; + Path socketPath = tmpDir + "/" + socketName; + env["NIX_REMOTE"] = "unix://" + tmpDirInSandbox + "/" + socketName; + + daemonSocket = createUnixDomainSocket(socketPath, 0600); + + chownToBuilder(socketPath); + + daemonThread = std::thread([this, store]() { + + while (true) { + + /* Accept a connection. 
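            The loop ends when stopDaemon() shuts down the listening socket,
            which makes accept() fail with EINVAL; that is the condition used
            below to break out and let the daemon thread exit.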
*/ + struct sockaddr_un remoteAddr; + socklen_t remoteAddrLen = sizeof(remoteAddr); + + AutoCloseFD remote = accept(daemonSocket.get(), + (struct sockaddr *) &remoteAddr, &remoteAddrLen); + if (!remote) { + if (errno == EINTR) continue; + if (errno == EINVAL) break; + throw SysError("accepting connection"); + } + + closeOnExec(remote.get()); + + debug("received daemon connection"); + + auto workerThread = std::thread([store, remote{std::move(remote)}]() { + FdSource from(remote.get()); + FdSink to(remote.get()); + try { + daemon::processConnection(store, from, to, + daemon::NotTrusted, daemon::Recursive, "nobody", 65535); + debug("terminated daemon connection"); + } catch (SysError &) { + ignoreException(); + } + }); + + daemonWorkerThreads.push_back(std::move(workerThread)); + } + + debug("daemon shutting down"); + }); +} + + +void DerivationGoal::stopDaemon() +{ + if (daemonSocket && shutdown(daemonSocket.get(), SHUT_RDWR) == -1) + throw SysError("shutting down daemon socket"); + + if (daemonThread.joinable()) + daemonThread.join(); + + // FIXME: should prune worker threads more quickly. + // FIXME: shutdown the client socket to speed up worker termination. + for (auto & thread : daemonWorkerThreads) + thread.join(); + daemonWorkerThreads.clear(); + + daemonSocket = -1; +} + + +void DerivationGoal::addDependency(const StorePath & path) +{ + if (isAllowed(path)) return; + + addedPaths.insert(path.clone()); + + /* If we're doing a sandbox build, then we have to make the path + appear in the sandbox. */ + if (useChroot) { + + debug("materialising '%s' in the sandbox", worker.store.printStorePath(path)); + + #if __linux__ + + Path source = worker.store.toRealPath(worker.store.printStorePath(path)); + Path target = chrootRootDir + worker.store.printStorePath(path); + debug("bind-mounting %s -> %s", target, source); + + if (pathExists(target)) + throw Error("store path '%s' already exists in the sandbox", worker.store.printStorePath(path)); + + struct stat st; + if (lstat(source.c_str(), &st)) + throw SysError("getting attributes of path '%s'", source); + + if (S_ISDIR(st.st_mode)) { + + /* Bind-mount the path into the sandbox. This requires + entering its mount namespace, which is not possible + in multithreaded programs. 
So we do this in a + child process.*/ + Pid child(startProcess([&]() { + + if (setns(sandboxMountNamespace.get(), 0) == -1) + throw SysError("entering sandbox mount namespace"); + + createDirs(target); + + if (mount(source.c_str(), target.c_str(), "", MS_BIND, 0) == -1) + throw SysError("bind mount from '%s' to '%s' failed", source, target); + + _exit(0); + })); + + int status = child.wait(); + if (status != 0) + throw Error("could not add path '%s' to sandbox", worker.store.printStorePath(path)); + + } else + linkOrCopy(source, target); + + #else + throw Error("don't know how to make path '%s' (produced by a recursive Nix call) appear in the sandbox", + worker.store.printStorePath(path)); + #endif + + } } @@ -2500,17 +2984,17 @@ void setupSeccomp() seccomp_release(ctx); }); - if (settings.thisSystem == "x86_64-linux" && + if (nativeSystem == "x86_64-linux" && seccomp_arch_add(ctx, SCMP_ARCH_X86) != 0) throw SysError("unable to add 32-bit seccomp architecture"); - if (settings.thisSystem == "x86_64-linux" && + if (nativeSystem == "x86_64-linux" && seccomp_arch_add(ctx, SCMP_ARCH_X32) != 0) throw SysError("unable to add X32 seccomp architecture"); - if (settings.thisSystem == "aarch64-linux" && + if (nativeSystem == "aarch64-linux" && seccomp_arch_add(ctx, SCMP_ARCH_ARM) != 0) - printError("unsable to add ARM seccomp architecture; this may result in spurious build failures if running 32-bit ARM processes."); + printError("unable to add ARM seccomp architecture; this may result in spurious build failures if running 32-bit ARM processes"); /* Prevent builders from creating setuid/setgid binaries. */ for (int perm : { S_ISUID, S_ISGID }) { @@ -2613,15 +3097,30 @@ void DerivationGoal::runChild() outside of the namespace. Making a subtree private is local to the namespace, though, so setting MS_PRIVATE does not affect the outside world. */ - if (mount(0, "/", 0, MS_REC|MS_PRIVATE, 0) == -1) { - throw SysError("unable to make '/' private mount"); - } + if (mount(0, "/", 0, MS_PRIVATE | MS_REC, 0) == -1) + throw SysError("unable to make '/' private"); /* Bind-mount chroot directory to itself, to treat it as a different filesystem from /, as needed for pivot_root. */ if (mount(chrootRootDir.c_str(), chrootRootDir.c_str(), 0, MS_BIND, 0) == -1) throw SysError(format("unable to bind mount '%1%'") % chrootRootDir); + /* Bind-mount the sandbox's Nix store onto itself so that + we can mark it as a "shared" subtree, allowing bind + mounts made in *this* mount namespace to be propagated + into the child namespace created by the + unshare(CLONE_NEWNS) call below. + + Marking chrootRootDir as MS_SHARED causes pivot_root() + to fail with EINVAL. Don't know why. */ + Path chrootStoreDir = chrootRootDir + worker.store.storeDir; + + if (mount(chrootStoreDir.c_str(), chrootStoreDir.c_str(), 0, MS_BIND, 0) == -1) + throw SysError("unable to bind mount the Nix store", chrootStoreDir); + + if (mount(0, chrootStoreDir.c_str(), 0, MS_SHARED, 0) == -1) + throw SysError("unable to make '%s' shared", chrootStoreDir); + /* Set up a nearly empty /dev, unless the user asked to bind-mount the host /dev. */ Strings ss; @@ -2647,7 +3146,13 @@ void DerivationGoal::runChild() on. */ if (fixedOutput) { ss.push_back("/etc/resolv.conf"); - ss.push_back("/etc/nsswitch.conf"); + + // Only use nss functions to resolve hosts and + // services. Don’t use it for anything else that may + // be configured for this system. This limits the + // potential impurities introduced in fixed outputs. 
+ writeFile(chrootRootDir + "/etc/nsswitch.conf", "hosts: files dns\nservices: files\n"); + ss.push_back("/etc/services"); ss.push_back("/etc/hosts"); if (pathExists("/var/run/nscd/socket")) @@ -2717,6 +3222,19 @@ void DerivationGoal::runChild() } } + /* Unshare this mount namespace. This is necessary because + pivot_root() below changes the root of the mount + namespace. This means that the call to setns() in + addDependency() would hide the host's filesystem, + making it impossible to bind-mount paths from the host + Nix store into the sandbox. Therefore, we save the + pre-pivot_root namespace in + sandboxMountNamespace. Since we made /nix/store a + shared subtree above, this allows addDependency() to + make paths appear in the sandbox. */ + if (unshare(CLONE_NEWNS) == -1) + throw SysError("unsharing mount namespace"); + /* Do the chroot(). */ if (chdir(chrootRootDir.c_str()) == -1) throw SysError(format("cannot change directory to '%1%'") % chrootRootDir); @@ -2824,7 +3342,7 @@ void DerivationGoal::runChild() ; } #if __APPLE__ - else if (getEnv("_NIX_TEST_NO_SANDBOX") == "") { + else { /* This has to appear before import statements. */ std::string sandboxProfile = "(version 1)\n"; @@ -2853,8 +3371,10 @@ void DerivationGoal::runChild() } /* Add all our input paths to the chroot */ - for (auto & i : inputPaths) - dirsInChroot[i] = i; + for (auto & i : inputPaths) { + auto p = worker.store.printStorePath(i); + dirsInChroot[p] = p; + } /* Violations will go to the syslog if you set this. Unfortunately the destination does not appear to be configurable */ if (settings.darwinLogSandboxViolations) { @@ -2870,9 +3390,13 @@ void DerivationGoal::runChild() /* Our rwx outputs */ sandboxProfile += "(allow file-read* file-write* process-exec\n"; - for (auto & i : missingPaths) { - sandboxProfile += (format("\t(subpath \"%1%\")\n") % i.c_str()).str(); - } + for (auto & i : missingPaths) + sandboxProfile += fmt("\t(subpath \"%s\")\n", worker.store.printStorePath(i)); + + /* Also add redirected outputs to the chroot */ + for (auto & i : redirectedOutputs) + sandboxProfile += fmt("\t(subpath \"%s\")\n", worker.store.printStorePath(i.second)); + sandboxProfile += ")\n"; /* Our inputs (transitive dependencies and any impurities computed above) @@ -2891,19 +3415,19 @@ void DerivationGoal::runChild() if (lstat(path.c_str(), &st)) { if (i.second.optional && errno == ENOENT) continue; - throw SysError(format("getting attributes of path '%1%'") % path); + throw SysError("getting attributes of path '%s", path); } if (S_ISDIR(st.st_mode)) - sandboxProfile += (format("\t(subpath \"%1%\")\n") % path).str(); + sandboxProfile += fmt("\t(subpath \"%s\")\n", path); else - sandboxProfile += (format("\t(literal \"%1%\")\n") % path).str(); + sandboxProfile += fmt("\t(literal \"%s\")\n", path); } sandboxProfile += ")\n"; /* Allow file-read* on full directory hierarchy to self. Allows realpath() */ sandboxProfile += "(allow file-read*\n"; for (auto & i : ancestry) { - sandboxProfile += (format("\t(literal \"%1%\")\n") % i.c_str()).str(); + sandboxProfile += fmt("\t(literal \"%s\")\n", i); } sandboxProfile += ")\n"; @@ -2922,31 +3446,36 @@ void DerivationGoal::runChild() /* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms to find temporary directories, so we want to open up a broader place for them to dump their files, if needed. 
*/ - Path globalTmpDir = canonPath(getEnv("TMPDIR", "/tmp"), true); + Path globalTmpDir = canonPath(getEnv("TMPDIR").value_or("/tmp"), true); /* They don't like trailing slashes on subpath directives */ if (globalTmpDir.back() == '/') globalTmpDir.pop_back(); - builder = "/usr/bin/sandbox-exec"; - args.push_back("sandbox-exec"); - args.push_back("-f"); - args.push_back(sandboxFile); - args.push_back("-D"); - args.push_back("_GLOBAL_TMP_DIR=" + globalTmpDir); - args.push_back("-D"); - args.push_back("IMPORT_DIR=" + settings.nixDataDir + "/nix/sandbox/"); - if (allowLocalNetworking) { + if (getEnv("_NIX_TEST_NO_SANDBOX") != "1") { + builder = "/usr/bin/sandbox-exec"; + args.push_back("sandbox-exec"); + args.push_back("-f"); + args.push_back(sandboxFile); args.push_back("-D"); - args.push_back(string("_ALLOW_LOCAL_NETWORKING=1")); + args.push_back("_GLOBAL_TMP_DIR=" + globalTmpDir); + args.push_back("-D"); + args.push_back("IMPORT_DIR=" + settings.nixDataDir + "/nix/sandbox/"); + if (allowLocalNetworking) { + args.push_back("-D"); + args.push_back(string("_ALLOW_LOCAL_NETWORKING=1")); + } + args.push_back(drv->builder); + } else { + builder = drv->builder.c_str(); + args.push_back(std::string(baseNameOf(drv->builder))); } - args.push_back(drv->builder); } -#endif +#else else { builder = drv->builder.c_str(); - string builderBasename = baseNameOf(drv->builder); - args.push_back(builderBasename); + args.push_back(std::string(baseNameOf(drv->builder))); } +#endif for (auto & i : drv->args) args.push_back(rewriteStrings(i, inputRewrites)); @@ -2959,7 +3488,7 @@ void DerivationGoal::runChild() try { logger = makeJSONLogger(*logger); - BasicDerivation drv2(*drv); + BasicDerivation & drv2(*drv); for (auto & e : drv2.env) e.second = rewriteStrings(e.second, inputRewrites); @@ -2967,6 +3496,8 @@ void DerivationGoal::runChild() builtinFetchurl(drv2, netrcData); else if (drv->builder == "builtin:buildenv") builtinBuildenv(drv2); + else if (drv->builder == "builtin:unpack-channel") + builtinUnpackChannel(drv2); else throw Error(format("unsupported builtin function '%1%'") % string(drv->builder, 8)); _exit(0); @@ -2990,16 +3521,15 @@ void DerivationGoal::runChild() /* Parse a list of reference specifiers. Each element must either be a store path, or the symbolic name of the output of the derivation (such as `out'). 
*/ -PathSet parseReferenceSpecifiers(Store & store, const BasicDerivation & drv, const Strings & paths) +StorePathSet parseReferenceSpecifiers(Store & store, const BasicDerivation & drv, const Strings & paths) { - PathSet result; + StorePathSet result; for (auto & i : paths) { if (store.isStorePath(i)) - result.insert(i); - else if (drv.outputs.find(i) != drv.outputs.end()) - result.insert(drv.outputs.find(i)->second.path); - else throw BuildError( - format("derivation contains an illegal reference specifier '%1%'") % i); + result.insert(store.parseStorePath(i)); + else if (drv.outputs.count(i)) + result.insert(drv.outputs.find(i)->second.path.clone()); + else throw BuildError("derivation contains an illegal reference specifier '%s'", i); } return result; } @@ -3025,22 +3555,38 @@ void DerivationGoal::registerOutputs() InodesSeen inodesSeen; Path checkSuffix = ".check"; - bool runDiffHook = settings.runDiffHook; - bool keepPreviousRound = settings.keepFailed || runDiffHook; + bool keepPreviousRound = settings.keepFailed || settings.runDiffHook; std::exception_ptr delayedException; + /* The paths that can be referenced are the input closures, the + output paths, and any paths that have been built via recursive + Nix calls. */ + StorePathSet referenceablePaths; + for (auto & p : inputPaths) referenceablePaths.insert(p.clone()); + for (auto & i : drv->outputs) referenceablePaths.insert(i.second.path.clone()); + for (auto & p : addedPaths) referenceablePaths.insert(p.clone()); + /* Check whether the output paths were created, and grep each output path to determine what other paths it references. Also make all output paths read-only. */ for (auto & i : drv->outputs) { - Path path = i.second.path; - if (missingPaths.find(path) == missingPaths.end()) continue; - - ValidPathInfo info; + auto path = worker.store.printStorePath(i.second.path); + if (!missingPaths.count(i.second.path)) continue; Path actualPath = path; - if (useChroot) { + if (needsHashRewrite()) { + auto r = redirectedOutputs.find(i.second.path); + if (r != redirectedOutputs.end()) { + auto redirected = worker.store.toRealPath(worker.store.printStorePath(r->second)); + if (buildMode == bmRepair + && redirectedBadOutputs.count(i.second.path) + && pathExists(redirected)) + replaceValidPath(path, redirected); + if (buildMode == bmCheck) + actualPath = redirected; + } + } else if (useChroot) { actualPath = chrootRootDir + path; if (pathExists(actualPath)) { /* Move output paths from the chroot to the Nix store. 
*/ @@ -3051,23 +3597,15 @@ void DerivationGoal::registerOutputs() throw SysError(format("moving build output '%1%' from the sandbox to the Nix store") % path); } if (buildMode != bmCheck) actualPath = worker.store.toRealPath(path); - } else { - Path redirected = redirectedOutputs[path]; - if (buildMode == bmRepair - && redirectedBadOutputs.find(path) != redirectedBadOutputs.end() - && pathExists(redirected)) - replaceValidPath(path, redirected); - if (buildMode == bmCheck && redirected != "") - actualPath = redirected; } struct stat st; if (lstat(actualPath.c_str(), &st) == -1) { if (errno == ENOENT) throw BuildError( - format("builder for '%1%' failed to produce output path '%2%'") - % drvPath % path); - throw SysError(format("getting attributes of path '%1%'") % actualPath); + "builder for '%s' failed to produce output path '%s'", + worker.store.printStorePath(drvPath), path); + throw SysError("getting attributes of path '%s'", actualPath); } #ifndef __CYGWIN__ @@ -3104,13 +3642,14 @@ void DerivationGoal::registerOutputs() /* Check that fixed-output derivations produced the right outputs (i.e., the content hash should match the specified hash). */ + std::string ca; + if (fixedOutput) { auto [recursive, h] = i.second.parseHashInfo(); if (!recursive) { - /* The output path should be a regular file without - execute permission. */ + /* The output path should be a regular file without execute permission. */ if (!S_ISREG(st.st_mode) || (st.st_mode & S_IXUSR) != 0) throw BuildError( format("output path '%1%' should be a non-executable regular file") % path); @@ -3120,17 +3659,18 @@ void DerivationGoal::registerOutputs() the derivation to its content-addressed location. */ Hash h2 = recursive ? hashPath(h.type, actualPath).first : hashFile(h.type, actualPath); - Path dest = worker.store.makeFixedOutputPath(recursive, h2, storePathToName(path)); + auto dest = worker.store.makeFixedOutputPath(recursive, h2, i.second.path.name()); if (h != h2) { /* Throw an error after registering the path as valid. */ + worker.hashMismatch = true; delayedException = std::make_exception_ptr( BuildError("hash mismatch in fixed-output derivation '%s':\n wanted: %s\n got: %s", - dest, h.to_string(), h2.to_string())); + worker.store.printStorePath(dest), h.to_string(SRI), h2.to_string(SRI))); - Path actualDest = worker.store.toRealPath(dest); + Path actualDest = worker.store.toRealPath(worker.store.printStorePath(dest)); if (worker.store.isValidPath(dest)) std::rethrow_exception(delayedException); @@ -3139,16 +3679,16 @@ void DerivationGoal::registerOutputs() PathLocks outputLocks({actualDest}); deletePath(actualDest); if (rename(actualPath.c_str(), actualDest.c_str()) == -1) - throw SysError(format("moving '%1%' to '%2%'") % actualPath % dest); + throw SysError("moving '%s' to '%s'", actualPath, worker.store.printStorePath(dest)); } - path = dest; + path = worker.store.printStorePath(dest); actualPath = actualDest; } else - assert(path == dest); + assert(worker.store.parseStorePath(path) == dest); - info.ca = makeFixedOutputCA(recursive, h2); + ca = makeFixedOutputCA(recursive, h2); } /* Get rid of all weird permissions. This also checks that @@ -3162,61 +3702,69 @@ void DerivationGoal::registerOutputs() verify later on whether nobody has messed with the store. 
*/ debug("scanning for references inside '%1%'", path); HashResult hash; - PathSet references = scanForReferences(actualPath, allPaths, hash); + auto references = worker.store.parseStorePathSet(scanForReferences(actualPath, worker.store.printStorePathSet(referenceablePaths), hash)); if (buildMode == bmCheck) { - if (!worker.store.isValidPath(path)) continue; - auto info = *worker.store.queryPathInfo(path); + if (!worker.store.isValidPath(worker.store.parseStorePath(path))) continue; + ValidPathInfo info(*worker.store.queryPathInfo(worker.store.parseStorePath(path))); if (hash.first != info.narHash) { - if (settings.keepFailed) { + worker.checkMismatch = true; + if (settings.runDiffHook || settings.keepFailed) { Path dst = worker.store.toRealPath(path + checkSuffix); deletePath(dst); if (rename(actualPath.c_str(), dst.c_str())) throw SysError(format("renaming '%1%' to '%2%'") % actualPath % dst); - throw Error(format("derivation '%1%' may not be deterministic: output '%2%' differs from '%3%'") - % drvPath % path % dst); + + handleDiffHook( + buildUser ? buildUser->getUID() : getuid(), + buildUser ? buildUser->getGID() : getgid(), + path, dst, worker.store.printStorePath(drvPath), tmpDir); + + throw NotDeterministic("derivation '%s' may not be deterministic: output '%s' differs from '%s'", + worker.store.printStorePath(drvPath), path, dst); } else - throw Error(format("derivation '%1%' may not be deterministic: output '%2%' differs") - % drvPath % path); + throw NotDeterministic("derivation '%s' may not be deterministic: output '%s' differs", + worker.store.printStorePath(drvPath), path); } - /* Since we verified the build, it's now ultimately - trusted. */ + /* Since we verified the build, it's now ultimately trusted. */ if (!info.ultimate) { info.ultimate = true; worker.store.signPathInfo(info); - worker.store.registerValidPaths({info}); + ValidPathInfos infos; + infos.push_back(std::move(info)); + worker.store.registerValidPaths(infos); } continue; } - /* For debugging, print out the referenced and unreferenced - paths. */ + /* For debugging, print out the referenced and unreferenced paths. 
*/ for (auto & i : inputPaths) { - PathSet::iterator j = references.find(i); + auto j = references.find(i); if (j == references.end()) - debug(format("unreferenced input: '%1%'") % i); + debug("unreferenced input: '%1%'", worker.store.printStorePath(i)); else - debug(format("referenced input: '%1%'") % i); + debug("referenced input: '%1%'", worker.store.printStorePath(i)); } if (curRound == nrRounds) { worker.store.optimisePath(actualPath); // FIXME: combine with scanForReferences() - worker.markContentsGood(path); + worker.markContentsGood(worker.store.parseStorePath(path)); } - info.path = path; + ValidPathInfo info(worker.store.parseStorePath(path)); info.narHash = hash.first; info.narSize = hash.second; - info.references = references; - info.deriver = drvPath; + info.references = std::move(references); + info.deriver = drvPath.clone(); info.ultimate = true; + info.ca = ca; worker.store.signPathInfo(info); if (!info.references.empty()) info.ca.clear(); - infos[i.first] = info; + infos.emplace(i.first, std::move(info)); } if (buildMode == bmCheck) return; @@ -3231,22 +3779,19 @@ void DerivationGoal::registerOutputs() for (auto i = prevInfos.begin(), j = infos.begin(); i != prevInfos.end(); ++i, ++j) if (!(*i == *j)) { result.isNonDeterministic = true; - Path prev = i->second.path + checkSuffix; + Path prev = worker.store.printStorePath(i->second.path) + checkSuffix; bool prevExists = keepPreviousRound && pathExists(prev); auto msg = prevExists - ? fmt("output '%1%' of '%2%' differs from '%3%' from previous round", i->second.path, drvPath, prev) - : fmt("output '%1%' of '%2%' differs from previous round", i->second.path, drvPath); + ? fmt("output '%s' of '%s' differs from '%s' from previous round", + worker.store.printStorePath(i->second.path), worker.store.printStorePath(drvPath), prev) + : fmt("output '%s' of '%s' differs from previous round", + worker.store.printStorePath(i->second.path), worker.store.printStorePath(drvPath)); - auto diffHook = settings.diffHook; - if (prevExists && diffHook != "" && runDiffHook) { - try { - auto diff = runProgram(diffHook, true, {prev, i->second.path}); - if (diff != "") - printError(chomp(diff)); - } catch (Error & error) { - printError("diff hook execution failed: %s", error.what()); - } - } + handleDiffHook( + buildUser ? buildUser->getUID() : getuid(), + buildUser ? buildUser->getGID() : getgid(), + prev, worker.store.printStorePath(i->second.path), + worker.store.printStorePath(drvPath), tmpDir); if (settings.enforceDeterminism) throw NotDeterministic(msg); @@ -3256,20 +3801,20 @@ void DerivationGoal::registerOutputs() } } - /* If this is the first round of several, then move the output out - of the way. */ + /* If this is the first round of several, then move the output out of the way. */ if (nrRounds > 1 && curRound == 1 && curRound < nrRounds && keepPreviousRound) { for (auto & i : drv->outputs) { - Path prev = i.second.path + checkSuffix; + auto path = worker.store.printStorePath(i.second.path); + Path prev = path + checkSuffix; deletePath(prev); - Path dst = i.second.path + checkSuffix; - if (rename(i.second.path.c_str(), dst.c_str())) - throw SysError(format("renaming '%1%' to '%2%'") % i.second.path % dst); + Path dst = path + checkSuffix; + if (rename(path.c_str(), dst.c_str())) + throw SysError("renaming '%s' to '%s'", path, dst); } } if (curRound < nrRounds) { - prevInfos = infos; + prevInfos = std::move(infos); return; } @@ -3277,7 +3822,7 @@ void DerivationGoal::registerOutputs() if the result was not determistic? 
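The multi-round comparison above is what backs the nrRounds/enforceDeterminism machinery: each round's outputs are recorded, and a later round that produces a different ValidPathInfo marks the build as non-deterministic. A standalone sketch of that comparison, with placeholder hashes standing in for the real ValidPathInfo records:

#include <iostream>
#include <map>
#include <string>
#include <vector>

int main()
{
    // Pretend per-round results: round -> (output name -> NAR hash).
    std::vector<std::map<std::string, std::string>> rounds = {
        {{"out", "sha256-aaaa"}},
        {{"out", "sha256-bbbb"}},   // differs from round 1
    };

    std::map<std::string, std::string> prevInfos;
    for (size_t curRound = 1; curRound <= rounds.size(); ++curRound) {
        auto infos = rounds[curRound - 1];
        if (curRound > 1 && infos != prevInfos)
            std::cerr << "output 'out' differs from previous round\n";
        prevInfos = std::move(infos);   // mirrors `prevInfos = std::move(infos)` above
    }
}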
*/ if (curRound == nrRounds) { for (auto & i : drv->outputs) { - Path prev = i.second.path + checkSuffix; + Path prev = worker.store.printStorePath(i.second.path) + checkSuffix; deletePath(prev); } } @@ -3302,7 +3847,7 @@ void DerivationGoal::checkOutputs(const std::map & outputs) { std::map outputsByPath; for (auto & output : outputs) - outputsByPath.emplace(output.second.path, output.second); + outputsByPath.emplace(worker.store.printStorePath(output.second.path), output.second); for (auto & output : outputs) { auto & outputName = output.first; @@ -3318,76 +3863,77 @@ void DerivationGoal::checkOutputs(const std::map & outputs) /* Compute the closure and closure size of some output. This is slightly tricky because some of its references (namely other outputs) may not be valid yet. */ - auto getClosure = [&](const Path & path) + auto getClosure = [&](const StorePath & path) { uint64_t closureSize = 0; - PathSet pathsDone; - std::queue pathsLeft; - pathsLeft.push(path); + StorePathSet pathsDone; + std::queue pathsLeft; + pathsLeft.push(path.clone()); while (!pathsLeft.empty()) { - auto path = pathsLeft.front(); + auto path = pathsLeft.front().clone(); pathsLeft.pop(); - if (!pathsDone.insert(path).second) continue; + if (!pathsDone.insert(path.clone()).second) continue; - auto i = outputsByPath.find(path); + auto i = outputsByPath.find(worker.store.printStorePath(path)); if (i != outputsByPath.end()) { closureSize += i->second.narSize; for (auto & ref : i->second.references) - pathsLeft.push(ref); + pathsLeft.push(ref.clone()); } else { auto info = worker.store.queryPathInfo(path); closureSize += info->narSize; for (auto & ref : info->references) - pathsLeft.push(ref); + pathsLeft.push(ref.clone()); } } - return std::make_pair(pathsDone, closureSize); + return std::make_pair(std::move(pathsDone), closureSize); }; auto applyChecks = [&](const Checks & checks) { if (checks.maxSize && info.narSize > *checks.maxSize) throw BuildError("path '%s' is too large at %d bytes; limit is %d bytes", - info.path, info.narSize, *checks.maxSize); + worker.store.printStorePath(info.path), info.narSize, *checks.maxSize); if (checks.maxClosureSize) { uint64_t closureSize = getClosure(info.path).second; if (closureSize > *checks.maxClosureSize) throw BuildError("closure of path '%s' is too large at %d bytes; limit is %d bytes", - info.path, closureSize, *checks.maxClosureSize); + worker.store.printStorePath(info.path), closureSize, *checks.maxClosureSize); } auto checkRefs = [&](const std::optional & value, bool allowed, bool recursive) { if (!value) return; - PathSet spec = parseReferenceSpecifiers(worker.store, *drv, *value); + auto spec = parseReferenceSpecifiers(worker.store, *drv, *value); - PathSet used = recursive ? getClosure(info.path).first : info.references; + auto used = recursive ? 
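The getClosure() helper above walks the references graph breadth-first, preferring the not-yet-registered outputs recorded in outputsByPath over queryPathInfo(). A self-contained model of that walk over a made-up reference graph, using plain strings for store paths:

#include <cstdint>
#include <iostream>
#include <map>
#include <queue>
#include <set>
#include <string>
#include <vector>

struct Info { uint64_t narSize; std::vector<std::string> references; };

int main()
{
    // Example reference graph; in the real code this comes from
    // outputsByPath and worker.store.queryPathInfo().
    std::map<std::string, Info> graph = {
        {"/nix/store/aaaa-app",  {1000, {"/nix/store/bbbb-lib"}}},
        {"/nix/store/bbbb-lib",  { 500, {"/nix/store/cccc-libc"}}},
        {"/nix/store/cccc-libc", {2000, {}}},
    };

    uint64_t closureSize = 0;
    std::set<std::string> pathsDone;
    std::queue<std::string> pathsLeft;
    pathsLeft.push("/nix/store/aaaa-app");
    while (!pathsLeft.empty()) {
        auto path = pathsLeft.front(); pathsLeft.pop();
        if (!pathsDone.insert(path).second) continue;   // already visited
        auto & info = graph.at(path);
        closureSize += info.narSize;
        for (auto & ref : info.references) pathsLeft.push(ref);
    }
    std::cout << "closure size: " << closureSize << " bytes\n";   // 3500
}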
cloneStorePathSet(getClosure(info.path).first) : cloneStorePathSet(info.references); if (recursive && checks.ignoreSelfRefs) used.erase(info.path); - PathSet badPaths; + StorePathSet badPaths; for (auto & i : used) if (allowed) { if (!spec.count(i)) - badPaths.insert(i); + badPaths.insert(i.clone()); } else { if (spec.count(i)) - badPaths.insert(i); + badPaths.insert(i.clone()); } if (!badPaths.empty()) { string badPathsStr; for (auto & i : badPaths) { badPathsStr += "\n "; - badPathsStr += i; + badPathsStr += worker.store.printStorePath(i); } - throw BuildError("output '%s' is not allowed to refer to the following paths:%s", info.path, badPathsStr); + throw BuildError("output '%s' is not allowed to refer to the following paths:%s", + worker.store.printStorePath(info.path), badPathsStr); } }; @@ -3419,7 +3965,7 @@ void DerivationGoal::checkOutputs(const std::map & outputs) Strings res; for (auto j = i->begin(); j != i->end(); ++j) { if (!j->is_string()) - throw Error("attribute '%s' of derivation '%s' must be a list of strings", name, drvPath); + throw Error("attribute '%s' of derivation '%s' must be a list of strings", name, worker.store.printStorePath(drvPath)); res.push_back(j->get()); } checks.disallowedRequisites = res; @@ -3456,7 +4002,7 @@ Path DerivationGoal::openLogFile() if (!settings.keepLog) return ""; - string baseName = baseNameOf(drvPath); + auto baseName = std::string(baseNameOf(worker.store.printStorePath(drvPath))); /* Create a log file. */ Path dir = fmt("%s/%s/%s/", worker.store.logDir, worker.store.drvsLogDir, string(baseName, 0, 2)); @@ -3495,9 +4041,7 @@ void DerivationGoal::deleteTmpDir(bool force) /* Don't keep temporary directories for builtins because they might have privileged stuff (like a copy of netrc). */ if (settings.keepFailed && !force && !drv->isBuiltin()) { - printError( - format("note: keeping build directory '%2%'") - % drvPath % tmpDir); + printError("note: keeping build directory '%s'", tmpDir); chmod(tmpDir.c_str(), 0755); } else @@ -3576,31 +4120,31 @@ void DerivationGoal::flushLine() } -PathSet DerivationGoal::checkPathValidity(bool returnValid, bool checkHash) +StorePathSet DerivationGoal::checkPathValidity(bool returnValid, bool checkHash) { - PathSet result; + StorePathSet result; for (auto & i : drv->outputs) { if (!wantOutput(i.first, wantedOutputs)) continue; bool good = worker.store.isValidPath(i.second.path) && (!checkHash || worker.pathContentsGood(i.second.path)); - if (good == returnValid) result.insert(i.second.path); + if (good == returnValid) result.insert(i.second.path.clone()); } return result; } -Path DerivationGoal::addHashRewrite(const Path & path) +void DerivationGoal::addHashRewrite(const StorePath & path) { - string h1 = string(path, worker.store.storeDir.size() + 1, 32); - string h2 = string(hashString(htSHA256, "rewrite:" + drvPath + ":" + path).to_string(Base32, false), 0, 32); - Path p = worker.store.storeDir + "/" + h2 + string(path, worker.store.storeDir.size() + 33); - deletePath(p); - assert(path.size() == p.size()); + auto h1 = std::string(((std::string_view) path.to_string()).substr(0, 32)); + auto p = worker.store.makeStorePath( + "rewrite:" + std::string(drvPath.to_string()) + ":" + std::string(path.to_string()), + Hash(htSHA256), path.name()); + auto h2 = std::string(((std::string_view) p.to_string()).substr(0, 32)); + deletePath(worker.store.printStorePath(p)); inputRewrites[h1] = h2; outputRewrites[h2] = h1; - redirectedOutputs[path] = p; - return p; + redirectedOutputs.insert_or_assign(path.clone(), 
std::move(p)); } @@ -3638,7 +4182,7 @@ class SubstitutionGoal : public Goal private: /* The store path that should be realised through a substitute. */ - Path storePath; + StorePath storePath; /* The remaining substituters. */ std::list> subs; @@ -3674,7 +4218,7 @@ private: GoalState state; public: - SubstitutionGoal(const Path & storePath, Worker & worker, RepairFlag repair = NoRepair); + SubstitutionGoal(StorePath && storePath, Worker & worker, RepairFlag repair = NoRepair); ~SubstitutionGoal(); void timedOut() override { abort(); }; @@ -3683,7 +4227,7 @@ public: { /* "a$" ensures substitution goals happen before derivation goals. */ - return "a$" + storePathToName(storePath) + "$" + storePath; + return "a$" + std::string(storePath.name()) + "$" + worker.store.printStorePath(storePath); } void work() override; @@ -3700,7 +4244,7 @@ public: void handleChildOutput(int fd, const string & data) override; void handleEOF(int fd) override; - Path getStorePath() { return storePath; } + StorePath getStorePath() { return storePath.clone(); } void amDone(ExitCode result) override { @@ -3709,13 +4253,13 @@ public: }; -SubstitutionGoal::SubstitutionGoal(const Path & storePath, Worker & worker, RepairFlag repair) +SubstitutionGoal::SubstitutionGoal(StorePath && storePath, Worker & worker, RepairFlag repair) : Goal(worker) + , storePath(std::move(storePath)) , repair(repair) { - this->storePath = storePath; state = &SubstitutionGoal::init; - name = (format("substitution of '%1%'") % storePath).str(); + name = fmt("substitution of '%s'", worker.store.printStorePath(this->storePath)); trace("created"); maintainExpectedSubstitutions = std::make_unique>(worker.expectedSubstitutions); } @@ -3754,7 +4298,7 @@ void SubstitutionGoal::init() } if (settings.readOnlyMode) - throw Error(format("cannot substitute path '%1%' - no write access to the Nix store") % storePath); + throw Error("cannot substitute path '%s' - no write access to the Nix store", worker.store.printStorePath(storePath)); subs = settings.useSubstitutes ? getDefaultSubstituters() : std::list>(); @@ -3769,7 +4313,7 @@ void SubstitutionGoal::tryNext() if (subs.size() == 0) { /* None left. Terminate this goal and let someone else deal with it. */ - debug(format("path '%1%' is required, but there is no substituter that can build it") % storePath); + debug("path '%s' is required, but there is no substituter that can build it", worker.store.printStorePath(storePath)); /* Hack: don't indicate failure if there were no substituters. In that case the calling derivation should just do a @@ -3833,7 +4377,7 @@ void SubstitutionGoal::tryNext() && !info->checkSignatures(worker.store, worker.store.getPublicKeys())) { printError("warning: substituter '%s' does not have a valid signature for path '%s'", - sub->getUri(), storePath); + sub->getUri(), worker.store.printStorePath(storePath)); tryNext(); return; } @@ -3856,7 +4400,7 @@ void SubstitutionGoal::referencesValid() trace("all references realised"); if (nrFailed > 0) { - debug(format("some references of path '%1%' could not be realised") % storePath); + debug("some references of path '%s' could not be realised", worker.store.printStorePath(storePath)); amDone(nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed); return; } @@ -3883,17 +4427,6 @@ void SubstitutionGoal::tryToRun() return; } - /* If the store path is already locked (probably by a - DerivationGoal), then put this goal to sleep. 
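SubstitutionGoal::key() above relies on goals being processed in lexicographic key order: the "a$" prefix makes substitution goals sort ahead of derivation goals, whose keys elsewhere in this file start with a later prefix ("b$" is assumed here, since that code is not part of this hunk). A tiny sketch of the ordering:

#include <iostream>
#include <set>
#include <string>

int main()
{
    // std::set keeps its elements in lexicographic order, like the goal queue.
    std::set<std::string> keys = {
        "b$hello$/nix/store/aaaa-hello.drv",   // a DerivationGoal (assumed prefix)
        "a$hello$/nix/store/bbbb-hello",       // a SubstitutionGoal
    };
    for (auto & k : keys)
        std::cout << k << "\n";                // the substitution goal comes first
}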
Note: we don't - acquire a lock here since that breaks addToStore(), so below we - handle an AlreadyLocked exception from addToStore(). The check - here is just an optimisation to prevent having to redo a - download due to a locked path. */ - if (pathIsLockedByMe(worker.store.toRealPath(storePath))) { - worker.waitForAWhile(shared_from_this()); - return; - } - maintainRunningSubstitutions = std::make_unique>(worker.runningSubstitutions); worker.updateProgress(); @@ -3906,7 +4439,7 @@ void SubstitutionGoal::tryToRun() /* Wake up the worker loop when we're done. */ Finally updateStats([this]() { outPipe.writeSide = -1; }); - Activity act(*logger, actSubstitute, Logger::Fields{storePath, sub->getUri()}); + Activity act(*logger, actSubstitute, Logger::Fields{worker.store.printStorePath(storePath), sub->getUri()}); PushActivity pact(act.id); copyStorePath(ref(sub), ref(worker.store.shared_from_this()), @@ -3933,12 +4466,6 @@ void SubstitutionGoal::finished() try { promise.get_future().get(); - } catch (AlreadyLocked & e) { - /* Probably a DerivationGoal is already building this store - path. Sleep for a while and try again. */ - state = &SubstitutionGoal::init; - worker.waitForAWhile(shared_from_this()); - return; } catch (std::exception & e) { printError(e.what()); @@ -3959,10 +4486,9 @@ void SubstitutionGoal::finished() return; } - worker.markContentsGood(storePath); + worker.markContentsGood(storePath.clone()); - printMsg(lvlChatty, - format("substitution of path '%1%' succeeded") % storePath); + printMsg(lvlChatty, "substitution of path '%s' succeeded", worker.store.printStorePath(storePath)); maintainRunningSubstitutions.reset(); @@ -3998,9 +4524,6 @@ void SubstitutionGoal::handleEOF(int fd) ////////////////////////////////////////////////////////////////////// -static bool working = false; - - Worker::Worker(LocalStore & store) : act(*logger, actRealise) , actDerivations(*logger, actBuilds) @@ -4008,19 +4531,17 @@ Worker::Worker(LocalStore & store) , store(store) { /* Debugging: prevent recursive workers. */ - if (working) abort(); - working = true; nrLocalBuilds = 0; lastWokenUp = steady_time_point::min(); permanentFailure = false; timedOut = false; + hashMismatch = false; + checkMismatch = false; } Worker::~Worker() { - working = false; - /* Explicitly get rid of all strong pointers now. After this all goals that refer to this worker should be gone. (Otherwise we are in trouble, since goals may call childTerminated() etc. 
in
@@ -4033,13 +4554,13 @@ Worker::~Worker()
 }
-GoalPtr Worker::makeDerivationGoal(const Path & path,
+GoalPtr Worker::makeDerivationGoal(const StorePath & path,
     const StringSet & wantedOutputs, BuildMode buildMode)
 {
-    GoalPtr goal = derivationGoals[path].lock();
+    GoalPtr goal = derivationGoals[path.clone()].lock(); // FIXME
     if (!goal) {
-        goal = std::make_shared<DerivationGoal>(path, wantedOutputs, *this, buildMode);
-        derivationGoals[path] = goal;
+        goal = std::make_shared<DerivationGoal>(path.clone(), wantedOutputs, *this, buildMode);
+        derivationGoals.insert_or_assign(path.clone(), goal);
         wakeUp(goal);
     } else
         (dynamic_cast<DerivationGoal *>(goal.get()))->addWantedOutputs(wantedOutputs);
@@ -4047,21 +4568,21 @@ GoalPtr Worker::makeDerivationGoal(const Path & path,
 }
-std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(const Path & drvPath,
+std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(StorePath && drvPath,
     const BasicDerivation & drv, BuildMode buildMode)
 {
-    auto goal = std::make_shared<DerivationGoal>(drvPath, drv, *this, buildMode);
+    auto goal = std::make_shared<DerivationGoal>(std::move(drvPath), drv, *this, buildMode);
     wakeUp(goal);
     return goal;
 }
-GoalPtr Worker::makeSubstitutionGoal(const Path & path, RepairFlag repair)
+GoalPtr Worker::makeSubstitutionGoal(const StorePath & path, RepairFlag repair)
 {
-    GoalPtr goal = substitutionGoals[path].lock();
+    GoalPtr goal = substitutionGoals[path.clone()].lock(); // FIXME
     if (!goal) {
-        goal = std::make_shared<SubstitutionGoal>(path, *this, repair);
-        substitutionGoals[path] = goal;
+        goal = std::make_shared<SubstitutionGoal>(path.clone(), *this, repair);
+        substitutionGoals.insert_or_assign(path.clone(), goal);
         wakeUp(goal);
     }
     return goal;
 }
@@ -4319,14 +4840,15 @@ void Worker::waitForInput()
     for (auto & k : fds2) {
         if (FD_ISSET(k, &fds)) {
             ssize_t rd = read(k, buffer.data(), buffer.size());
-            if (rd == -1) {
-                if (errno != EINTR)
-                    throw SysError(format("reading from %1%")
-                        % goal->getName());
-            } else if (rd == 0) {
+            // FIXME: is there a cleaner way to handle pt close
+            // than EIO? Is this even standard?
+            if (rd == 0 || (rd == -1 && errno == EIO)) {
                 debug(format("%1%: got EOF") % goal->getName());
                 goal->handleEOF(k);
                 j->fds.erase(k);
+            } else if (rd == -1) {
+                if (errno != EINTR)
+                    throw SysError("%s: read failed", goal->getName());
             } else {
                 printMsg(lvlVomit, format("%1%: read %2% bytes")
                     % goal->getName() % rd);
@@ -4373,42 +4895,64 @@ void Worker::waitForInput()
 unsigned int Worker::exitStatus()
 {
-    return timedOut ? 101 : (permanentFailure ? 100 : 1);
+    /*
+     * 1100100
+     *    ^^^^
+     *    |||`- timeout
+     *    ||`-- output hash mismatch
+     *    |`--- build failure
+     *    `---- not deterministic
+     */
+    unsigned int mask = 0;
+    bool buildFailure = permanentFailure || timedOut || hashMismatch;
+    if (buildFailure)
+        mask |= 0x04;  // 100
+    if (timedOut)
+        mask |= 0x01;  // 101
+    if (hashMismatch)
+        mask |= 0x02;  // 102
+    if (checkMismatch) {
+        mask |= 0x08;  // 104
+    }
+
+    if (mask)
+        mask |= 0x60;
+    return mask ?
mask : 1; } -bool Worker::pathContentsGood(const Path & path) +bool Worker::pathContentsGood(const StorePath & path) { - std::map::iterator i = pathContentsGoodCache.find(path); + auto i = pathContentsGoodCache.find(path); if (i != pathContentsGoodCache.end()) return i->second; - printInfo(format("checking path '%1%'...") % path); + printInfo("checking path '%s'...", store.printStorePath(path)); auto info = store.queryPathInfo(path); bool res; - if (!pathExists(path)) + if (!pathExists(store.printStorePath(path))) res = false; else { - HashResult current = hashPath(info->narHash.type, path); + HashResult current = hashPath(info->narHash.type, store.printStorePath(path)); Hash nullHash(htSHA256); res = info->narHash == nullHash || info->narHash == current.first; } - pathContentsGoodCache[path] = res; - if (!res) printError(format("path '%1%' is corrupted or missing!") % path); + pathContentsGoodCache.insert_or_assign(path.clone(), res); + if (!res) printError("path '%s' is corrupted or missing!", store.printStorePath(path)); return res; } -void Worker::markContentsGood(const Path & path) +void Worker::markContentsGood(StorePath && path) { - pathContentsGoodCache[path] = true; + pathContentsGoodCache.insert_or_assign(std::move(path), true); } ////////////////////////////////////////////////////////////////////// -static void primeCache(Store & store, const PathSet & paths) +static void primeCache(Store & store, const std::vector & paths) { - PathSet willBuild, willSubstitute, unknown; + StorePathSet willBuild, willSubstitute, unknown; unsigned long long downloadSize, narSize; store.queryMissing(paths, willBuild, willSubstitute, unknown, downloadSize, narSize); @@ -4419,24 +4963,23 @@ static void primeCache(Store & store, const PathSet & paths) } -void LocalStore::buildPaths(const PathSet & drvPaths, BuildMode buildMode) +void LocalStore::buildPaths(const std::vector & drvPaths, BuildMode buildMode) { Worker worker(*this); primeCache(*this, drvPaths); Goals goals; - for (auto & i : drvPaths) { - DrvPathWithOutputs i2 = parseDrvPathWithOutputs(i); - if (isDerivation(i2.first)) - goals.insert(worker.makeDerivationGoal(i2.first, i2.second, buildMode)); + for (auto & path : drvPaths) { + if (path.path.isDerivation()) + goals.insert(worker.makeDerivationGoal(path.path, path.outputs, buildMode)); else - goals.insert(worker.makeSubstitutionGoal(i, buildMode == bmRepair ? Repair : NoRepair)); + goals.insert(worker.makeSubstitutionGoal(path.path, buildMode == bmRepair ? Repair : NoRepair)); } worker.run(goals); - PathSet failed; + StorePathSet failed; for (auto & i : goals) { if (i->getExitCode() != Goal::ecSuccess) { DerivationGoal * i2 = dynamic_cast(i.get()); @@ -4450,11 +4993,11 @@ void LocalStore::buildPaths(const PathSet & drvPaths, BuildMode buildMode) } -BuildResult LocalStore::buildDerivation(const Path & drvPath, const BasicDerivation & drv, +BuildResult LocalStore::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode) { Worker worker(*this); - auto goal = worker.makeBasicDerivationGoal(drvPath, drv, buildMode); + auto goal = worker.makeBasicDerivationGoal(drvPath.clone(), drv, buildMode); BuildResult result; @@ -4470,12 +5013,12 @@ BuildResult LocalStore::buildDerivation(const Path & drvPath, const BasicDerivat } -void LocalStore::ensurePath(const Path & path) +void LocalStore::ensurePath(const StorePath & path) { /* If the path is already valid, we're done. 
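Worker::exitStatus() above packs the failure causes into the low bits and then adds 0x60 (96) so the resulting codes land in the 100 range, falling back to 1 for a plain failure. A standalone sketch that reproduces the mapping:

#include <iostream>

unsigned int exitStatus(bool permanentFailure, bool timedOut,
                        bool hashMismatch, bool checkMismatch)
{
    unsigned int mask = 0;
    if (permanentFailure || timedOut || hashMismatch) mask |= 0x04;
    if (timedOut)      mask |= 0x01;
    if (hashMismatch)  mask |= 0x02;
    if (checkMismatch) mask |= 0x08;
    if (mask) mask |= 0x60;           // shift the codes into the 100 range
    return mask ? mask : 1;
}

int main()
{
    std::cout << exitStatus(true,  false, false, false) << "\n";   // 100: permanent failure
    std::cout << exitStatus(false, true,  false, false) << "\n";   // 101: timeout
    std::cout << exitStatus(false, false, true,  false) << "\n";   // 102: output hash mismatch
    std::cout << exitStatus(false, false, false, true)  << "\n";   // 104: --check mismatch
}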
*/ if (isValidPath(path)) return; - primeCache(*this, {path}); + primeCache(*this, {StorePathWithOutputs(path)}); Worker worker(*this); GoalPtr goal = worker.makeSubstitutionGoal(path); @@ -4484,11 +5027,11 @@ void LocalStore::ensurePath(const Path & path) worker.run(goals); if (goal->getExitCode() != Goal::ecSuccess) - throw Error(worker.exitStatus(), "path '%s' does not exist and cannot be created", path); + throw Error(worker.exitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path)); } -void LocalStore::repairPath(const Path & path) +void LocalStore::repairPath(const StorePath & path) { Worker worker(*this); GoalPtr goal = worker.makeSubstitutionGoal(path, Repair); @@ -4499,13 +5042,13 @@ void LocalStore::repairPath(const Path & path) if (goal->getExitCode() != Goal::ecSuccess) { /* Since substituting the path didn't work, if we have a valid deriver, then rebuild the deriver. */ - auto deriver = queryPathInfo(path)->deriver; - if (deriver != "" && isValidPath(deriver)) { + auto info = queryPathInfo(path); + if (info->deriver && isValidPath(*info->deriver)) { goals.clear(); - goals.insert(worker.makeDerivationGoal(deriver, StringSet(), bmRepair)); + goals.insert(worker.makeDerivationGoal(*info->deriver, StringSet(), bmRepair)); worker.run(goals); } else - throw Error(worker.exitStatus(), "cannot repair path '%s'", path); + throw Error(worker.exitStatus(), "cannot repair path '%s'", printStorePath(path)); } } diff --git a/src/libstore/builtins.hh b/src/libstore/builtins.hh index 0d2da873e..87d6ce665 100644 --- a/src/libstore/builtins.hh +++ b/src/libstore/builtins.hh @@ -7,5 +7,6 @@ namespace nix { // TODO: make pluggable. void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData); void builtinBuildenv(const BasicDerivation & drv); +void builtinUnpackChannel(const BasicDerivation & drv); } diff --git a/src/libstore/builtins/buildenv.cc b/src/libstore/builtins/buildenv.cc index 74e706664..096593886 100644 --- a/src/libstore/builtins/buildenv.cc +++ b/src/libstore/builtins/buildenv.cc @@ -123,8 +123,7 @@ static Path out; static void addPkg(const Path & pkgDir, int priority) { - if (done.count(pkgDir)) return; - done.insert(pkgDir); + if (!done.insert(pkgDir).second) return; createLinks(pkgDir, out, priority); try { diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index 92aec63a0..f6ae5d2e6 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -24,7 +24,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) Path storePath = getAttr("out"); auto mainUrl = getAttr("url"); - bool unpack = get(drv.env, "unpack", "") == "1"; + bool unpack = get(drv.env, "unpack").value_or("") == "1"; /* Note: have to use a fresh downloader here because we're in a forked process. 
*/ @@ -64,7 +64,8 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) try { if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/'; auto ht = parseHashType(getAttr("outputHashAlgo")); - fetch(hashedMirror + printHashType(ht) + "/" + Hash(getAttr("outputHash"), ht).to_string(Base16, false)); + auto h = Hash(getAttr("outputHash"), ht); + fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false)); return; } catch (Error & e) { debug(e.what()); diff --git a/src/libstore/builtins/unpack-channel.cc b/src/libstore/builtins/unpack-channel.cc new file mode 100644 index 000000000..d18e3ddaf --- /dev/null +++ b/src/libstore/builtins/unpack-channel.cc @@ -0,0 +1,29 @@ +#include "builtins.hh" +#include "tarfile.hh" + +namespace nix { + +void builtinUnpackChannel(const BasicDerivation & drv) +{ + auto getAttr = [&](const string & name) { + auto i = drv.env.find(name); + if (i == drv.env.end()) throw Error("attribute '%s' missing", name); + return i->second; + }; + + Path out = getAttr("out"); + auto channelName = getAttr("channelName"); + auto src = getAttr("src"); + + createDirs(out); + + unpackTarfile(src, out); + + auto entries = readDirectory(out); + if (entries.size() != 1) + throw Error("channel tarball '%s' contains more than one file", src); + if (rename((out + "/" + entries[0].name).c_str(), (out + "/" + channelName).c_str()) == -1) + throw SysError("renaming channel directory"); +} + +} diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc new file mode 100644 index 000000000..8e9f9d71b --- /dev/null +++ b/src/libstore/daemon.cc @@ -0,0 +1,844 @@ +#include "daemon.hh" +#include "monitor-fd.hh" +#include "worker-protocol.hh" +#include "store-api.hh" +#include "local-store.hh" +#include "finally.hh" +#include "affinity.hh" +#include "archive.hh" +#include "derivations.hh" +#include "args.hh" + +namespace nix::daemon { + +Sink & operator << (Sink & sink, const Logger::Fields & fields) +{ + sink << fields.size(); + for (auto & f : fields) { + sink << f.type; + if (f.type == Logger::Field::tInt) + sink << f.i; + else if (f.type == Logger::Field::tString) + sink << f.s; + else abort(); + } + return sink; +} + +/* Logger that forwards log messages to the client, *if* we're in a + state where the protocol allows it (i.e., when canSendStderr is + true). */ +struct TunnelLogger : public Logger +{ + FdSink & to; + + struct State + { + bool canSendStderr = false; + std::vector pendingMsgs; + }; + + Sync state_; + + unsigned int clientVersion; + + TunnelLogger(FdSink & to, unsigned int clientVersion) + : to(to), clientVersion(clientVersion) { } + + void enqueueMsg(const std::string & s) + { + auto state(state_.lock()); + + if (state->canSendStderr) { + assert(state->pendingMsgs.empty()); + try { + to(s); + to.flush(); + } catch (...) { + /* Write failed; that means that the other side is + gone. */ + state->canSendStderr = false; + throw; + } + } else + state->pendingMsgs.push_back(s); + } + + void log(Verbosity lvl, const FormatOrString & fs) override + { + if (lvl > verbosity) return; + + StringSink buf; + buf << STDERR_NEXT << (fs.s + "\n"); + enqueueMsg(*buf.s); + } + + /* startWork() means that we're starting an operation for which we + want to send out stderr to the client. */ + void startWork() + { + auto state(state_.lock()); + state->canSendStderr = true; + + for (auto & msg : state->pendingMsgs) + to(msg); + + state->pendingMsgs.clear(); + + to.flush(); + } + + /* stopWork() means that we're done; stop sending stderr to the + client. 
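The hashed-mirror fallback above simply concatenates the mirror URL, the hash type and the base-16 hash. A standalone sketch of the URL construction; the mirror shown (tarballs.nixos.org) is the usual default and the digest is a placeholder, both assumptions of the example:

#include <iostream>
#include <string>

int main()
{
    std::string hashedMirror = "https://tarballs.nixos.org/";   // example mirror
    std::string hashType     = "sha256";
    std::string hashBase16   = "0123abcd...";                   // placeholder digest

    if (hashedMirror.back() != '/') hashedMirror += '/';
    std::cout << hashedMirror + hashType + "/" + hashBase16 << "\n";
}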
*/ + void stopWork(bool success = true, const string & msg = "", unsigned int status = 0) + { + auto state(state_.lock()); + + state->canSendStderr = false; + + if (success) + to << STDERR_LAST; + else { + to << STDERR_ERROR << msg; + if (status != 0) to << status; + } + } + + void startActivity(ActivityId act, Verbosity lvl, ActivityType type, + const std::string & s, const Fields & fields, ActivityId parent) override + { + if (GET_PROTOCOL_MINOR(clientVersion) < 20) { + if (!s.empty()) + log(lvl, s + "..."); + return; + } + + StringSink buf; + buf << STDERR_START_ACTIVITY << act << lvl << type << s << fields << parent; + enqueueMsg(*buf.s); + } + + void stopActivity(ActivityId act) override + { + if (GET_PROTOCOL_MINOR(clientVersion) < 20) return; + StringSink buf; + buf << STDERR_STOP_ACTIVITY << act; + enqueueMsg(*buf.s); + } + + void result(ActivityId act, ResultType type, const Fields & fields) override + { + if (GET_PROTOCOL_MINOR(clientVersion) < 20) return; + StringSink buf; + buf << STDERR_RESULT << act << type << fields; + enqueueMsg(*buf.s); + } +}; + +struct TunnelSink : Sink +{ + Sink & to; + TunnelSink(Sink & to) : to(to) { } + virtual void operator () (const unsigned char * data, size_t len) + { + to << STDERR_WRITE; + writeString(data, len, to); + } +}; + +struct TunnelSource : BufferedSource +{ + Source & from; + BufferedSink & to; + TunnelSource(Source & from, BufferedSink & to) : from(from), to(to) { } + size_t readUnbuffered(unsigned char * data, size_t len) override + { + to << STDERR_READ << len; + to.flush(); + size_t n = readString(data, len, from); + if (n == 0) throw EndOfFile("unexpected end-of-file"); + return n; + } +}; + +/* If the NAR archive contains a single file at top-level, then save + the contents of the file to `s'. Otherwise barf. 
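TunnelLogger above may only write to the protocol stream between startWork() and stopWork(); anything logged outside that window is queued in pendingMsgs and flushed on the next startWork(). A stripped-down, standalone model of that buffering (the STDERR_* markers are printed as plain text here rather than encoded as in the real protocol):

#include <iostream>
#include <string>
#include <vector>

struct ModelLogger {
    bool canSendStderr = false;
    std::vector<std::string> pendingMsgs;

    void log(const std::string & msg) {
        if (canSendStderr) std::cout << "STDERR_NEXT " << msg << "\n";
        else pendingMsgs.push_back(msg);          // client can't receive it yet
    }
    void startWork() {
        canSendStderr = true;
        for (auto & msg : pendingMsgs)            // flush what piled up
            std::cout << "STDERR_NEXT " << msg << "\n";
        pendingMsgs.clear();
    }
    void stopWork() { canSendStderr = false; std::cout << "STDERR_LAST\n"; }
};

int main()
{
    ModelLogger l;
    l.log("queued before work starts");
    l.startWork();                // flushes the queued message
    l.log("sent immediately");
    l.stopWork();
}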
*/ +struct RetrieveRegularNARSink : ParseSink +{ + bool regular; + string s; + + RetrieveRegularNARSink() : regular(true) { } + + void createDirectory(const Path & path) + { + regular = false; + } + + void receiveContents(unsigned char * data, unsigned int len) + { + s.append((const char *) data, len); + } + + void createSymlink(const Path & path, const string & target) + { + regular = false; + } +}; + +struct ClientSettings +{ + bool keepFailed; + bool keepGoing; + bool tryFallback; + Verbosity verbosity; + unsigned int maxBuildJobs; + time_t maxSilentTime; + bool verboseBuild; + unsigned int buildCores; + bool useSubstitutes; + StringMap overrides; + + void apply(TrustedFlag trusted) + { + settings.keepFailed = keepFailed; + settings.keepGoing = keepGoing; + settings.tryFallback = tryFallback; + nix::verbosity = verbosity; + settings.maxBuildJobs.assign(maxBuildJobs); + settings.maxSilentTime = maxSilentTime; + settings.verboseBuild = verboseBuild; + settings.buildCores = buildCores; + settings.useSubstitutes = useSubstitutes; + + for (auto & i : overrides) { + auto & name(i.first); + auto & value(i.second); + + auto setSubstituters = [&](Setting & res) { + if (name != res.name && res.aliases.count(name) == 0) + return false; + StringSet trusted = settings.trustedSubstituters; + for (auto & s : settings.substituters.get()) + trusted.insert(s); + Strings subs; + auto ss = tokenizeString(value); + for (auto & s : ss) + if (trusted.count(s)) + subs.push_back(s); + else + warn("ignoring untrusted substituter '%s'", s); + res = subs; + return true; + }; + + try { + if (name == "ssh-auth-sock") // obsolete + ; + else if (trusted + || name == settings.buildTimeout.name + || name == "connect-timeout" + || (name == "builders" && value == "")) + settings.set(name, value); + else if (setSubstituters(settings.substituters)) + ; + else if (setSubstituters(settings.extraSubstituters)) + ; + else + warn("ignoring the user-specified setting '%s', because it is a restricted setting and you are not a trusted user", name); + } catch (UsageError & e) { + warn(e.what()); + } + } + } +}; + +static void performOp(TunnelLogger * logger, ref store, + TrustedFlag trusted, RecursiveFlag recursive, unsigned int clientVersion, + Source & from, BufferedSink & to, unsigned int op) +{ + switch (op) { + + case wopIsValidPath: { + auto path = store->parseStorePath(readString(from)); + logger->startWork(); + bool result = store->isValidPath(path); + logger->stopWork(); + to << result; + break; + } + + case wopQueryValidPaths: { + auto paths = readStorePaths(*store, from); + logger->startWork(); + auto res = store->queryValidPaths(paths); + logger->stopWork(); + writeStorePaths(*store, to, res); + break; + } + + case wopHasSubstitutes: { + auto path = store->parseStorePath(readString(from)); + logger->startWork(); + StorePathSet paths; // FIXME + paths.insert(path.clone()); + auto res = store->querySubstitutablePaths(paths); + logger->stopWork(); + to << (res.count(path) != 0); + break; + } + + case wopQuerySubstitutablePaths: { + auto paths = readStorePaths(*store, from); + logger->startWork(); + auto res = store->querySubstitutablePaths(paths); + logger->stopWork(); + writeStorePaths(*store, to, res); + break; + } + + case wopQueryPathHash: { + auto path = store->parseStorePath(readString(from)); + logger->startWork(); + auto hash = store->queryPathInfo(path)->narHash; + logger->stopWork(); + to << hash.to_string(Base16, false); + break; + } + + case wopQueryReferences: + case wopQueryReferrers: + case 
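The setSubstituters logic above lets an untrusted client enable only those substituters the daemon already trusts (its configured substituters plus trusted-substituters); anything else is dropped with a warning. A standalone model with example URLs:

#include <iostream>
#include <set>
#include <string>
#include <vector>

int main()
{
    std::set<std::string> trusted = {
        "https://cache.nixos.org",        // from the daemon's `substituters`
        "https://example.org/trusted",    // from `trusted-substituters`
    };
    std::vector<std::string> requested = {
        "https://cache.nixos.org",
        "https://example.org/untrusted",
    };

    std::vector<std::string> accepted;
    for (auto & s : requested)
        if (trusted.count(s))
            accepted.push_back(s);
        else
            std::cerr << "ignoring untrusted substituter '" << s << "'\n";

    for (auto & s : accepted)
        std::cout << "using " << s << "\n";
}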
wopQueryValidDerivers: + case wopQueryDerivationOutputs: { + auto path = store->parseStorePath(readString(from)); + logger->startWork(); + StorePathSet paths; + if (op == wopQueryReferences) + for (auto & i : store->queryPathInfo(path)->references) + paths.insert(i.clone()); + else if (op == wopQueryReferrers) + store->queryReferrers(path, paths); + else if (op == wopQueryValidDerivers) + paths = store->queryValidDerivers(path); + else paths = store->queryDerivationOutputs(path); + logger->stopWork(); + writeStorePaths(*store, to, paths); + break; + } + + case wopQueryDerivationOutputNames: { + auto path = store->parseStorePath(readString(from)); + logger->startWork(); + StringSet names; + names = store->queryDerivationOutputNames(path); + logger->stopWork(); + to << names; + break; + } + + case wopQueryDeriver: { + auto path = store->parseStorePath(readString(from)); + logger->startWork(); + auto info = store->queryPathInfo(path); + logger->stopWork(); + to << (info->deriver ? store->printStorePath(*info->deriver) : ""); + break; + } + + case wopQueryPathFromHashPart: { + auto hashPart = readString(from); + logger->startWork(); + auto path = store->queryPathFromHashPart(hashPart); + logger->stopWork(); + to << (path ? store->printStorePath(*path) : ""); + break; + } + + case wopAddToStore: { + bool fixed, recursive; + std::string s, baseName; + from >> baseName >> fixed /* obsolete */ >> recursive >> s; + /* Compatibility hack. */ + if (!fixed) { + s = "sha256"; + recursive = true; + } + HashType hashAlgo = parseHashType(s); + + TeeSource savedNAR(from); + RetrieveRegularNARSink savedRegular; + + if (recursive) { + /* Get the entire NAR dump from the client and save it to + a string so that we can pass it to + addToStoreFromDump(). */ + ParseSink sink; /* null sink; just parse the NAR */ + parseDump(sink, savedNAR); + } else + parseDump(savedRegular, from); + + logger->startWork(); + if (!savedRegular.regular) throw Error("regular file expected"); + + auto path = store->addToStoreFromDump(recursive ? *savedNAR.data : savedRegular.s, baseName, recursive, hashAlgo); + logger->stopWork(); + + to << store->printStorePath(path); + break; + } + + case wopAddTextToStore: { + string suffix = readString(from); + string s = readString(from); + auto refs = readStorePaths(*store, from); + logger->startWork(); + auto path = store->addTextToStore(suffix, s, refs, NoRepair); + logger->stopWork(); + to << store->printStorePath(path); + break; + } + + case wopExportPath: { + auto path = store->parseStorePath(readString(from)); + readInt(from); // obsolete + logger->startWork(); + TunnelSink sink(to); + store->exportPath(path, sink); + logger->stopWork(); + to << 1; + break; + } + + case wopImportPaths: { + logger->startWork(); + TunnelSource source(from, to); + auto paths = store->importPaths(source, nullptr, + trusted ? NoCheckSigs : CheckSigs); + logger->stopWork(); + Strings paths2; + for (auto & i : paths) paths2.push_back(store->printStorePath(i)); + to << paths2; + break; + } + + case wopBuildPaths: { + std::vector drvs; + for (auto & s : readStrings(from)) + drvs.push_back(store->parsePathWithOutputs(s)); + BuildMode mode = bmNormal; + if (GET_PROTOCOL_MINOR(clientVersion) >= 15) { + mode = (BuildMode) readInt(from); + + /* Repairing is not atomic, so disallowed for "untrusted" + clients. 
*/ + if (mode == bmRepair && !trusted) + throw Error("repairing is not allowed because you are not in 'trusted-users'"); + } + logger->startWork(); + store->buildPaths(drvs, mode); + logger->stopWork(); + to << 1; + break; + } + + case wopBuildDerivation: { + auto drvPath = store->parseStorePath(readString(from)); + BasicDerivation drv; + readDerivation(from, *store, drv); + BuildMode buildMode = (BuildMode) readInt(from); + logger->startWork(); + if (!trusted) + throw Error("you are not privileged to build derivations"); + auto res = store->buildDerivation(drvPath, drv, buildMode); + logger->stopWork(); + to << res.status << res.errorMsg; + break; + } + + case wopEnsurePath: { + auto path = store->parseStorePath(readString(from)); + logger->startWork(); + store->ensurePath(path); + logger->stopWork(); + to << 1; + break; + } + + case wopAddTempRoot: { + auto path = store->parseStorePath(readString(from)); + logger->startWork(); + store->addTempRoot(path); + logger->stopWork(); + to << 1; + break; + } + + case wopAddIndirectRoot: { + Path path = absPath(readString(from)); + logger->startWork(); + store->addIndirectRoot(path); + logger->stopWork(); + to << 1; + break; + } + + case wopSyncWithGC: { + logger->startWork(); + store->syncWithGC(); + logger->stopWork(); + to << 1; + break; + } + + case wopFindRoots: { + logger->startWork(); + Roots roots = store->findRoots(!trusted); + logger->stopWork(); + + size_t size = 0; + for (auto & i : roots) + size += i.second.size(); + + to << size; + + for (auto & [target, links] : roots) + for (auto & link : links) + to << link << store->printStorePath(target); + + break; + } + + case wopCollectGarbage: { + GCOptions options; + options.action = (GCOptions::GCAction) readInt(from); + options.pathsToDelete = readStorePaths(*store, from); + from >> options.ignoreLiveness >> options.maxFreed; + // obsolete fields + readInt(from); + readInt(from); + readInt(from); + + GCResults results; + + logger->startWork(); + if (options.ignoreLiveness) + throw Error("you are not allowed to ignore liveness"); + store->collectGarbage(options, results); + logger->stopWork(); + + to << results.paths << results.bytesFreed << 0 /* obsolete */; + + break; + } + + case wopSetOptions: { + + ClientSettings clientSettings; + + clientSettings.keepFailed = readInt(from); + clientSettings.keepGoing = readInt(from); + clientSettings.tryFallback = readInt(from); + clientSettings.verbosity = (Verbosity) readInt(from); + clientSettings.maxBuildJobs = readInt(from); + clientSettings.maxSilentTime = readInt(from); + readInt(from); // obsolete useBuildHook + clientSettings.verboseBuild = lvlError == (Verbosity) readInt(from); + readInt(from); // obsolete logType + readInt(from); // obsolete printBuildTrace + clientSettings.buildCores = readInt(from); + clientSettings.useSubstitutes = readInt(from); + + if (GET_PROTOCOL_MINOR(clientVersion) >= 12) { + unsigned int n = readInt(from); + for (unsigned int i = 0; i < n; i++) { + string name = readString(from); + string value = readString(from); + clientSettings.overrides.emplace(name, value); + } + } + + logger->startWork(); + + // FIXME: use some setting in recursive mode. Will need to use + // non-global variables. 
+ if (!recursive) + clientSettings.apply(trusted); + + logger->stopWork(); + break; + } + + case wopQuerySubstitutablePathInfo: { + auto path = store->parseStorePath(readString(from)); + logger->startWork(); + SubstitutablePathInfos infos; + StorePathSet paths; + paths.insert(path.clone()); // FIXME + store->querySubstitutablePathInfos(paths, infos); + logger->stopWork(); + auto i = infos.find(path); + if (i == infos.end()) + to << 0; + else { + to << 1 + << (i->second.deriver ? store->printStorePath(*i->second.deriver) : ""); + writeStorePaths(*store, to, i->second.references); + to << i->second.downloadSize + << i->second.narSize; + } + break; + } + + case wopQuerySubstitutablePathInfos: { + auto paths = readStorePaths(*store, from); + logger->startWork(); + SubstitutablePathInfos infos; + store->querySubstitutablePathInfos(paths, infos); + logger->stopWork(); + to << infos.size(); + for (auto & i : infos) { + to << store->printStorePath(i.first) + << (i.second.deriver ? store->printStorePath(*i.second.deriver) : ""); + writeStorePaths(*store, to, i.second.references); + to << i.second.downloadSize << i.second.narSize; + } + break; + } + + case wopQueryAllValidPaths: { + logger->startWork(); + auto paths = store->queryAllValidPaths(); + logger->stopWork(); + writeStorePaths(*store, to, paths); + break; + } + + case wopQueryPathInfo: { + auto path = store->parseStorePath(readString(from)); + std::shared_ptr info; + logger->startWork(); + try { + info = store->queryPathInfo(path); + } catch (InvalidPath &) { + if (GET_PROTOCOL_MINOR(clientVersion) < 17) throw; + } + logger->stopWork(); + if (info) { + if (GET_PROTOCOL_MINOR(clientVersion) >= 17) + to << 1; + to << (info->deriver ? store->printStorePath(*info->deriver) : "") + << info->narHash.to_string(Base16, false); + writeStorePaths(*store, to, info->references); + to << info->registrationTime << info->narSize; + if (GET_PROTOCOL_MINOR(clientVersion) >= 16) { + to << info->ultimate + << info->sigs + << info->ca; + } + } else { + assert(GET_PROTOCOL_MINOR(clientVersion) >= 17); + to << 0; + } + break; + } + + case wopOptimiseStore: + logger->startWork(); + store->optimiseStore(); + logger->stopWork(); + to << 1; + break; + + case wopVerifyStore: { + bool checkContents, repair; + from >> checkContents >> repair; + logger->startWork(); + if (repair && !trusted) + throw Error("you are not privileged to repair paths"); + bool errors = store->verifyStore(checkContents, (RepairFlag) repair); + logger->stopWork(); + to << errors; + break; + } + + case wopAddSignatures: { + auto path = store->parseStorePath(readString(from)); + StringSet sigs = readStrings(from); + logger->startWork(); + if (!trusted) + throw Error("you are not privileged to add signatures"); + store->addSignatures(path, sigs); + logger->stopWork(); + to << 1; + break; + } + + case wopNarFromPath: { + auto path = store->parseStorePath(readString(from)); + logger->startWork(); + logger->stopWork(); + dumpPath(store->printStorePath(path), to); + break; + } + + case wopAddToStoreNar: { + bool repair, dontCheckSigs; + ValidPathInfo info(store->parseStorePath(readString(from))); + auto deriver = readString(from); + if (deriver != "") + info.deriver = store->parseStorePath(deriver); + info.narHash = Hash(readString(from), htSHA256); + info.references = readStorePaths(*store, from); + from >> info.registrationTime >> info.narSize >> info.ultimate; + info.sigs = readStrings(from); + from >> info.ca >> repair >> dontCheckSigs; + if (!trusted && dontCheckSigs) + dontCheckSigs = false; + 
if (!trusted) + info.ultimate = false; + + std::string saved; + std::unique_ptr source; + if (GET_PROTOCOL_MINOR(clientVersion) >= 21) + source = std::make_unique(from, to); + else { + TeeSink tee(from); + parseDump(tee, tee.source); + saved = std::move(*tee.source.data); + source = std::make_unique(saved); + } + + logger->startWork(); + + // FIXME: race if addToStore doesn't read source? + store->addToStore(info, *source, (RepairFlag) repair, + dontCheckSigs ? NoCheckSigs : CheckSigs, nullptr); + + logger->stopWork(); + break; + } + + case wopQueryMissing: { + std::vector targets; + for (auto & s : readStrings(from)) + targets.push_back(store->parsePathWithOutputs(s)); + logger->startWork(); + StorePathSet willBuild, willSubstitute, unknown; + unsigned long long downloadSize, narSize; + store->queryMissing(targets, willBuild, willSubstitute, unknown, downloadSize, narSize); + logger->stopWork(); + writeStorePaths(*store, to, willBuild); + writeStorePaths(*store, to, willSubstitute); + writeStorePaths(*store, to, unknown); + to << downloadSize << narSize; + break; + } + + default: + throw Error(format("invalid operation %1%") % op); + } +} + +void processConnection( + ref store, + FdSource & from, + FdSink & to, + TrustedFlag trusted, + RecursiveFlag recursive, + const std::string & userName, + uid_t userId) +{ + auto monitor = !recursive ? std::make_unique(from.fd) : nullptr; + + /* Exchange the greeting. */ + unsigned int magic = readInt(from); + if (magic != WORKER_MAGIC_1) throw Error("protocol mismatch"); + to << WORKER_MAGIC_2 << PROTOCOL_VERSION; + to.flush(); + unsigned int clientVersion = readInt(from); + + if (clientVersion < 0x10a) + throw Error("the Nix client version is too old"); + + auto tunnelLogger = new TunnelLogger(to, clientVersion); + auto prevLogger = nix::logger; + // FIXME + if (!recursive) + logger = tunnelLogger; + + unsigned int opCount = 0; + + Finally finally([&]() { + _isInterrupted = false; + prevLogger->log(lvlDebug, fmt("%d operations", opCount)); + }); + + if (GET_PROTOCOL_MINOR(clientVersion) >= 14 && readInt(from)) { + auto affinity = readInt(from); + setAffinityTo(affinity); + } + + readInt(from); // obsolete reserveSpace + + /* Send startup error messages to the client. */ + tunnelLogger->startWork(); + + try { + + /* If we can't accept clientVersion, then throw an error + *here* (not above). */ + +#if 0 + /* Prevent users from doing something very dangerous. */ + if (geteuid() == 0 && + querySetting("build-users-group", "") == "") + throw Error("if you run 'nix-daemon' as root, then you MUST set 'build-users-group'!"); +#endif + + store->createUser(userName, userId); + + tunnelLogger->stopWork(); + to.flush(); + + /* Process client requests. */ + while (true) { + WorkerOp op; + try { + op = (WorkerOp) readInt(from); + } catch (Interrupted & e) { + break; + } catch (EndOfFile & e) { + break; + } + + opCount++; + + try { + performOp(tunnelLogger, store, trusted, recursive, clientVersion, from, to, op); + } catch (Error & e) { + /* If we're not in a state where we can send replies, then + something went wrong processing the input of the + client. This can happen especially if I/O errors occur + during addTextToStore() / importPath(). If that + happens, just send the error message and exit. 
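processConnection() above starts with a handshake: the client sends WORKER_MAGIC_1, the daemon replies with WORKER_MAGIC_2 and its protocol version, and clients older than protocol 1.10 (0x10a) are rejected. A standalone sketch of those checks; the constant values are quoted from memory of worker-protocol.hh and should be treated as illustrative:

#include <iostream>
#include <stdexcept>

const unsigned int WORKER_MAGIC_1 = 0x6e697863;   // sent by the client
const unsigned int WORKER_MAGIC_2 = 0x6478696f;   // sent back by the daemon
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)

int main()
{
    unsigned int magic = WORKER_MAGIC_1;          // first word read from the client
    if (magic != WORKER_MAGIC_1)
        throw std::runtime_error("protocol mismatch");

    // The daemon answers with WORKER_MAGIC_2 and its protocol version, then
    // reads the client's version and rejects anything older than 1.10.
    std::cout << std::hex << "daemon magic: 0x" << WORKER_MAGIC_2 << std::dec << "\n";
    unsigned int clientVersion = 0x113;           // example: protocol 1.19
    if (clientVersion < 0x10a)
        throw std::runtime_error("the Nix client version is too old");

    std::cout << "client speaks protocol minor version "
              << GET_PROTOCOL_MINOR(clientVersion) << "\n";   // 19
}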
*/ + bool errorAllowed = tunnelLogger->state_.lock()->canSendStderr; + tunnelLogger->stopWork(false, e.msg(), e.status); + if (!errorAllowed) throw; + } catch (std::bad_alloc & e) { + tunnelLogger->stopWork(false, "Nix daemon out of memory", 1); + throw; + } + + to.flush(); + + assert(!tunnelLogger->state_.lock()->canSendStderr); + }; + + } catch (std::exception & e) { + tunnelLogger->stopWork(false, e.what(), 1); + to.flush(); + return; + } +} + +} diff --git a/src/libstore/daemon.hh b/src/libstore/daemon.hh new file mode 100644 index 000000000..266932013 --- /dev/null +++ b/src/libstore/daemon.hh @@ -0,0 +1,18 @@ +#include "serialise.hh" +#include "store-api.hh" + +namespace nix::daemon { + +enum TrustedFlag : bool { NotTrusted = false, Trusted = true }; +enum RecursiveFlag : bool { NotRecursive = false, Recursive = true }; + +void processConnection( + ref store, + FdSource & from, + FdSink & to, + TrustedFlag trusted, + RecursiveFlag recursive, + const std::string & userName, + uid_t userId); + +} diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 3c5205a87..6eb60cb8b 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -19,24 +19,40 @@ std::pair DerivationOutput::parseHashInfo() const HashType hashType = parseHashType(recursive ? string(algo, 2) : algo); if (hashType == htUnknown) - throw Error(format("unknown hash algorithm '%1%'") % algo); + throw Error("unknown hash algorithm '%s'", algo); return {recursive, Hash(this->hash, hashType)}; } -Path BasicDerivation::findOutput(const string & id) const +BasicDerivation::BasicDerivation(const BasicDerivation & other) + : platform(other.platform) + , builder(other.builder) + , args(other.args) + , env(other.env) { - auto i = outputs.find(id); - if (i == outputs.end()) - throw Error(format("derivation has no output '%1%'") % id); - return i->second.path; + for (auto & i : other.outputs) + outputs.insert_or_assign(i.first, + DerivationOutput(i.second.path.clone(), std::string(i.second.hashAlgo), std::string(i.second.hash))); + for (auto & i : other.inputSrcs) + inputSrcs.insert(i.clone()); } -bool BasicDerivation::substitutesAllowed() const +Derivation::Derivation(const Derivation & other) + : BasicDerivation(other) { - return get(env, "allowSubstitutes", "1") == "1"; + for (auto & i : other.inputDrvs) + inputDrvs.insert_or_assign(i.first.clone(), i.second); +} + + +const StorePath & BasicDerivation::findOutput(const string & id) const +{ + auto i = outputs.find(id); + if (i == outputs.end()) + throw Error("derivation has no output '%s'", id); + return i->second.path; } @@ -46,18 +62,17 @@ bool BasicDerivation::isBuiltin() const } -Path writeDerivation(ref store, +StorePath writeDerivation(ref store, const Derivation & drv, const string & name, RepairFlag repair) { - PathSet references; - references.insert(drv.inputSrcs.begin(), drv.inputSrcs.end()); + auto references = cloneStorePathSet(drv.inputSrcs); for (auto & i : drv.inputDrvs) - references.insert(i.first); + references.insert(i.first.clone()); /* Note that the outputs of a derivation are *not* references (that can be missing (of course) and should not necessarily be held during a garbage collection). */ string suffix = name + drvExtension; - string contents = drv.unparse(); + string contents = drv.unparse(*store, false); return settings.readOnlyMode ? 
store->computeStorePathForText(suffix, contents, references) : store->addTextToStore(suffix, contents, references, repair); @@ -125,7 +140,7 @@ static StringSet parseStrings(std::istream & str, bool arePaths) } -static Derivation parseDerivation(const string & s) +static Derivation parseDerivation(const Store & store, const string & s) { Derivation drv; istringstream_nocopy str(s); @@ -133,13 +148,12 @@ static Derivation parseDerivation(const string & s) /* Parse the list of outputs. */ while (!endOfList(str)) { - DerivationOutput out; - expect(str, "("); string id = parseString(str); - expect(str, ","); out.path = parsePath(str); - expect(str, ","); out.hashAlgo = parseString(str); - expect(str, ","); out.hash = parseString(str); + expect(str, "("); std::string id = parseString(str); + expect(str, ","); auto path = store.parseStorePath(parsePath(str)); + expect(str, ","); auto hashAlgo = parseString(str); + expect(str, ","); auto hash = parseString(str); expect(str, ")"); - drv.outputs[id] = out; + drv.outputs.emplace(id, DerivationOutput(std::move(path), std::move(hashAlgo), std::move(hash))); } /* Parse the list of input derivations. */ @@ -148,11 +162,11 @@ static Derivation parseDerivation(const string & s) expect(str, "("); Path drvPath = parsePath(str); expect(str, ",["); - drv.inputDrvs[drvPath] = parseStrings(str, false); + drv.inputDrvs.insert_or_assign(store.parseStorePath(drvPath), parseStrings(str, false)); expect(str, ")"); } - expect(str, ",["); drv.inputSrcs = parseStrings(str, true); + expect(str, ",["); drv.inputSrcs = store.parseStorePathSet(parseStrings(str, true)); expect(str, ","); drv.platform = parseString(str); expect(str, ","); drv.builder = parseString(str); @@ -175,25 +189,24 @@ static Derivation parseDerivation(const string & s) } -Derivation readDerivation(const Path & drvPath) +Derivation readDerivation(const Store & store, const Path & drvPath) { try { - return parseDerivation(readFile(drvPath)); + return parseDerivation(store, readFile(drvPath)); } catch (FormatError & e) { throw Error(format("error parsing derivation '%1%': %2%") % drvPath % e.msg()); } } -Derivation Store::derivationFromPath(const Path & drvPath) +Derivation Store::derivationFromPath(const StorePath & drvPath) { - assertStorePath(drvPath); ensurePath(drvPath); auto accessor = getFSAccessor(); try { - return parseDerivation(accessor->readFile(drvPath)); + return parseDerivation(*this, accessor->readFile(printStorePath(drvPath))); } catch (FormatError & e) { - throw Error(format("error parsing derivation '%1%': %2%") % drvPath % e.msg()); + throw Error("error parsing derivation '%s': %s", printStorePath(drvPath), e.msg()); } } @@ -224,7 +237,8 @@ static void printStrings(string & res, ForwardIterator i, ForwardIterator j) } -string Derivation::unparse() const +string Derivation::unparse(const Store & store, bool maskOutputs, + std::map * actualInputs) const { string s; s.reserve(65536); @@ -234,7 +248,7 @@ string Derivation::unparse() const for (auto & i : outputs) { if (first) first = false; else s += ','; s += '('; printString(s, i.first); - s += ','; printString(s, i.second.path); + s += ','; printString(s, maskOutputs ? 
"" : store.printStorePath(i.second.path)); s += ','; printString(s, i.second.hashAlgo); s += ','; printString(s, i.second.hash); s += ')'; @@ -242,15 +256,25 @@ string Derivation::unparse() const s += "],["; first = true; - for (auto & i : inputDrvs) { - if (first) first = false; else s += ','; - s += '('; printString(s, i.first); - s += ','; printStrings(s, i.second.begin(), i.second.end()); - s += ')'; + if (actualInputs) { + for (auto & i : *actualInputs) { + if (first) first = false; else s += ','; + s += '('; printString(s, i.first); + s += ','; printStrings(s, i.second.begin(), i.second.end()); + s += ')'; + } + } else { + for (auto & i : inputDrvs) { + if (first) first = false; else s += ','; + s += '('; printString(s, store.printStorePath(i.first)); + s += ','; printStrings(s, i.second.begin(), i.second.end()); + s += ')'; + } } s += "],"; - printStrings(s, inputSrcs.begin(), inputSrcs.end()); + auto paths = store.printStorePathSet(inputSrcs); // FIXME: slow + printStrings(s, paths.begin(), paths.end()); s += ','; printString(s, platform); s += ','; printString(s, builder); @@ -261,7 +285,7 @@ string Derivation::unparse() const for (auto & i : env) { if (first) first = false; else s += ','; s += '('; printString(s, i.first); - s += ','; printString(s, i.second); + s += ','; printString(s, maskOutputs && outputs.count(i.first) ? "" : i.second); s += ')'; } @@ -271,6 +295,7 @@ string Derivation::unparse() const } +// FIXME: remove bool isDerivation(const string & fileName) { return hasSuffix(fileName, drvExtension); @@ -308,7 +333,7 @@ DrvHashes drvHashes; paths have been replaced by the result of a recursive call to this function, and that for fixed-output derivations we return a hash of its output path. */ -Hash hashDerivationModulo(Store & store, Derivation drv) +Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs) { /* Return a fixed hash for fixed-output derivations. */ if (drv.isFixedOutput()) { @@ -316,42 +341,31 @@ Hash hashDerivationModulo(Store & store, Derivation drv) return hashString(htSHA256, "fixed:out:" + i->second.hashAlgo + ":" + i->second.hash + ":" - + i->second.path); + + store.printStorePath(i->second.path)); } /* For other derivations, replace the inputs paths with recursive calls to this function.*/ - DerivationInputs inputs2; + std::map inputs2; for (auto & i : drv.inputDrvs) { - Hash h = drvHashes[i.first]; - if (!h) { + auto h = drvHashes.find(i.first); + if (h == drvHashes.end()) { assert(store.isValidPath(i.first)); - Derivation drv2 = readDerivation(store.toRealPath(i.first)); - h = hashDerivationModulo(store, drv2); - drvHashes[i.first] = h; + h = drvHashes.insert_or_assign(i.first.clone(), hashDerivationModulo(store, + readDerivation(store, store.toRealPath(store.printStorePath(i.first))), false)).first; } - inputs2[h.to_string(Base16, false)] = i.second; + inputs2.insert_or_assign(h->second.to_string(Base16, false), i.second); } - drv.inputDrvs = inputs2; - return hashString(htSHA256, drv.unparse()); + return hashString(htSHA256, drv.unparse(store, maskOutputs, &inputs2)); } -DrvPathWithOutputs parseDrvPathWithOutputs(const string & s) -{ - size_t n = s.find("!"); - return n == s.npos - ? DrvPathWithOutputs(s, std::set()) - : DrvPathWithOutputs(string(s, 0, n), tokenizeString >(string(s, n + 1), ",")); -} - - -Path makeDrvPathWithOutputs(const Path & drvPath, const std::set & outputs) +std::string StorePathWithOutputs::to_string(const Store & store) const { return outputs.empty() - ? drvPath - : drvPath + "!" 
+ concatStringsSep(",", outputs); + ? store.printStorePath(path) + : store.printStorePath(path) + "!" + concatStringsSep(",", outputs); } @@ -361,28 +375,28 @@ bool wantOutput(const string & output, const std::set & wanted) } -PathSet BasicDerivation::outputPaths() const +StorePathSet BasicDerivation::outputPaths() const { - PathSet paths; + StorePathSet paths; for (auto & i : outputs) - paths.insert(i.second.path); + paths.insert(i.second.path.clone()); return paths; } -Source & readDerivation(Source & in, Store & store, BasicDerivation & drv) +Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv) { drv.outputs.clear(); auto nr = readNum(in); for (size_t n = 0; n < nr; n++) { auto name = readString(in); - DerivationOutput o; - in >> o.path >> o.hashAlgo >> o.hash; - store.assertStorePath(o.path); - drv.outputs[name] = o; + auto path = store.parseStorePath(readString(in)); + auto hashAlgo = readString(in); + auto hash = readString(in); + drv.outputs.emplace(name, DerivationOutput(std::move(path), std::move(hashAlgo), std::move(hash))); } - drv.inputSrcs = readStorePaths(store, in); + drv.inputSrcs = readStorePaths(store, in); in >> drv.platform >> drv.builder; drv.args = readStrings(in); @@ -397,16 +411,16 @@ Source & readDerivation(Source & in, Store & store, BasicDerivation & drv) } -Sink & operator << (Sink & out, const BasicDerivation & drv) +void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv) { out << drv.outputs.size(); for (auto & i : drv.outputs) - out << i.first << i.second.path << i.second.hashAlgo << i.second.hash; - out << drv.inputSrcs << drv.platform << drv.builder << drv.args; + out << i.first << store.printStorePath(i.second.path) << i.second.hashAlgo << i.second.hash; + writeStorePaths(store, out, drv.inputSrcs); + out << drv.platform << drv.builder << drv.args; out << drv.env.size(); for (auto & i : drv.env) out << i.first << i.second; - return out; } diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index b21713bad..39e671d74 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -10,26 +10,18 @@ namespace nix { -/* Extension of derivations in the Nix store. */ -const string drvExtension = ".drv"; - - /* Abstract syntax of derivations. */ struct DerivationOutput { - Path path; - string hashAlgo; /* hash used for expected hash computation */ - string hash; /* expected hash, may be null */ - DerivationOutput() - { - } - DerivationOutput(Path path, string hashAlgo, string hash) - { - this->path = path; - this->hashAlgo = hashAlgo; - this->hash = hash; - } + StorePath path; + std::string hashAlgo; /* hash used for expected hash computation */ + std::string hash; /* expected hash, may be null */ + DerivationOutput(StorePath && path, std::string && hashAlgo, std::string && hash) + : path(std::move(path)) + , hashAlgo(std::move(hashAlgo)) + , hash(std::move(hash)) + { } std::pair parseHashInfo() const; }; @@ -37,26 +29,26 @@ typedef std::map DerivationOutputs; /* For inputs that are sub-derivations, we specify exactly which output IDs we are interested in. 
*/ -typedef std::map DerivationInputs; +typedef std::map DerivationInputs; typedef std::map StringPairs; struct BasicDerivation { DerivationOutputs outputs; /* keyed on symbolic IDs */ - PathSet inputSrcs; /* inputs that are sources */ + StorePathSet inputSrcs; /* inputs that are sources */ string platform; Path builder; Strings args; StringPairs env; + BasicDerivation() { } + explicit BasicDerivation(const BasicDerivation & other); virtual ~BasicDerivation() { }; /* Return the path corresponding to the output identifier `id' in the given derivation. */ - Path findOutput(const string & id) const; - - bool substitutesAllowed() const; + const StorePath & findOutput(const std::string & id) const; bool isBuiltin() const; @@ -64,7 +56,7 @@ struct BasicDerivation bool isFixedOutput() const; /* Return the output paths of a derivation. */ - PathSet outputPaths() const; + StorePathSet outputPaths() const; }; @@ -73,7 +65,12 @@ struct Derivation : BasicDerivation DerivationInputs inputDrvs; /* inputs that are sub-derivations */ /* Print a derivation. */ - std::string unparse() const; + std::string unparse(const Store & store, bool maskOutputs, + std::map * actualInputs = nullptr) const; + + Derivation() { } + Derivation(Derivation && other) = default; + explicit Derivation(const Derivation & other); }; @@ -81,38 +78,29 @@ class Store; /* Write a derivation to the Nix store, and return its path. */ -Path writeDerivation(ref store, +StorePath writeDerivation(ref store, const Derivation & drv, const string & name, RepairFlag repair = NoRepair); /* Read a derivation from a file. */ -Derivation readDerivation(const Path & drvPath); +Derivation readDerivation(const Store & store, const Path & drvPath); -/* Check whether a file name ends with the extension for - derivations. */ +// FIXME: remove bool isDerivation(const string & fileName); -Hash hashDerivationModulo(Store & store, Derivation drv); +Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs); /* Memoisation of hashDerivationModulo(). */ -typedef std::map DrvHashes; +typedef std::map DrvHashes; extern DrvHashes drvHashes; // FIXME: global, not thread-safe -/* Split a string specifying a derivation and a set of outputs - (/nix/store/hash-foo!out1,out2,...) into the derivation path and - the outputs. 
*/ -typedef std::pair > DrvPathWithOutputs; -DrvPathWithOutputs parseDrvPathWithOutputs(const string & s); - -Path makeDrvPathWithOutputs(const Path & drvPath, const std::set & outputs); - bool wantOutput(const string & output, const std::set & wanted); struct Source; struct Sink; -Source & readDerivation(Source & in, Store & store, BasicDerivation & drv); -Sink & operator << (Sink & out, const BasicDerivation & drv); +Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv); +void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv); std::string hashPlaceholder(const std::string & outputName); diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 22382ab1d..149c84765 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -8,6 +8,7 @@ #include "compression.hh" #include "pathlocks.hh" #include "finally.hh" +#include "tarfile.hh" #ifdef ENABLE_S3 #include @@ -30,26 +31,14 @@ using namespace std::string_literals; namespace nix { -struct DownloadSettings : Config -{ - Setting enableHttp2{this, true, "http2", - "Whether to enable HTTP/2 support."}; - - Setting userAgentSuffix{this, "", "user-agent-suffix", - "String appended to the user agent in HTTP requests."}; - - Setting httpConnections{this, 25, "http-connections", - "Number of parallel HTTP connections.", - {"binary-caches-parallel-connections"}}; - - Setting connectTimeout{this, 0, "connect-timeout", - "Timeout for connecting to servers during downloads. 0 means use curl's builtin default."}; -}; - -static DownloadSettings downloadSettings; +DownloadSettings downloadSettings; static GlobalConfig::Register r1(&downloadSettings); +CachedDownloadRequest::CachedDownloadRequest(const std::string & uri) + : uri(uri), ttl(settings.tarballTtl) +{ } + std::string resolveUri(const std::string & uri) { if (uri.compare(0, 8, "channel:") == 0) @@ -87,19 +76,24 @@ struct CurlDownloader : public Downloader std::string encoding; + bool acceptRanges = false; + + curl_off_t writtenToSink = 0; + DownloadItem(CurlDownloader & downloader, const DownloadRequest & request, - Callback callback) + Callback && callback) : downloader(downloader) , request(request) , act(*logger, lvlTalkative, actDownload, fmt(request.data ? "uploading '%s'" : "downloading '%s'", request.uri), {request.uri}, request.parentAct) - , callback(callback) + , callback(std::move(callback)) , finalSink([this](const unsigned char * data, size_t len) { - if (this->request.dataCallback) + if (this->request.dataCallback) { + writtenToSink += len; this->request.dataCallback((char *) data, len); - else + } else this->result.data->append((char *) data, len); }) { @@ -177,6 +171,7 @@ struct CurlDownloader : public Downloader status = ss.size() >= 2 ? 
ss[1] : ""; result.data = std::make_shared(); result.bodySize = 0; + acceptRanges = false; encoding = ""; } else { auto i = line.find(':'); @@ -194,7 +189,9 @@ struct CurlDownloader : public Downloader return 0; } } else if (name == "content-encoding") - encoding = trim(string(line, i + 1));; + encoding = trim(string(line, i + 1)); + else if (name == "accept-ranges" && toLower(trim(std::string(line, i + 1))) == "bytes") + acceptRanges = true; } } return realSize; @@ -244,8 +241,6 @@ struct CurlDownloader : public Downloader return ((DownloadItem *) userp)->readCallback(buffer, size, nitems); } - long lowSpeedTimeout = 300; - void init() { if (!req) req = curl_easy_init(); @@ -270,6 +265,8 @@ struct CurlDownloader : public Downloader #if LIBCURL_VERSION_NUM >= 0x072f00 if (downloadSettings.enableHttp2) curl_easy_setopt(req, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2TLS); + else + curl_easy_setopt(req, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_1_1); #endif curl_easy_setopt(req, CURLOPT_WRITEFUNCTION, DownloadItem::writeCallbackWrapper); curl_easy_setopt(req, CURLOPT_WRITEDATA, this); @@ -293,6 +290,7 @@ struct CurlDownloader : public Downloader } if (request.verifyTLS) { + debug("verify TLS: Nix CA file = '%s'", settings.caFile); if (settings.caFile != "") curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.c_str()); } else { @@ -303,13 +301,16 @@ struct CurlDownloader : public Downloader curl_easy_setopt(req, CURLOPT_CONNECTTIMEOUT, downloadSettings.connectTimeout.get()); curl_easy_setopt(req, CURLOPT_LOW_SPEED_LIMIT, 1L); - curl_easy_setopt(req, CURLOPT_LOW_SPEED_TIME, lowSpeedTimeout); + curl_easy_setopt(req, CURLOPT_LOW_SPEED_TIME, downloadSettings.stalledDownloadTimeout.get()); /* If no file exist in the specified path, curl continues to work anyway as if netrc support was disabled. */ curl_easy_setopt(req, CURLOPT_NETRC_FILE, settings.netrcFile.get().c_str()); curl_easy_setopt(req, CURLOPT_NETRC, CURL_NETRC_OPTIONAL); + if (writtenToSink) + curl_easy_setopt(req, CURLOPT_RESUME_FROM_LARGE, writtenToSink); + result.data = std::make_shared(); result.bodySize = 0; } @@ -319,16 +320,21 @@ struct CurlDownloader : public Downloader long httpStatus = 0; curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus); - char * effectiveUrlCStr; - curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUrlCStr); - if (effectiveUrlCStr) - result.effectiveUrl = effectiveUrlCStr; + char * effectiveUriCStr; + curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUriCStr); + if (effectiveUriCStr) + result.effectiveUri = effectiveUriCStr; debug("finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes", request.verb(), request.uri, code, httpStatus, result.bodySize); - if (decompressionSink) - decompressionSink->finish(); + if (decompressionSink) { + try { + decompressionSink->finish(); + } catch (...) 
{ + writeException = std::current_exception(); + } + } if (code == CURLE_WRITE_ERROR && result.etag == request.expectedETag) { code = CURLE_OK; @@ -339,18 +345,12 @@ struct CurlDownloader : public Downloader failEx(writeException); else if (code == CURLE_OK && - (httpStatus == 200 || httpStatus == 201 || httpStatus == 204 || httpStatus == 304 || httpStatus == 226 /* FTP */ || httpStatus == 0 /* other protocol */)) + (httpStatus == 200 || httpStatus == 201 || httpStatus == 204 || httpStatus == 206 || httpStatus == 304 || httpStatus == 226 /* FTP */ || httpStatus == 0 /* other protocol */)) { result.cached = httpStatus == 304; + act.progress(result.bodySize, result.bodySize); done = true; - - try { - act.progress(result.bodySize, result.bodySize); - callback(std::move(result)); - } catch (...) { - done = true; - callback.rethrow(); - } + callback(std::move(result)); } else { @@ -363,9 +363,10 @@ struct CurlDownloader : public Downloader } else if (httpStatus == 401 || httpStatus == 403 || httpStatus == 407) { // Don't retry on authentication/authorization failures err = Forbidden; - } else if (httpStatus >= 400 && httpStatus < 500 && httpStatus != 408) { + } else if (httpStatus >= 400 && httpStatus < 500 && httpStatus != 408 && httpStatus != 429) { // Most 4xx errors are client errors and are probably not worth retrying: // * 408 means the server timed out waiting for us, so we try again + // * 429 means too many requests, so we retry (with a delay) err = Misc; } else if (httpStatus == 501 || httpStatus == 505 || httpStatus == 511) { // Let's treat most 5xx (server) errors as transient, except for a handful: @@ -412,10 +413,20 @@ struct CurlDownloader : public Downloader request.verb(), request.uri, curl_easy_strerror(code), code)); /* If this is a transient error, then maybe retry the - download after a while. */ - if (err == Transient && attempt < request.tries) { + download after a while. If we're writing to a + sink, we can only retry if the server supports + ranged requests. 
*/ + if (err == Transient + && attempt < request.tries + && (!this->request.dataCallback + || writtenToSink == 0 + || (acceptRanges && encoding.empty()))) + { int ms = request.baseRetryTimeMs * std::pow(2.0f, attempt - 1 + std::uniform_real_distribution<>(0.0, 0.5)(downloader.mt19937)); - printError(format("warning: %s; retrying in %d ms") % exc.what() % ms); + if (writtenToSink) + warn("%s; retrying from offset %d in %d ms", exc.what(), writtenToSink, ms); + else + warn("%s; retrying in %d ms", exc.what(), ms); embargo = std::chrono::steady_clock::now() + std::chrono::milliseconds(ms); downloader.enqueueItem(shared_from_this()); } @@ -588,7 +599,7 @@ struct CurlDownloader : public Downloader workerThreadMain(); } catch (nix::Interrupted & e) { } catch (std::exception & e) { - printError(format("unexpected error in download thread: %s") % e.what()); + printError("unexpected error in download thread: %s", e.what()); } { @@ -640,10 +651,10 @@ struct CurlDownloader : public Downloader #ifdef ENABLE_S3 auto [bucketName, key, params] = parseS3Uri(request.uri); - std::string profile = get(params, "profile", ""); - std::string region = get(params, "region", Aws::Region::US_EAST_1); - std::string scheme = get(params, "scheme", ""); - std::string endpoint = get(params, "endpoint", ""); + std::string profile = get(params, "profile").value_or(""); + std::string region = get(params, "region").value_or(Aws::Region::US_EAST_1); + std::string scheme = get(params, "scheme").value_or(""); + std::string endpoint = get(params, "endpoint").value_or(""); S3Helper s3Helper(profile, region, scheme, endpoint); @@ -661,7 +672,7 @@ struct CurlDownloader : public Downloader return; } - enqueueItem(std::make_shared(*this, request, callback)); + enqueueItem(std::make_shared(*this, request, std::move(callback))); } }; @@ -790,20 +801,26 @@ void Downloader::download(DownloadRequest && request, Sink & sink) } } -Path Downloader::downloadCached(ref store, const string & url_, bool unpack, string name, const Hash & expectedHash, string * effectiveUrl, int ttl) +CachedDownloadResult Downloader::downloadCached( + ref store, const CachedDownloadRequest & request) { - auto url = resolveUri(url_); + auto url = resolveUri(request.uri); + auto name = request.name; if (name == "") { auto p = url.rfind('/'); if (p != string::npos) name = string(url, p + 1); } - Path expectedStorePath; - if (expectedHash) { - expectedStorePath = store->makeFixedOutputPath(unpack, expectedHash, name); - if (store->isValidPath(expectedStorePath)) - return store->toRealPath(expectedStorePath); + std::optional expectedStorePath; + if (request.expectedHash) { + expectedStorePath = store->makeFixedOutputPath(request.unpack, request.expectedHash, name); + if (store->isValidPath(*expectedStorePath)) { + CachedDownloadResult result; + result.storePath = store->printStorePath(*expectedStorePath); + result.path = store->toRealPath(result.storePath); + return result; + } } Path cacheDir = getCacheDir() + "/nix/tarballs"; @@ -816,95 +833,105 @@ Path Downloader::downloadCached(ref store, const string & url_, bool unpa PathLocks lock({fileLink}, fmt("waiting for lock on '%1%'...", fileLink)); - Path storePath; + std::optional storePath; string expectedETag; bool skip = false; + CachedDownloadResult result; + if (pathExists(fileLink) && pathExists(dataFile)) { - storePath = readLink(fileLink); - store->addTempRoot(storePath); - if (store->isValidPath(storePath)) { + storePath = store->parseStorePath(readLink(fileLink)); + // FIXME + store->addTempRoot(*storePath); 
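// ---- Editorial note: illustrative only, not part of this patch ----------------
// Context for the cache lookup around this hunk: for each URL, the cache
// directory (getCacheDir() + "/nix/tarballs") keeps a symlink ('fileLink') to
// the downloaded store path plus a small metadata file ('dataFile') holding
// three newline-separated fields: URL, ETag, and the time of the last check
// (see the writeFile() call further down in this function). The addTempRoot()
// call above registers the cached path as a temporary GC root so it cannot be
// garbage-collected while it is being reused. A minimal sketch of reading that
// metadata (hypothetical helper; field layout as written by this function):
//
//     struct CacheMetadata { std::string url, etag; time_t lastChecked; };
//
//     std::optional<CacheMetadata> readCacheMetadata(const Path & dataFile)
//     {
//         auto ss = tokenizeString<std::vector<std::string>>(readFile(dataFile), "\n");
//         time_t t;
//         if (ss.size() < 3 || !string2Int(ss[2], t)) return std::nullopt;
//         return CacheMetadata{ss[0], ss[1], t};
//     }
// --------------------------------------------------------------------------------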
+        if (store->isValidPath(*storePath)) {
             auto ss = tokenizeString<std::vector<std::string>>(readFile(dataFile), "\n");
             if (ss.size() >= 3 && ss[0] == url) {
                 time_t lastChecked;
-                if (string2Int(ss[2], lastChecked) && lastChecked + ttl >= time(0)) {
+                if (string2Int(ss[2], lastChecked) && (uint64_t) lastChecked + request.ttl >= (uint64_t) time(0)) {
                     skip = true;
-                    if (effectiveUrl)
-                        *effectiveUrl = url_;
+                    result.effectiveUri = request.uri;
+                    result.etag = ss[1];
                 } else if (!ss[1].empty()) {
                     debug(format("verifying previous ETag '%1%'") % ss[1]);
                     expectedETag = ss[1];
                 }
             }
         } else
-            storePath = "";
+            storePath.reset();
     }
     if (!skip) {
         try {
-            DownloadRequest request(url);
-            request.expectedETag = expectedETag;
-            auto res = download(request);
-            if (effectiveUrl)
-                *effectiveUrl = res.effectiveUrl;
+            DownloadRequest request2(url);
+            request2.expectedETag = expectedETag;
+            auto res = download(request2);
+            result.effectiveUri = res.effectiveUri;
+            result.etag = res.etag;
             if (!res.cached) {
-                ValidPathInfo info;
                 StringSink sink;
                 dumpString(*res.data, sink);
-                Hash hash = hashString(expectedHash ? expectedHash.type : htSHA256, *res.data);
-                info.path = store->makeFixedOutputPath(false, hash, name);
+                Hash hash = hashString(request.expectedHash ? request.expectedHash.type : htSHA256, *res.data);
+                ValidPathInfo info(store->makeFixedOutputPath(false, hash, name));
                 info.narHash = hashString(htSHA256, *sink.s);
                 info.narSize = sink.s->size();
                 info.ca = makeFixedOutputCA(false, hash);
                 store->addToStore(info, sink.s, NoRepair, NoCheckSigs);
-                storePath = info.path;
+                storePath = info.path.clone();
             }
-            assert(!storePath.empty());
-            replaceSymlink(storePath, fileLink);
+            assert(storePath);
+            replaceSymlink(store->printStorePath(*storePath), fileLink);
             writeFile(dataFile, url + "\n" + res.etag + "\n" + std::to_string(time(0)) + "\n");
         } catch (DownloadError & e) {
-            if (storePath.empty()) throw;
-            printError(format("warning: %1%; using cached result") % e.msg());
+            if (!storePath) throw;
+            warn("warning: %s; using cached result", e.msg());
+            result.etag = expectedETag;
         }
     }
-    if (unpack) {
-        Path unpackedLink = cacheDir + "/" + baseNameOf(storePath) + "-unpacked";
+    if (request.unpack) {
+        Path unpackedLink = cacheDir + "/" + ((std::string) storePath->to_string()) + "-unpacked";
         PathLocks lock2({unpackedLink}, fmt("waiting for lock on '%1%'...", unpackedLink));
-        Path unpackedStorePath;
+        std::optional<StorePath> unpackedStorePath;
         if (pathExists(unpackedLink)) {
-            unpackedStorePath = readLink(unpackedLink);
-            store->addTempRoot(unpackedStorePath);
-            if (!store->isValidPath(unpackedStorePath))
-                unpackedStorePath = "";
+            unpackedStorePath = store->parseStorePath(readLink(unpackedLink));
+            // FIXME
+            store->addTempRoot(*unpackedStorePath);
+            if (!store->isValidPath(*unpackedStorePath))
+                unpackedStorePath.reset();
         }
-        if (unpackedStorePath.empty()) {
-            printInfo(format("unpacking '%1%'...") % url);
+        if (!unpackedStorePath) {
+            printInfo("unpacking '%s'...", url);
             Path tmpDir = createTempDir();
             AutoDelete autoDelete(tmpDir, true);
-            // FIXME: this requires GNU tar for decompression.
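// ---- Editorial note: illustrative only, not part of this patch ----------------
// The removed runProgram() call below shelled out to GNU tar and relied on
// '--strip-components 1' to drop the tarball's single top-level directory,
// roughly:
//
//     tar xf <tarball> -C <tmpDir> --strip-components 1
//
// The replacement ('+' lines below) unpacks with unpackTarfile() from the newly
// included "tarfile.hh" (backed by the libarchive dependency added in
// configure.ac), then descends into the single top-level entry itself, which is
// why it now errors out unless the tarball has exactly one top-level member.
// --------------------------------------------------------------------------------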
- runProgram("tar", true, {"xf", store->toRealPath(storePath), "-C", tmpDir, "--strip-components", "1"}); - unpackedStorePath = store->addToStore(name, tmpDir, true, htSHA256, defaultPathFilter, NoRepair); + unpackTarfile(store->toRealPath(store->printStorePath(*storePath)), tmpDir); + auto members = readDirectory(tmpDir); + if (members.size() != 1) + throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url); + auto topDir = tmpDir + "/" + members.begin()->name; + unpackedStorePath = store->addToStore(name, topDir, true, htSHA256, defaultPathFilter, NoRepair); } - replaceSymlink(unpackedStorePath, unpackedLink); - storePath = unpackedStorePath; + replaceSymlink(store->printStorePath(*unpackedStorePath), unpackedLink); + storePath = std::move(*unpackedStorePath); } - if (expectedStorePath != "" && storePath != expectedStorePath) { - Hash gotHash = unpack - ? hashPath(expectedHash.type, store->toRealPath(storePath)).first - : hashFile(expectedHash.type, store->toRealPath(storePath)); - throw nix::Error("hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s", - url, expectedHash.to_string(), gotHash.to_string()); + if (expectedStorePath && *storePath != *expectedStorePath) { + unsigned int statusCode = 102; + Hash gotHash = request.unpack + ? hashPath(request.expectedHash.type, store->toRealPath(store->printStorePath(*storePath))).first + : hashFile(request.expectedHash.type, store->toRealPath(store->printStorePath(*storePath))); + throw nix::Error(statusCode, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s", + url, request.expectedHash.to_string(), gotHash.to_string()); } - return store->toRealPath(storePath); + result.storePath = store->printStorePath(*storePath); + result.path = store->toRealPath(result.storePath); + return result; } diff --git a/src/libstore/download.hh b/src/libstore/download.hh index f0228f7d0..5a131c704 100644 --- a/src/libstore/download.hh +++ b/src/libstore/download.hh @@ -2,20 +2,44 @@ #include "types.hh" #include "hash.hh" -#include "globals.hh" +#include "config.hh" #include #include namespace nix { +struct DownloadSettings : Config +{ + Setting enableHttp2{this, true, "http2", + "Whether to enable HTTP/2 support."}; + + Setting userAgentSuffix{this, "", "user-agent-suffix", + "String appended to the user agent in HTTP requests."}; + + Setting httpConnections{this, 25, "http-connections", + "Number of parallel HTTP connections.", + {"binary-caches-parallel-connections"}}; + + Setting connectTimeout{this, 0, "connect-timeout", + "Timeout for connecting to servers during downloads. 0 means use curl's builtin default."}; + + Setting stalledDownloadTimeout{this, 300, "stalled-download-timeout", + "Timeout (in seconds) for receiving data from servers during download. 
Nix cancels idle downloads after this timeout's duration."}; + + Setting tries{this, 5, "download-attempts", + "How often Nix will attempt to download a file before giving up."}; +}; + +extern DownloadSettings downloadSettings; + struct DownloadRequest { std::string uri; std::string expectedETag; bool verifyTLS = true; bool head = false; - size_t tries = 5; + size_t tries = downloadSettings.tries; unsigned int baseRetryTimeMs = 250; ActivityId parentAct; bool decompress = true; @@ -36,15 +60,39 @@ struct DownloadResult { bool cached = false; std::string etag; - std::string effectiveUrl; + std::string effectiveUri; std::shared_ptr data; uint64_t bodySize = 0; }; +struct CachedDownloadRequest +{ + std::string uri; + bool unpack = false; + std::string name; + Hash expectedHash; + unsigned int ttl; + + CachedDownloadRequest(const std::string & uri); + CachedDownloadRequest() = delete; +}; + +struct CachedDownloadResult +{ + // Note: 'storePath' may be different from 'path' when using a + // chroot store. + Path storePath; + Path path; + std::optional etag; + std::string effectiveUri; +}; + class Store; struct Downloader { + virtual ~Downloader() { } + /* Enqueue a download request, returning a future to the result of the download. The future may throw a DownloadError exception. */ @@ -64,8 +112,7 @@ struct Downloader and is more recent than ‘tarball-ttl’ seconds. Otherwise, use the recorded ETag to verify if the server has a more recent version, and if so, download it to the Nix store. */ - Path downloadCached(ref store, const string & uri, bool unpack, string name = "", - const Hash & expectedHash = Hash(), string * effectiveUri = nullptr, int ttl = settings.tarballTtl); + CachedDownloadResult downloadCached(ref store, const CachedDownloadRequest & request); enum Error { NotFound, Forbidden, Misc, Transient, Interrupted }; }; diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index ef3fea7c8..4692d1a7b 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -24,9 +24,9 @@ struct HashAndWriteSink : Sink } }; -void Store::exportPaths(const Paths & paths, Sink & sink) +void Store::exportPaths(const StorePathSet & paths, Sink & sink) { - Paths sorted = topoSortPaths(PathSet(paths.begin(), paths.end())); + auto sorted = topoSortPaths(paths); std::reverse(sorted.begin(), sorted.end()); std::string doneLabel("paths exported"); @@ -42,7 +42,7 @@ void Store::exportPaths(const Paths & paths, Sink & sink) sink << 0; } -void Store::exportPath(const Path & path, Sink & sink) +void Store::exportPath(const StorePath & path, Sink & sink) { auto info = queryPathInfo(path); @@ -55,15 +55,21 @@ void Store::exportPath(const Path & path, Sink & sink) Don't complain if the stored hash is zero (unknown). */ Hash hash = hashAndWriteSink.currentHash(); if (hash != info->narHash && info->narHash != Hash(info->narHash.type)) - throw Error(format("hash of path '%1%' has changed from '%2%' to '%3%'!") % path - % info->narHash.to_string() % hash.to_string()); + throw Error("hash of path '%s' has changed from '%s' to '%s'!", + printStorePath(path), info->narHash.to_string(), hash.to_string()); - hashAndWriteSink << exportMagic << path << info->references << info->deriver << 0; + hashAndWriteSink + << exportMagic + << printStorePath(path); + writeStorePaths(*this, hashAndWriteSink, info->references); + hashAndWriteSink + << (info->deriver ? 
printStorePath(*info->deriver) : "") + << 0; } -Paths Store::importPaths(Source & source, std::shared_ptr accessor, CheckSigsFlag checkSigs) +StorePaths Store::importPaths(Source & source, std::shared_ptr accessor, CheckSigsFlag checkSigs) { - Paths res; + StorePaths res; while (true) { auto n = readNum(source); if (n == 0) break; @@ -77,16 +83,15 @@ Paths Store::importPaths(Source & source, std::shared_ptr accessor, if (magic != exportMagic) throw Error("Nix archive cannot be imported; wrong format"); - ValidPathInfo info; - - info.path = readStorePath(*this, source); + ValidPathInfo info(parseStorePath(readString(source))); //Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path); - info.references = readStorePaths(*this, source); + info.references = readStorePaths(*this, source); - info.deriver = readString(source); - if (info.deriver != "") assertStorePath(info.deriver); + auto deriver = readString(source); + if (deriver != "") + info.deriver = parseStorePath(deriver); info.narHash = hashString(htSHA256, *tee.source.data); info.narSize = tee.source.data->size(); @@ -97,7 +102,7 @@ Paths Store::importPaths(Source & source, std::shared_ptr accessor, addToStore(info, tee.source.data, NoRepair, checkSigs, accessor); - res.push_back(info.path); + res.push_back(info.path.clone()); } return res; diff --git a/src/libstore/fs-accessor.hh b/src/libstore/fs-accessor.hh index f703e1d15..64780a6da 100644 --- a/src/libstore/fs-accessor.hh +++ b/src/libstore/fs-accessor.hh @@ -19,6 +19,8 @@ public: uint64_t narOffset = 0; // regular files only }; + virtual ~FSAccessor() { } + virtual Stat stat(const Path & path) = 0; virtual StringSet readDirectory(const Path & path) = 0; diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index d8a5da0d4..690febc5b 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -29,7 +29,7 @@ static string gcRootsDir = "gcroots"; read. To be precise: when they try to create a new temporary root file, they will block until the garbage collector has finished / yielded the GC lock. */ -int LocalStore::openGCLock(LockType lockType) +AutoCloseFD LocalStore::openGCLock(LockType lockType) { Path fnGCLock = (format("%1%/%2%") % stateDir % gcLockName).str(); @@ -49,7 +49,7 @@ int LocalStore::openGCLock(LockType lockType) process that can open the file for reading can DoS the collector. */ - return fdGCLock.release(); + return fdGCLock; } @@ -85,12 +85,10 @@ void LocalStore::addIndirectRoot(const Path & path) } -Path LocalFSStore::addPermRoot(const Path & _storePath, +Path LocalFSStore::addPermRoot(const StorePath & storePath, const Path & _gcRoot, bool indirect, bool allowOutsideRootsDir) { - Path storePath(canonPath(_storePath)); Path gcRoot(canonPath(_gcRoot)); - assertStorePath(storePath); if (isInStore(gcRoot)) throw Error(format( @@ -102,7 +100,7 @@ Path LocalFSStore::addPermRoot(const Path & _storePath, point to the Nix store. 
*/ if (pathExists(gcRoot) && (!isLink(gcRoot) || !isInStore(readLink(gcRoot)))) throw Error(format("cannot create symlink '%1%'; already exists") % gcRoot); - makeSymlink(gcRoot, storePath); + makeSymlink(gcRoot, printStorePath(storePath)); addIndirectRoot(gcRoot); } @@ -117,10 +115,10 @@ Path LocalFSStore::addPermRoot(const Path & _storePath, % gcRoot % rootsDir); } - if (baseNameOf(gcRoot) == baseNameOf(storePath)) + if (baseNameOf(gcRoot) == std::string(storePath.to_string())) writeFile(gcRoot, ""); else - makeSymlink(gcRoot, storePath); + makeSymlink(gcRoot, printStorePath(storePath)); } /* Check that the root can be found by the garbage collector. @@ -129,13 +127,12 @@ Path LocalFSStore::addPermRoot(const Path & _storePath, check if the root is in a directory in or linked from the gcroots directory. */ if (settings.checkRootReachability) { - Roots roots = findRoots(false); - if (roots[storePath].count(gcRoot) == 0) + auto roots = findRoots(false); + if (roots[storePath.clone()].count(gcRoot) == 0) printError( - format( - "warning: '%1%' is not in a directory where the garbage collector looks for roots; " - "therefore, '%2%' might be removed by the garbage collector") - % gcRoot % storePath); + "warning: '%1%' is not in a directory where the garbage collector looks for roots; " + "therefore, '%2%' might be removed by the garbage collector", + gcRoot, printStorePath(storePath)); } /* Grab the global GC root, causing us to block while a GC is in @@ -147,7 +144,7 @@ Path LocalFSStore::addPermRoot(const Path & _storePath, } -void LocalStore::addTempRoot(const Path & path) +void LocalStore::addTempRoot(const StorePath & path) { auto state(_state.lock()); @@ -188,7 +185,7 @@ void LocalStore::addTempRoot(const Path & path) debug(format("acquiring write lock on '%1%'") % fnTempRoots); lockFile(state->fdTempRoots.get(), ltWrite, true); - string s = path + '\0'; + string s = printStorePath(path) + '\0'; writeFull(state->fdTempRoots.get(), s); /* Downgrade to a read lock. */ @@ -221,26 +218,22 @@ void LocalStore::findTempRoots(FDs & fds, Roots & tempRoots, bool censor) //FDPtr fd(new AutoCloseFD(openLockFile(path, false))); //if (*fd == -1) continue; - if (path != fnTempRoots) { - - /* Try to acquire a write lock without blocking. This can - only succeed if the owning process has died. In that case - we don't care about its temporary roots. */ - if (lockFile(fd->get(), ltWrite, false)) { - printError(format("removing stale temporary roots file '%1%'") % path); - unlink(path.c_str()); - writeFull(fd->get(), "d"); - continue; - } - - /* Acquire a read lock. This will prevent the owning process - from upgrading to a write lock, therefore it will block in - addTempRoot(). */ - debug(format("waiting for read lock on '%1%'") % path); - lockFile(fd->get(), ltRead, true); - + /* Try to acquire a write lock without blocking. This can + only succeed if the owning process has died. In that case + we don't care about its temporary roots. */ + if (lockFile(fd->get(), ltWrite, false)) { + printError(format("removing stale temporary roots file '%1%'") % path); + unlink(path.c_str()); + writeFull(fd->get(), "d"); + continue; } + /* Acquire a read lock. This will prevent the owning process + from upgrading to a write lock, therefore it will block in + addTempRoot(). */ + debug(format("waiting for read lock on '%1%'") % path); + lockFile(fd->get(), ltRead, true); + /* Read the entire file. 
*/ string contents = readFile(fd->get()); @@ -250,8 +243,7 @@ void LocalStore::findTempRoots(FDs & fds, Roots & tempRoots, bool censor) while ((end = contents.find((char) 0, pos)) != string::npos) { Path root(contents, pos, end - pos); debug("got temporary root '%s'", root); - assertStorePath(root); - tempRoots[root].emplace(censor ? censored : fmt("{temp:%d}", pid)); + tempRoots[parseStorePath(root)].emplace(censor ? censored : fmt("{temp:%d}", pid)); pos = end + 1; } @@ -264,10 +256,11 @@ void LocalStore::findRoots(const Path & path, unsigned char type, Roots & roots) { auto foundRoot = [&](const Path & path, const Path & target) { Path storePath = toStorePath(target); - if (isStorePath(storePath) && isValidPath(storePath)) - roots[storePath].emplace(path); + // FIXME + if (isStorePath(storePath) && isValidPath(parseStorePath(storePath))) + roots[parseStorePath(storePath)].emplace(path); else - printInfo(format("skipping invalid root from '%1%' to '%2%'") % path % storePath); + printInfo("skipping invalid root from '%1%' to '%2%'", path, storePath); }; try { @@ -303,9 +296,10 @@ void LocalStore::findRoots(const Path & path, unsigned char type, Roots & roots) } else if (type == DT_REG) { - Path storePath = storeDir + "/" + baseNameOf(path); - if (isStorePath(storePath) && isValidPath(storePath)) - roots[storePath].emplace(path); + Path storePath = storeDir + "/" + std::string(baseNameOf(path)); + // FIXME + if (isStorePath(storePath) && isValidPath(parseStorePath(storePath))) + roots[parseStorePath(storePath)].emplace(path); } } @@ -313,7 +307,7 @@ void LocalStore::findRoots(const Path & path, unsigned char type, Roots & roots) catch (SysError & e) { /* We only ignore permanent failures. */ if (e.errNo == EACCES || e.errNo == ENOENT || e.errNo == ENOTDIR) - printInfo(format("cannot read potential root '%1%'") % path); + printInfo("cannot read potential root '%1%'", path); else throw; } @@ -326,10 +320,9 @@ void LocalStore::findRootsNoTemp(Roots & roots, bool censor) findRoots(stateDir + "/" + gcRootsDir, DT_UNKNOWN, roots); findRoots(stateDir + "/profiles", DT_UNKNOWN, roots); - /* Add additional roots returned by the program specified by the - NIX_ROOT_FINDER environment variable. This is typically used - to add running programs to the set of roots (to prevent them - from being garbage collected). */ + /* Add additional roots returned by different platforms-specific + heuristics. This is typically used to add running programs to + the set of roots (to prevent them from being garbage collected). */ findRuntimeRoots(roots, censor); } @@ -345,7 +338,9 @@ Roots LocalStore::findRoots(bool censor) return roots; } -static void readProcLink(const string & file, Roots & roots) +typedef std::unordered_map> UncheckedRoots; + +static void readProcLink(const string & file, UncheckedRoots & roots) { /* 64 is the starting buffer size gnu readlink uses... 
*/ auto bufsiz = ssize_t{64}; @@ -374,7 +369,7 @@ static string quoteRegexChars(const string & raw) return std::regex_replace(raw, specialRegex, R"(\$&)"); } -static void readFileRoots(const char * path, Roots & roots) +static void readFileRoots(const char * path, UncheckedRoots & roots) { try { roots[readFile(path)].emplace(path); @@ -386,7 +381,7 @@ static void readFileRoots(const char * path, Roots & roots) void LocalStore::findRuntimeRoots(Roots & roots, bool censor) { - Roots unchecked; + UncheckedRoots unchecked; auto procDir = AutoCloseDir{opendir("/proc")}; if (procDir) { @@ -445,17 +440,22 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor) } #if !defined(__linux__) - try { - std::regex lsofRegex(R"(^n(/.*)$)"); - auto lsofLines = - tokenizeString>(runProgram(LSOF, true, { "-n", "-w", "-F", "n" }), "\n"); - for (const auto & line : lsofLines) { - std::smatch match; - if (std::regex_match(line, match, lsofRegex)) - unchecked[match[1]].emplace("{lsof}"); + // lsof is really slow on OS X. This actually causes the gc-concurrent.sh test to fail. + // See: https://github.com/NixOS/nix/issues/3011 + // Because of this we disable lsof when running the tests. + if (getEnv("_NIX_TEST_NO_LSOF") != "1") { + try { + std::regex lsofRegex(R"(^n(/.*)$)"); + auto lsofLines = + tokenizeString>(runProgram(LSOF, true, { "-n", "-w", "-F", "n" }), "\n"); + for (const auto & line : lsofLines) { + std::smatch match; + if (std::regex_match(line, match, lsofRegex)) + unchecked[match[1]].emplace("{lsof}"); + } + } catch (ExecError & e) { + /* lsof not installed, lsof failed */ } - } catch (ExecError & e) { - /* lsof not installed, lsof failed */ } #endif @@ -466,16 +466,16 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor) #endif for (auto & [target, links] : unchecked) { - if (isInStore(target)) { - Path path = toStorePath(target); - if (isStorePath(path) && isValidPath(path)) { - debug(format("got additional root '%1%'") % path); - if (censor) - roots[path].insert(censored); - else - roots[path].insert(links.begin(), links.end()); - } - } + if (!isInStore(target)) continue; + Path pathS = toStorePath(target); + if (!isStorePath(pathS)) continue; + auto path = parseStorePath(pathS); + if (!isValidPath(path)) continue; + debug("got additional root '%1%'", pathS); + if (censor) + roots[path.clone()].insert(censored); + else + roots[path.clone()].insert(links.begin(), links.end()); } } @@ -485,18 +485,19 @@ struct GCLimitReached { }; struct LocalStore::GCState { - GCOptions options; + const GCOptions & options; GCResults & results; - PathSet roots; - PathSet tempRoots; - PathSet dead; - PathSet alive; + StorePathSet roots; + StorePathSet tempRoots; + StorePathSet dead; + StorePathSet alive; bool gcKeepOutputs; bool gcKeepDerivations; unsigned long long bytesInvalidated; bool moveToTrash = true; bool shouldDelete; - GCState(GCResults & results_) : results(results_), bytesInvalidated(0) { } + GCState(const GCOptions & options, GCResults & results) + : options(options), results(results), bytesInvalidated(0) { } }; @@ -504,7 +505,7 @@ bool LocalStore::isActiveTempFile(const GCState & state, const Path & path, const string & suffix) { return hasSuffix(path, suffix) - && state.tempRoots.find(string(path, 0, path.size() - suffix.size())) != state.tempRoots.end(); + && state.tempRoots.count(parseStorePath(string(path, 0, path.size() - suffix.size()))); } @@ -522,16 +523,17 @@ void LocalStore::deletePathRecursive(GCState & state, const Path & path) unsigned long long size = 0; - if 
(isStorePath(path) && isValidPath(path)) { - PathSet referrers; - queryReferrers(path, referrers); + // FIXME + if (isStorePath(path) && isValidPath(parseStorePath(path))) { + StorePathSet referrers; + queryReferrers(parseStorePath(path), referrers); for (auto & i : referrers) - if (i != path) deletePathRecursive(state, i); - size = queryPathInfo(path)->narSize; - invalidatePathChecked(path); + if (printStorePath(i) != path) deletePathRecursive(state, printStorePath(i)); + size = queryPathInfo(parseStorePath(path))->narSize; + invalidatePathChecked(parseStorePath(path)); } - Path realPath = realStoreDir + "/" + baseNameOf(path); + Path realPath = realStoreDir + "/" + std::string(baseNameOf(path)); struct stat st; if (lstat(realPath.c_str(), &st)) { @@ -555,7 +557,7 @@ void LocalStore::deletePathRecursive(GCState & state, const Path & path) try { if (chmod(realPath.c_str(), st.st_mode | S_IWUSR) == -1) throw SysError(format("making '%1%' writable") % realPath); - Path tmp = trashDir + "/" + baseNameOf(path); + Path tmp = trashDir + "/" + std::string(baseNameOf(path)); if (rename(realPath.c_str(), tmp.c_str())) throw SysError(format("unable to rename '%1%' to '%2%'") % realPath % tmp); state.bytesInvalidated += size; @@ -575,7 +577,7 @@ void LocalStore::deletePathRecursive(GCState & state, const Path & path) } -bool LocalStore::canReachRoot(GCState & state, PathSet & visited, const Path & path) +bool LocalStore::canReachRoot(GCState & state, StorePathSet & visited, const StorePath & path) { if (visited.count(path)) return false; @@ -584,41 +586,41 @@ bool LocalStore::canReachRoot(GCState & state, PathSet & visited, const Path & p if (state.dead.count(path)) return false; if (state.roots.count(path)) { - debug(format("cannot delete '%1%' because it's a root") % path); - state.alive.insert(path); + debug("cannot delete '%1%' because it's a root", printStorePath(path)); + state.alive.insert(path.clone()); return true; } - visited.insert(path); + visited.insert(path.clone()); - if (!isStorePath(path) || !isValidPath(path)) return false; + //FIXME + if (!isStorePath(printStorePath(path)) || !isValidPath(path)) return false; - PathSet incoming; + StorePathSet incoming; /* Don't delete this path if any of its referrers are alive. */ queryReferrers(path, incoming); /* If keep-derivations is set and this is a derivation, then don't delete the derivation if any of the outputs are alive. */ - if (state.gcKeepDerivations && isDerivation(path)) { - PathSet outputs = queryDerivationOutputs(path); - for (auto & i : outputs) + if (state.gcKeepDerivations && path.isDerivation()) { + for (auto & i : queryDerivationOutputs(path)) if (isValidPath(i) && queryPathInfo(i)->deriver == path) - incoming.insert(i); + incoming.insert(i.clone()); } /* If keep-outputs is set, then don't delete this path if there are derivers of this path that are not garbage. 
*/ if (state.gcKeepOutputs) { - PathSet derivers = queryValidDerivers(path); + auto derivers = queryValidDerivers(path); for (auto & i : derivers) - incoming.insert(i); + incoming.insert(i.clone()); } for (auto & i : incoming) if (i != path) if (canReachRoot(state, visited, i)) { - state.alive.insert(path); + state.alive.insert(path.clone()); return true; } @@ -630,12 +632,13 @@ void LocalStore::tryToDelete(GCState & state, const Path & path) { checkInterrupt(); - auto realPath = realStoreDir + "/" + baseNameOf(path); + auto realPath = realStoreDir + "/" + std::string(baseNameOf(path)); if (realPath == linksDir || realPath == trashDir) return; //Activity act(*logger, lvlDebug, format("considering whether to delete '%1%'") % path); - if (!isStorePath(path) || !isValidPath(path)) { + // FIXME + if (!isStorePath(path) || !isValidPath(parseStorePath(path))) { /* A lock file belonging to a path that we're building right now isn't garbage. */ if (isActiveTempFile(state, path, ".lock")) return; @@ -650,16 +653,17 @@ void LocalStore::tryToDelete(GCState & state, const Path & path) if (isActiveTempFile(state, path, ".check")) return; } - PathSet visited; + StorePathSet visited; - if (canReachRoot(state, visited, path)) { - debug(format("cannot delete '%1%' because it's still reachable") % path); + if (canReachRoot(state, visited, parseStorePath(path))) { + debug("cannot delete '%s' because it's still reachable", path); } else { /* No path we visited was a root, so everything is garbage. But we only delete ‘path’ and its referrers here so that ‘nix-store --delete’ doesn't have the unexpected effect of recursing into derivations and outputs. */ - state.dead.insert(visited.begin(), visited.end()); + for (auto & i : visited) + state.dead.insert(i.clone()); if (state.shouldDelete) deletePathRecursive(state, path); } @@ -690,9 +694,8 @@ void LocalStore::removeUnusedLinks(const GCState & state) throw SysError(format("statting '%1%'") % path); if (st.st_nlink != 1) { - unsigned long long size = st.st_blocks * 512ULL; - actualSize += size; - unsharedSize += (st.st_nlink - 1) * size; + actualSize += st.st_size; + unsharedSize += (st.st_nlink - 1) * st.st_size; continue; } @@ -701,7 +704,7 @@ void LocalStore::removeUnusedLinks(const GCState & state) if (unlink(path.c_str()) == -1) throw SysError(format("deleting '%1%'") % path); - state.results.bytesFreed += st.st_blocks * 512ULL; + state.results.bytesFreed += st.st_size; } struct stat st; @@ -716,8 +719,7 @@ void LocalStore::removeUnusedLinks(const GCState & state) void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) { - GCState state(results); - state.options = options; + GCState state(options, results); state.gcKeepOutputs = settings.gcKeepOutputs; state.gcKeepDerivations = settings.gcKeepDerivations; @@ -742,12 +744,12 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) /* Find the roots. Since we've grabbed the GC lock, the set of permanent roots cannot increase now. */ - printError(format("finding garbage collector roots...")); + printError("finding garbage collector roots..."); Roots rootMap; if (!options.ignoreLiveness) findRootsNoTemp(rootMap, true); - for (auto & i : rootMap) state.roots.insert(i.first); + for (auto & i : rootMap) state.roots.insert(i.first.clone()); /* Read the temporary roots. This acquires read locks on all per-process temporary root files. 
So after this point no paths @@ -755,9 +757,10 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) FDs fds; Roots tempRoots; findTempRoots(fds, tempRoots, true); - for (auto & root : tempRoots) - state.tempRoots.insert(root.first); - state.roots.insert(state.tempRoots.begin(), state.tempRoots.end()); + for (auto & root : tempRoots) { + state.tempRoots.insert(root.first.clone()); + state.roots.insert(root.first.clone()); + } /* After this point the set of roots or temporary roots cannot increase, since we hold locks on everything. So everything @@ -769,7 +772,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) createDirs(trashDir); } catch (SysError & e) { if (e.errNo == ENOSPC) { - printInfo(format("note: can't create trash directory: %1%") % e.msg()); + printInfo("note: can't create trash directory: %s", e.msg()); state.moveToTrash = false; } } @@ -781,18 +784,21 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) if (options.action == GCOptions::gcDeleteSpecific) { for (auto & i : options.pathsToDelete) { - assertStorePath(i); - tryToDelete(state, i); + tryToDelete(state, printStorePath(i)); if (state.dead.find(i) == state.dead.end()) - throw Error(format("cannot delete path '%1%' since it is still alive") % i); + throw Error( + "cannot delete path '%1%' since it is still alive. " + "To find out why use: " + "nix-store --query --roots", + printStorePath(i)); } } else if (options.maxFreed > 0) { if (state.shouldDelete) - printError(format("deleting garbage...")); + printError("deleting garbage..."); else - printError(format("determining live/dead paths...")); + printError("determining live/dead paths..."); try { @@ -812,7 +818,8 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) string name = dirent->d_name; if (name == "." || name == "..") continue; Path path = storeDir + "/" + name; - if (isStorePath(path) && isValidPath(path)) + // FIXME + if (isStorePath(path) && isValidPath(parseStorePath(path))) entries.push_back(path); else tryToDelete(state, path); @@ -837,12 +844,14 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) } if (state.options.action == GCOptions::gcReturnLive) { - state.results.paths = state.alive; + for (auto & i : state.alive) + state.results.paths.insert(printStorePath(i)); return; } if (state.options.action == GCOptions::gcReturnDead) { - state.results.paths = state.dead; + for (auto & i : state.dead) + state.results.paths.insert(printStorePath(i)); return; } @@ -856,7 +865,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) /* Clean up the links directory. 
*/ if (options.action == GCOptions::gcDeleteDead || options.action == GCOptions::gcDeleteSpecific) { - printError(format("deleting unused links...")); + printError("deleting unused links..."); removeUnusedLinks(state); } @@ -867,7 +876,12 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) void LocalStore::autoGC(bool sync) { - auto getAvail = [this]() { + static auto fakeFreeSpaceFile = getEnv("_NIX_TEST_FREE_SPACE_FILE"); + + auto getAvail = [this]() -> uint64_t { + if (fakeFreeSpaceFile) + return std::stoll(readFile(*fakeFreeSpaceFile)); + struct statvfs st; if (statvfs(realStoreDir.c_str(), &st)) throw SysError("getting filesystem info about '%s'", realStoreDir); @@ -888,7 +902,7 @@ void LocalStore::autoGC(bool sync) auto now = std::chrono::steady_clock::now(); - if (now < state->lastGCCheck + std::chrono::seconds(5)) return; + if (now < state->lastGCCheck + std::chrono::seconds(settings.minFreeCheckInterval)) return; auto avail = getAvail(); @@ -915,11 +929,11 @@ void LocalStore::autoGC(bool sync) promise.set_value(); }); - printInfo("running auto-GC to free %d bytes", settings.maxFree - avail); - GCOptions options; options.maxFreed = settings.maxFree - avail; + printInfo("running auto-GC to free %d bytes", options.maxFreed); + GCResults results; collectGarbage(options, results); diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 1c2c08715..a8945996e 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -7,6 +7,7 @@ #include #include #include +#include namespace nix { @@ -32,20 +33,20 @@ static GlobalConfig::Register r1(&settings); Settings::Settings() : nixPrefix(NIX_PREFIX) - , nixStore(canonPath(getEnv("NIX_STORE_DIR", getEnv("NIX_STORE", NIX_STORE_DIR)))) - , nixDataDir(canonPath(getEnv("NIX_DATA_DIR", NIX_DATA_DIR))) - , nixLogDir(canonPath(getEnv("NIX_LOG_DIR", NIX_LOG_DIR))) - , nixStateDir(canonPath(getEnv("NIX_STATE_DIR", NIX_STATE_DIR))) - , nixConfDir(canonPath(getEnv("NIX_CONF_DIR", NIX_CONF_DIR))) - , nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR", NIX_LIBEXEC_DIR))) - , nixBinDir(canonPath(getEnv("NIX_BIN_DIR", NIX_BIN_DIR))) + , nixStore(canonPath(getEnv("NIX_STORE_DIR").value_or(getEnv("NIX_STORE").value_or(NIX_STORE_DIR)))) + , nixDataDir(canonPath(getEnv("NIX_DATA_DIR").value_or(NIX_DATA_DIR))) + , nixLogDir(canonPath(getEnv("NIX_LOG_DIR").value_or(NIX_LOG_DIR))) + , nixStateDir(canonPath(getEnv("NIX_STATE_DIR").value_or(NIX_STATE_DIR))) + , nixConfDir(canonPath(getEnv("NIX_CONF_DIR").value_or(NIX_CONF_DIR))) + , nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR").value_or(NIX_LIBEXEC_DIR))) + , nixBinDir(canonPath(getEnv("NIX_BIN_DIR").value_or(NIX_BIN_DIR))) , nixManDir(canonPath(NIX_MAN_DIR)) , nixDaemonSocketFile(canonPath(nixStateDir + DEFAULT_SOCKET_PATH)) { buildUsersGroup = getuid() == 0 ? "nixbld" : ""; - lockCPU = getEnv("NIX_AFFINITY_HACK", "1") == "1"; + lockCPU = getEnv("NIX_AFFINITY_HACK") == "1"; - caFile = getEnv("NIX_SSL_CERT_FILE", getEnv("SSL_CERT_FILE", "")); + caFile = getEnv("NIX_SSL_CERT_FILE").value_or(getEnv("SSL_CERT_FILE").value_or("")); if (caFile == "") { for (auto & fn : {"/etc/ssl/certs/ca-certificates.crt", "/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"}) if (pathExists(fn)) { @@ -56,9 +57,9 @@ Settings::Settings() /* Backwards compatibility. 
*/ auto s = getEnv("NIX_REMOTE_SYSTEMS"); - if (s != "") { + if (s) { Strings ss; - for (auto & p : tokenizeString(s, ":")) + for (auto & p : tokenizeString(*s, ":")) ss.push_back("@" + p); builders = concatStringsSep(" ", ss); } @@ -95,7 +96,7 @@ StringSet Settings::getDefaultSystemFeatures() /* For backwards compatibility, accept some "features" that are used in Nixpkgs to route builds to certain machines but don't actually require anything special on the machines. */ - StringSet features{"nixos-test", "benchmark", "big-parallel"}; + StringSet features{"nixos-test", "benchmark", "big-parallel", "recursive-nix"}; #if __linux__ if (access("/dev/kvm", R_OK | W_OK) == 0) @@ -105,6 +106,27 @@ StringSet Settings::getDefaultSystemFeatures() return features; } +bool Settings::isExperimentalFeatureEnabled(const std::string & name) +{ + auto & f = experimentalFeatures.get(); + return std::find(f.begin(), f.end(), name) != f.end(); +} + +void Settings::requireExperimentalFeature(const std::string & name) +{ + if (!isExperimentalFeatureEnabled(name)) + throw Error("experimental Nix feature '%s' is disabled", name); +} + +bool Settings::isWSL1() +{ + struct utsname utsbuf; + uname(&utsbuf); + // WSL1 uses -Microsoft suffix + // WSL2 uses -microsoft-standard suffix + return hasSuffix(utsbuf.release, "-Microsoft"); +} + const string nixVersion = PACKAGE_VERSION; template<> void BaseSetting::set(const std::string & str) @@ -115,7 +137,7 @@ template<> void BaseSetting::set(const std::string & str) else throw UsageError("option '%s' has invalid value '%s'", name, str); } -template<> std::string BaseSetting::to_string() +template<> std::string BaseSetting::to_string() const { if (value == smEnabled) return "true"; else if (value == smRelaxed) return "relaxed"; diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 53efc6a90..782870547 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -34,6 +34,8 @@ class Settings : public Config { StringSet getDefaultSystemFeatures(); + bool isWSL1(); + public: Settings(); @@ -66,7 +68,7 @@ public: /* File name of the socket the daemon listens to. 
*/ Path nixDaemonSocketFile; - Setting storeUri{this, getEnv("NIX_REMOTE", "auto"), "store", + Setting storeUri{this, getEnv("NIX_REMOTE").value_or("auto"), "store", "The default Nix store to use."}; Setting keepFailed{this, false, "keep-failed", @@ -130,7 +132,7 @@ public: Setting fsyncMetadata{this, true, "fsync-metadata", "Whether SQLite should use fsync()."}; - Setting useSQLiteWAL{this, true, "use-sqlite-wal", + Setting useSQLiteWAL{this, !isWSL1(), "use-sqlite-wal", "Whether SQLite should use WAL mode."}; Setting syncBeforeRegistering{this, false, "sync-before-registering", @@ -209,6 +211,9 @@ public: "The paths to make available inside the build sandbox.", {"build-chroot-dirs", "build-sandbox-paths"}}; + Setting sandboxFallback{this, true, "sandbox-fallback", + "Whether to disable sandboxing when the kernel doesn't allow it."}; + Setting extraSandboxPaths{this, {}, "extra-sandbox-paths", "Additional paths to make available inside the build sandbox.", {"build-extra-chroot-dirs", "build-extra-sandbox-paths"}}; @@ -255,7 +260,7 @@ public: "Secret keys with which to sign local builds."}; Setting tarballTtl{this, 60 * 60, "tarball-ttl", - "How soon to expire files fetched by builtins.fetchTarball and builtins.fetchurl."}; + "How long downloaded files are considered up-to-date."}; Setting requireSigs{this, true, "require-sigs", "Whether to check that any non-content-addressed path added to the " @@ -315,6 +320,9 @@ public: "pre-build-hook", "A program to run just before a build to set derivation-specific build settings."}; + Setting postBuildHook{this, "", "post-build-hook", + "A program to run just after each successful build."}; + Setting netrcFile{this, fmt("%s/%s", nixConfDir, "netrc"), "netrc-file", "Path to the netrc file used to obtain usernames/passwords for downloads."}; @@ -342,8 +350,18 @@ public: Setting maxFree{this, std::numeric_limits::max(), "max-free", "Stop deleting garbage when free disk space is above the specified amount."}; + Setting minFreeCheckInterval{this, 5, "min-free-check-interval", + "Number of seconds between checking free disk space."}; + Setting pluginFiles{this, {}, "plugin-files", "Plugins to dynamically load at nix initialization time."}; + + Setting experimentalFeatures{this, {}, "experimental-features", + "Experimental Nix features to enable."}; + + bool isExperimentalFeatureEnabled(const std::string & name); + + void requireExperimentalFeature(const std::string & name); }; diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 8da0e2f9d..d4ae36662 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -42,13 +42,16 @@ public: void init() override { // FIXME: do this lazily? - if (!diskCache->cacheExists(cacheUri, wantMassQuery_, priority)) { + if (auto cacheInfo = diskCache->cacheExists(cacheUri)) { + wantMassQuery.setDefault(cacheInfo->wantMassQuery ? 
"true" : "false"); + priority.setDefault(fmt("%d", cacheInfo->priority)); + } else { try { BinaryCacheStore::init(); } catch (UploadToHTTP &) { throw Error("'%s' does not appear to be a binary cache", cacheUri); } - diskCache->createCache(cacheUri, storeDir, wantMassQuery_, priority); + diskCache->createCache(cacheUri, storeDir, wantMassQuery, priority); } } @@ -84,7 +87,6 @@ protected: try { DownloadRequest request(cacheUri + "/" + path); request.head = true; - request.tries = 5; getDownloader()->download(request); return true; } catch (DownloadError & e) { @@ -114,7 +116,6 @@ protected: DownloadRequest makeRequest(const std::string & path) { DownloadRequest request(cacheUri + "/" + path); - request.tries = 8; return request; } @@ -133,23 +134,25 @@ protected: } void getFile(const std::string & path, - Callback> callback) override + Callback> callback) noexcept override { checkEnabled(); auto request(makeRequest(path)); + auto callbackPtr = std::make_shared(std::move(callback)); + getDownloader()->enqueueDownload(request, - {[callback, this](std::future result) { + {[callbackPtr, this](std::future result) { try { - callback(result.get().data); + (*callbackPtr)(result.get().data); } catch (DownloadError & e) { if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden) - return callback(std::shared_ptr()); + return (*callbackPtr)(std::shared_ptr()); maybeDisable(); - callback.rethrow(); + callbackPtr->rethrow(); } catch (...) { - callback.rethrow(); + callbackPtr->rethrow(); } }}); } diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 7c9bc2b68..458266be0 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -87,25 +87,27 @@ struct LegacySSHStore : public Store return uriScheme + host; } - void queryPathInfoUncached(const Path & path, - Callback> callback) override + void queryPathInfoUncached(const StorePath & path, + Callback> callback) noexcept override { try { auto conn(connections->get()); - debug("querying remote host '%s' for info on '%s'", host, path); + debug("querying remote host '%s' for info on '%s'", host, printStorePath(path)); - conn->to << cmdQueryPathInfos << PathSet{path}; + conn->to << cmdQueryPathInfos << PathSet{printStorePath(path)}; conn->to.flush(); - auto info = std::make_shared(); - conn->from >> info->path; - if (info->path.empty()) return callback(nullptr); + auto p = readString(conn->from); + if (p.empty()) return callback(nullptr); + auto info = std::make_shared(parseStorePath(p)); assert(path == info->path); PathSet references; - conn->from >> info->deriver; - info->references = readStorePaths(*this, conn->from); + auto deriver = readString(conn->from); + if (deriver != "") + info->deriver = parseStorePath(deriver); + info->references = readStorePaths(*this, conn->from); readLongLong(conn->from); // download size info->narSize = readLongLong(conn->from); @@ -127,7 +129,7 @@ struct LegacySSHStore : public Store RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr accessor) override { - debug("adding path '%s' to remote host '%s'", info.path, host); + debug("adding path '%s' to remote host '%s'", printStorePath(info.path), host); auto conn(connections->get()); @@ -135,10 +137,11 @@ struct LegacySSHStore : public Store conn->to << cmdAddToStoreNar - << info.path - << info.deriver - << info.narHash.to_string(Base16, false) - << info.references + << printStorePath(info.path) + << (info.deriver ? 
printStorePath(*info.deriver) : "") + << info.narHash.to_string(Base16, false); + writeStorePaths(*this, conn->to, info.references); + conn->to << info.registrationTime << info.narSize << info.ultimate @@ -165,9 +168,10 @@ struct LegacySSHStore : public Store } conn->to << exportMagic - << info.path - << info.references - << info.deriver + << printStorePath(info.path); + writeStorePaths(*this, conn->to, info.references); + conn->to + << (info.deriver ? printStorePath(*info.deriver) : "") << 0 << 0; conn->to.flush(); @@ -175,39 +179,40 @@ struct LegacySSHStore : public Store } if (readInt(conn->from) != 1) - throw Error("failed to add path '%s' to remote host '%s', info.path, host"); + throw Error("failed to add path '%s' to remote host '%s'", printStorePath(info.path), host); } - void narFromPath(const Path & path, Sink & sink) override + void narFromPath(const StorePath & path, Sink & sink) override { auto conn(connections->get()); - conn->to << cmdDumpStorePath << path; + conn->to << cmdDumpStorePath << printStorePath(path); conn->to.flush(); copyNAR(conn->from, sink); } - Path queryPathFromHashPart(const string & hashPart) override + std::optional queryPathFromHashPart(const std::string & hashPart) override { unsupported("queryPathFromHashPart"); } - Path addToStore(const string & name, const Path & srcPath, + StorePath addToStore(const string & name, const Path & srcPath, bool recursive, HashType hashAlgo, PathFilter & filter, RepairFlag repair) override { unsupported("addToStore"); } - Path addTextToStore(const string & name, const string & s, - const PathSet & references, RepairFlag repair) override + StorePath addTextToStore(const string & name, const string & s, + const StorePathSet & references, RepairFlag repair) override { unsupported("addTextToStore"); } - BuildResult buildDerivation(const Path & drvPath, const BasicDerivation & drv, + BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode) override { auto conn(connections->get()); conn->to << cmdBuildDerivation - << drvPath - << drv + << printStorePath(drvPath); + writeDerivation(conn->to, *this, drv); + conn->to << settings.maxSilentTime << settings.buildTimeout; if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 2) @@ -230,11 +235,11 @@ struct LegacySSHStore : public Store return status; } - void ensurePath(const Path & path) override + void ensurePath(const StorePath & path) override { unsupported("ensurePath"); } - void computeFSClosure(const PathSet & paths, - PathSet & out, bool flipDirection = false, + void computeFSClosure(const StorePathSet & paths, + StorePathSet & out, bool flipDirection = false, bool includeOutputs = false, bool includeDerivers = false) override { if (flipDirection || includeDerivers) { @@ -246,16 +251,15 @@ struct LegacySSHStore : public Store conn->to << cmdQueryClosure - << includeOutputs - << paths; + << includeOutputs; + writeStorePaths(*this, conn->to, paths); conn->to.flush(); - auto res = readStorePaths(*this, conn->from); - - out.insert(res.begin(), res.end()); + for (auto & i : readStorePaths(*this, conn->from)) + out.insert(i.clone()); } - PathSet queryValidPaths(const PathSet & paths, + StorePathSet queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute = NoSubstitute) override { auto conn(connections->get()); @@ -263,11 +267,11 @@ struct LegacySSHStore : public Store conn->to << cmdQueryValidPaths << false // lock - << maybeSubstitute - << paths; + << maybeSubstitute; + writeStorePaths(*this, conn->to, paths); 
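[Editorial sketch, not part of the patch: the legacy SSH protocol in this hunk still exchanges full store paths as plain strings, so the new StorePath-based code converts at the wire boundary, in the spirit of printStorePath()/parseStorePath() and writeStorePaths()/readStorePaths(). The Toy* names below are simplified stand-ins, not the real Nix types.]

// Minimal model of serialising a set of store paths as printed strings and
// parsing them back, assuming the store directory prefix is known.
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

struct ToyStorePath { std::string baseName; };            // e.g. "aaaa...-hello-2.10"

static std::string printToyStorePath(const std::string & storeDir, const ToyStorePath & p)
{
    return storeDir + "/" + p.baseName;
}

static ToyStorePath parseToyStorePath(const std::string & storeDir, const std::string & s)
{
    // Assumes 's' starts with storeDir + "/"; the real parser also validates the hash part.
    return ToyStorePath{s.substr(storeDir.size() + 1)};
}

// Length-prefixed list of printed paths, as an old-style PathSet serialiser would send them.
static void writeToyStorePaths(std::ostream & to, const std::string & storeDir,
    const std::vector<ToyStorePath> & paths)
{
    to << paths.size() << '\n';
    for (auto & p : paths) to << printToyStorePath(storeDir, p) << '\n';
}

static std::vector<ToyStorePath> readToyStorePaths(std::istream & from, const std::string & storeDir)
{
    size_t n = 0;
    from >> n;
    from.ignore();                                        // skip the newline after the count
    std::vector<ToyStorePath> res;
    for (size_t i = 0; i < n; ++i) {
        std::string line;
        std::getline(from, line);
        res.push_back(parseToyStorePath(storeDir, line));
    }
    return res;
}

int main()
{
    std::stringstream wire;
    writeToyStorePaths(wire, "/nix/store", {{"aaaa-hello-2.10"}, {"bbbb-gcc-9.2.0"}});
    for (auto & p : readToyStorePaths(wire, "/nix/store"))
        std::cout << p.baseName << '\n';                  // round-trips both base names
}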
conn->to.flush(); - return readStorePaths(*this, conn->from); + return readStorePaths(*this, conn->from); } void connect() override diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc index b7001795b..363be1443 100644 --- a/src/libstore/local-binary-cache-store.cc +++ b/src/libstore/local-binary-cache-store.cc @@ -44,15 +44,15 @@ protected: } } - PathSet queryAllValidPaths() override + StorePathSet queryAllValidPaths() override { - PathSet paths; + StorePathSet paths; for (auto & entry : readDirectory(binaryCacheDir)) { if (entry.name.size() != 40 || !hasSuffix(entry.name, ".narinfo")) continue; - paths.insert(storeDir + "/" + entry.name.substr(0, entry.name.size() - 8)); + paths.insert(parseStorePath(storeDir + "/" + entry.name.substr(0, entry.name.size() - 8))); } return paths; @@ -63,6 +63,8 @@ protected: void LocalBinaryCacheStore::init() { createDirs(binaryCacheDir + "/nar"); + if (writeDebugInfo) + createDirs(binaryCacheDir + "/debuginfo"); BinaryCacheStore::init(); } diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc index 642e4070d..aa5abd835 100644 --- a/src/libstore/local-fs-store.cc +++ b/src/libstore/local-fs-store.cc @@ -21,7 +21,7 @@ struct LocalStoreAccessor : public FSAccessor Path toRealPath(const Path & path) { Path storePath = store->toStorePath(path); - if (!store->isValidPath(storePath)) + if (!store->isValidPath(store->parseStorePath(storePath))) throw InvalidPath(format("path '%1%' is not a valid store path") % storePath); return store->getRealStoreDir() + std::string(path, store->storeDir.size()); } @@ -77,34 +77,32 @@ ref LocalFSStore::getFSAccessor() std::dynamic_pointer_cast(shared_from_this()))); } -void LocalFSStore::narFromPath(const Path & path, Sink & sink) +void LocalFSStore::narFromPath(const StorePath & path, Sink & sink) { if (!isValidPath(path)) - throw Error(format("path '%s' is not valid") % path); - dumpPath(getRealStoreDir() + std::string(path, storeDir.size()), sink); + throw Error("path '%s' is not valid", printStorePath(path)); + dumpPath(getRealStoreDir() + std::string(printStorePath(path), storeDir.size()), sink); } const string LocalFSStore::drvsLogDir = "drvs"; -std::shared_ptr LocalFSStore::getBuildLog(const Path & path_) +std::shared_ptr LocalFSStore::getBuildLog(const StorePath & path_) { - auto path(path_); + auto path = path_.clone(); - assertStorePath(path); - - - if (!isDerivation(path)) { + if (!path.isDerivation()) { try { - path = queryPathInfo(path)->deriver; + auto info = queryPathInfo(path); + if (!info->deriver) return nullptr; + path = info->deriver->clone(); } catch (InvalidPath &) { return nullptr; } - if (path == "") return nullptr; } - string baseName = baseNameOf(path); + auto baseName = std::string(baseNameOf(printStorePath(path))); for (int j = 0; j < 2; j++) { diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 0bf1831e4..436619c11 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -5,6 +5,7 @@ #include "worker-protocol.hh" #include "derivations.hh" #include "nar-info.hh" +#include "references.hh" #include #include @@ -53,6 +54,7 @@ LocalStore::LocalStore(const Params & params) , trashDir(realStoreDir + "/trash") , tempRootsDir(stateDir + "/temproots") , fnTempRoots(fmt("%s/%d", tempRootsDir, getpid())) + , locksHeld(tokenizeString(getEnv("NIX_HELD_LOCKS").value_or(""))) { auto state(_state.lock()); @@ -70,15 +72,17 @@ LocalStore::LocalStore(const Params & params) createSymlink(profilesDir, 
gcRootsDir + "/profiles"); } + for (auto & perUserDir : {profilesDir + "/per-user", gcRootsDir + "/per-user"}) { + createDirs(perUserDir); + if (chmod(perUserDir.c_str(), 0755) == -1) + throw SysError("could not set permissions on '%s' to 755", perUserDir); + } + + createUser(getUserName(), getuid()); + /* Optionally, create directories and set permissions for a multi-user install. */ if (getuid() == 0 && settings.buildUsersGroup != "") { - - Path perUserDir = profilesDir + "/per-user"; - createDirs(perUserDir); - if (chmod(perUserDir.c_str(), 01777) == -1) - throw SysError(format("could not set permissions on '%1%' to 1777") % perUserDir); - mode_t perm = 01775; struct group * gr = getgrnam(settings.buildUsersGroup.get().c_str()); @@ -537,41 +541,35 @@ void canonicalisePathMetaData(const Path & path, uid_t fromUid) } -void LocalStore::checkDerivationOutputs(const Path & drvPath, const Derivation & drv) +void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivation & drv) { - string drvName = storePathToName(drvPath); - assert(isDerivation(drvName)); + assert(drvPath.isDerivation()); + std::string drvName(drvPath.name()); drvName = string(drvName, 0, drvName.size() - drvExtension.size()); if (drv.isFixedOutput()) { DerivationOutputs::const_iterator out = drv.outputs.find("out"); if (out == drv.outputs.end()) - throw Error(format("derivation '%1%' does not have an output named 'out'") % drvPath); + throw Error("derivation '%s' does not have an output named 'out'", printStorePath(drvPath)); auto [recursive, h] = out->second.parseHashInfo(); - Path outPath = makeFixedOutputPath(recursive, h, drvName); + auto outPath = makeFixedOutputPath(recursive, h, drvName); StringPairs::const_iterator j = drv.env.find("out"); - if (out->second.path != outPath || j == drv.env.end() || j->second != outPath) - throw Error(format("derivation '%1%' has incorrect output '%2%', should be '%3%'") - % drvPath % out->second.path % outPath); + if (out->second.path != outPath || j == drv.env.end() || parseStorePath(j->second) != outPath) + throw Error("derivation '%s' has incorrect output '%s', should be '%s'", + printStorePath(drvPath), printStorePath(out->second.path), printStorePath(outPath)); } else { - Derivation drvCopy(drv); - for (auto & i : drvCopy.outputs) { - i.second.path = ""; - drvCopy.env[i.first] = ""; - } - - Hash h = hashDerivationModulo(*this, drvCopy); + Hash h = hashDerivationModulo(*this, drv, true); for (auto & i : drv.outputs) { - Path outPath = makeOutputPath(i.first, h, drvName); + auto outPath = makeOutputPath(i.first, h, drvName); StringPairs::const_iterator j = drv.env.find(i.first); - if (i.second.path != outPath || j == drv.env.end() || j->second != outPath) - throw Error(format("derivation '%1%' has incorrect output '%2%', should be '%3%'") - % drvPath % i.second.path % outPath); + if (i.second.path != outPath || j == drv.env.end() || parseStorePath(j->second) != outPath) + throw Error("derivation '%s' has incorrect output '%s', should be '%s'", + printStorePath(drvPath), printStorePath(i.second.path), printStorePath(outPath)); } } } @@ -581,13 +579,14 @@ uint64_t LocalStore::addValidPath(State & state, const ValidPathInfo & info, bool checkOutputs) { if (info.ca != "" && !info.isContentAddressed(*this)) - throw Error("cannot add path '%s' to the Nix store because it claims to be content-addressed but isn't", info.path); + throw Error("cannot add path '%s' to the Nix store because it claims to be content-addressed but isn't", + printStorePath(info.path)); 
state.stmtRegisterValidPath.use() - (info.path) + (printStorePath(info.path)) (info.narHash.to_string(Base16)) (info.registrationTime == 0 ? time(0) : info.registrationTime) - (info.deriver, info.deriver != "") + (info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver) (info.narSize, info.narSize != 0) (info.ultimate ? 1 : 0, info.ultimate) (concatStringsSep(" ", info.sigs), !info.sigs.empty()) @@ -599,8 +598,8 @@ uint64_t LocalStore::addValidPath(State & state, the database. This is useful for the garbage collector: it can efficiently query whether a path is an output of some derivation. */ - if (isDerivation(info.path)) { - Derivation drv = readDerivation(realStoreDir + "/" + baseNameOf(info.path)); + if (info.path.isDerivation()) { + auto drv = readDerivation(*this, realStoreDir + "/" + std::string(info.path.to_string())); /* Verify that the output paths in the derivation are correct (i.e., follow the scheme for computing output paths from @@ -613,34 +612,31 @@ uint64_t LocalStore::addValidPath(State & state, state.stmtAddDerivationOutput.use() (id) (i.first) - (i.second.path) + (printStorePath(i.second.path)) .exec(); } } { auto state_(Store::state.lock()); - state_->pathInfoCache.upsert(storePathToHash(info.path), std::make_shared(info)); + state_->pathInfoCache.upsert(storePathToHash(printStorePath(info.path)), std::make_shared(info)); } return id; } -void LocalStore::queryPathInfoUncached(const Path & path, - Callback> callback) +void LocalStore::queryPathInfoUncached(const StorePath & path, + Callback> callback) noexcept { try { - auto info = std::make_shared(); - info->path = path; - - assertStorePath(path); + auto info = std::make_shared(path.clone()); callback(retrySQLite>([&]() { auto state(_state.lock()); /* Get the path info. */ - auto useQueryPathInfo(state->stmtQueryPathInfo.use()(path)); + auto useQueryPathInfo(state->stmtQueryPathInfo.use()(printStorePath(info->path))); if (!useQueryPathInfo.next()) return std::shared_ptr(); @@ -650,13 +646,13 @@ void LocalStore::queryPathInfoUncached(const Path & path, try { info->narHash = Hash(useQueryPathInfo.getStr(1)); } catch (BadHash & e) { - throw Error("in valid-path entry for '%s': %s", path, e.what()); + throw Error("in valid-path entry for '%s': %s", printStorePath(path), e.what()); } info->registrationTime = useQueryPathInfo.getInt(2); auto s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 3); - if (s) info->deriver = s; + if (s) info->deriver = parseStorePath(s); /* Note that narSize = NULL yields 0. */ info->narSize = useQueryPathInfo.getInt(4); @@ -673,7 +669,7 @@ void LocalStore::queryPathInfoUncached(const Path & path, auto useQueryReferences(state->stmtQueryReferences.use()(info->id)); while (useQueryReferences.next()) - info->references.insert(useQueryReferences.getStr(0)); + info->references.insert(parseStorePath(useQueryReferences.getStr(0))); return info; })); @@ -691,27 +687,27 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info) (info.ultimate ? 
1 : 0, info.ultimate) (concatStringsSep(" ", info.sigs), !info.sigs.empty()) (info.ca, !info.ca.empty()) - (info.path) + (printStorePath(info.path)) .exec(); } -uint64_t LocalStore::queryValidPathId(State & state, const Path & path) +uint64_t LocalStore::queryValidPathId(State & state, const StorePath & path) { - auto use(state.stmtQueryPathInfo.use()(path)); + auto use(state.stmtQueryPathInfo.use()(printStorePath(path))); if (!use.next()) - throw Error(format("path '%1%' is not valid") % path); + throw Error("path '%s' is not valid", printStorePath(path)); return use.getInt(0); } -bool LocalStore::isValidPath_(State & state, const Path & path) +bool LocalStore::isValidPath_(State & state, const StorePath & path) { - return state.stmtQueryPathInfo.use()(path).next(); + return state.stmtQueryPathInfo.use()(printStorePath(path)).next(); } -bool LocalStore::isValidPathUncached(const Path & path) +bool LocalStore::isValidPathUncached(const StorePath & path) { return retrySQLite([&]() { auto state(_state.lock()); @@ -720,39 +716,38 @@ bool LocalStore::isValidPathUncached(const Path & path) } -PathSet LocalStore::queryValidPaths(const PathSet & paths, SubstituteFlag maybeSubstitute) +StorePathSet LocalStore::queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute) { - PathSet res; + StorePathSet res; for (auto & i : paths) - if (isValidPath(i)) res.insert(i); + if (isValidPath(i)) res.insert(i.clone()); return res; } -PathSet LocalStore::queryAllValidPaths() +StorePathSet LocalStore::queryAllValidPaths() { - return retrySQLite([&]() { + return retrySQLite([&]() { auto state(_state.lock()); auto use(state->stmtQueryValidPaths.use()); - PathSet res; - while (use.next()) res.insert(use.getStr(0)); + StorePathSet res; + while (use.next()) res.insert(parseStorePath(use.getStr(0))); return res; }); } -void LocalStore::queryReferrers(State & state, const Path & path, PathSet & referrers) +void LocalStore::queryReferrers(State & state, const StorePath & path, StorePathSet & referrers) { - auto useQueryReferrers(state.stmtQueryReferrers.use()(path)); + auto useQueryReferrers(state.stmtQueryReferrers.use()(printStorePath(path))); while (useQueryReferrers.next()) - referrers.insert(useQueryReferrers.getStr(0)); + referrers.insert(parseStorePath(useQueryReferrers.getStr(0))); } -void LocalStore::queryReferrers(const Path & path, PathSet & referrers) +void LocalStore::queryReferrers(const StorePath & path, StorePathSet & referrers) { - assertStorePath(path); return retrySQLite([&]() { auto state(_state.lock()); queryReferrers(*state, path, referrers); @@ -760,42 +755,40 @@ void LocalStore::queryReferrers(const Path & path, PathSet & referrers) } -PathSet LocalStore::queryValidDerivers(const Path & path) +StorePathSet LocalStore::queryValidDerivers(const StorePath & path) { - assertStorePath(path); - - return retrySQLite([&]() { + return retrySQLite([&]() { auto state(_state.lock()); - auto useQueryValidDerivers(state->stmtQueryValidDerivers.use()(path)); + auto useQueryValidDerivers(state->stmtQueryValidDerivers.use()(printStorePath(path))); - PathSet derivers; + StorePathSet derivers; while (useQueryValidDerivers.next()) - derivers.insert(useQueryValidDerivers.getStr(1)); + derivers.insert(parseStorePath(useQueryValidDerivers.getStr(1))); return derivers; }); } -PathSet LocalStore::queryDerivationOutputs(const Path & path) +StorePathSet LocalStore::queryDerivationOutputs(const StorePath & path) { - return retrySQLite([&]() { + return retrySQLite([&]() { auto state(_state.lock()); auto 
useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use() (queryValidPathId(*state, path))); - PathSet outputs; + StorePathSet outputs; while (useQueryDerivationOutputs.next()) - outputs.insert(useQueryDerivationOutputs.getStr(1)); + outputs.insert(parseStorePath(useQueryDerivationOutputs.getStr(1))); return outputs; }); } -StringSet LocalStore::queryDerivationOutputNames(const Path & path) +StringSet LocalStore::queryDerivationOutputNames(const StorePath & path) { return retrySQLite([&]() { auto state(_state.lock()); @@ -812,45 +805,50 @@ StringSet LocalStore::queryDerivationOutputNames(const Path & path) } -Path LocalStore::queryPathFromHashPart(const string & hashPart) +std::optional LocalStore::queryPathFromHashPart(const std::string & hashPart) { if (hashPart.size() != storePathHashLen) throw Error("invalid hash part"); Path prefix = storeDir + "/" + hashPart; - return retrySQLite([&]() -> std::string { + return retrySQLite>([&]() -> std::optional { auto state(_state.lock()); auto useQueryPathFromHashPart(state->stmtQueryPathFromHashPart.use()(prefix)); - if (!useQueryPathFromHashPart.next()) return ""; + if (!useQueryPathFromHashPart.next()) return {}; const char * s = (const char *) sqlite3_column_text(state->stmtQueryPathFromHashPart, 0); - return s && prefix.compare(0, prefix.size(), s, prefix.size()) == 0 ? s : ""; + if (s && prefix.compare(0, prefix.size(), s, prefix.size()) == 0) + return parseStorePath(s); + return {}; }); } -PathSet LocalStore::querySubstitutablePaths(const PathSet & paths) +StorePathSet LocalStore::querySubstitutablePaths(const StorePathSet & paths) { - if (!settings.useSubstitutes) return PathSet(); + if (!settings.useSubstitutes) return StorePathSet(); - auto remaining = paths; - PathSet res; + StorePathSet remaining; + for (auto & i : paths) + remaining.insert(i.clone()); + + StorePathSet res; for (auto & sub : getDefaultSubstituters()) { if (remaining.empty()) break; if (sub->storeDir != storeDir) continue; - if (!sub->wantMassQuery()) continue; + if (!sub->wantMassQuery) continue; auto valid = sub->queryValidPaths(remaining); - PathSet remaining2; + StorePathSet remaining2; for (auto & path : remaining) if (valid.count(path)) - res.insert(path); + res.insert(path.clone()); else - remaining2.insert(path); + remaining2.insert(path.clone()); std::swap(remaining, remaining2); } @@ -859,7 +857,7 @@ PathSet LocalStore::querySubstitutablePaths(const PathSet & paths) } -void LocalStore::querySubstitutablePathInfos(const PathSet & paths, +void LocalStore::querySubstitutablePathInfos(const StorePathSet & paths, SubstitutablePathInfos & infos) { if (!settings.useSubstitutes) return; @@ -867,19 +865,18 @@ void LocalStore::querySubstitutablePathInfos(const PathSet & paths, if (sub->storeDir != storeDir) continue; for (auto & path : paths) { if (infos.count(path)) continue; - debug(format("checking substituter '%s' for path '%s'") - % sub->getUri() % path); + debug("checking substituter '%s' for path '%s'", sub->getUri(), printStorePath(path)); try { auto info = sub->queryPathInfo(path); auto narInfo = std::dynamic_pointer_cast( std::shared_ptr(info)); - infos[path] = SubstitutablePathInfo{ - info->deriver, - info->references, + infos.insert_or_assign(path.clone(), SubstitutablePathInfo{ + info->deriver ? info->deriver->clone() : std::optional(), + cloneStorePathSet(info->references), narInfo ? 
narInfo->fileSize : 0, - info->narSize}; - } catch (InvalidPath) { - } catch (SubstituterDisabled) { + info->narSize}); + } catch (InvalidPath &) { + } catch (SubstituterDisabled &) { } catch (Error & e) { if (settings.tryFallback) printError(e.what()); @@ -911,7 +908,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos) auto state(_state.lock()); SQLiteTxn txn(state->db); - PathSet paths; + StorePathSet paths; for (auto & i : infos) { assert(i.narHash.type == htSHA256); @@ -919,7 +916,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos) updatePathInfo(*state, i); else addValidPath(*state, i, false); - paths.insert(i.path); + paths.insert(i.path.clone()); } for (auto & i : infos) { @@ -932,11 +929,10 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos) this in addValidPath() above, because the references might not be valid yet. */ for (auto & i : infos) - if (isDerivation(i.path)) { - // FIXME: inefficient; we already loaded the - // derivation in addValidPath(). - Derivation drv = readDerivation(realStoreDir + "/" + baseNameOf(i.path)); - checkDerivationOutputs(i.path, drv); + if (i.path.isDerivation()) { + // FIXME: inefficient; we already loaded the derivation in addValidPath(). + checkDerivationOutputs(i.path, + readDerivation(*this, realStoreDir + "/" + std::string(i.path.to_string()))); } /* Do a topological sort of the paths. This will throw an @@ -952,18 +948,18 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos) /* Invalidate a path. The caller is responsible for checking that there are no referrers. */ -void LocalStore::invalidatePath(State & state, const Path & path) +void LocalStore::invalidatePath(State & state, const StorePath & path) { - debug(format("invalidating path '%1%'") % path); + debug("invalidating path '%s'", printStorePath(path)); - state.stmtInvalidatePath.use()(path).exec(); + state.stmtInvalidatePath.use()(printStorePath(path)).exec(); /* Note that the foreign key constraints on the Refs table take care of deleting the references entries for `path'. */ { auto state_(Store::state.lock()); - state_->pathInfoCache.erase(storePathToHash(path)); + state_->pathInfoCache.erase(storePathToHash(printStorePath(path))); } } @@ -981,10 +977,10 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr accessor) { if (!info.narHash) - throw Error("cannot add path '%s' because it lacks a hash", info.path); + throw Error("cannot add path '%s' because it lacks a hash", printStorePath(info.path)); if (requireSigs && checkSigs && !info.checkSignatures(*this, getPublicKeys())) - throw Error("cannot add path '%s' because it lacks a valid signature", info.path); + throw Error("cannot add path '%s' because it lacks a valid signature", printStorePath(info.path)); addTempRoot(info.path); @@ -992,12 +988,12 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, PathLocks outputLock; - Path realPath = realStoreDir + "/" + baseNameOf(info.path); + Path realPath = realStoreDir + "/" + std::string(info.path.to_string()); /* Lock the output path. But don't lock if we're being called from a build hook (whose parent process already acquired a lock on this path). 
*/ - if (!locksHeld.count(info.path)) + if (!locksHeld.count(printStorePath(info.path))) outputLock.lockPaths({realPath}); if (repair || !isValidPath(info.path)) { @@ -1006,25 +1002,32 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, /* While restoring the path from the NAR, compute the hash of the NAR. */ - HashSink hashSink(htSHA256); + std::unique_ptr hashSink; + if (info.ca == "") + hashSink = std::make_unique(htSHA256); + else { + if (!info.references.empty()) + settings.requireExperimentalFeature("ca-references"); + hashSink = std::make_unique(htSHA256, storePathToHash(printStorePath(info.path))); + } LambdaSource wrapperSource([&](unsigned char * data, size_t len) -> size_t { size_t n = source.read(data, len); - hashSink(data, n); + (*hashSink)(data, n); return n; }); restorePath(realPath, wrapperSource); - auto hashResult = hashSink.finish(); + auto hashResult = hashSink->finish(); if (hashResult.first != info.narHash) throw Error("hash mismatch importing path '%s';\n wanted: %s\n got: %s", - info.path, info.narHash.to_string(), hashResult.first.to_string()); + printStorePath(info.path), info.narHash.to_string(), hashResult.first.to_string()); if (hashResult.second != info.narSize) throw Error("size mismatch importing path '%s';\n wanted: %s\n got: %s", - info.path, info.narSize, hashResult.second); + printStorePath(info.path), info.narSize, hashResult.second); autoGC(); @@ -1040,12 +1043,12 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, } -Path LocalStore::addToStoreFromDump(const string & dump, const string & name, +StorePath LocalStore::addToStoreFromDump(const string & dump, const string & name, bool recursive, HashType hashAlgo, RepairFlag repair) { Hash h = hashString(hashAlgo, dump); - Path dstPath = makeFixedOutputPath(recursive, h, name); + auto dstPath = makeFixedOutputPath(recursive, h, name); addTempRoot(dstPath); @@ -1054,7 +1057,8 @@ Path LocalStore::addToStoreFromDump(const string & dump, const string & name, /* The first check above is an optimisation to prevent unnecessary lock acquisition. 
*/ - Path realPath = realStoreDir + "/" + baseNameOf(dstPath); + Path realPath = realStoreDir + "/"; + realPath += dstPath.to_string(); PathLocks outputLock({realPath}); @@ -1085,8 +1089,7 @@ Path LocalStore::addToStoreFromDump(const string & dump, const string & name, optimisePath(realPath); // FIXME: combine with hashPath() - ValidPathInfo info; - info.path = dstPath; + ValidPathInfo info(dstPath.clone()); info.narHash = hash.first; info.narSize = hash.second; info.ca = makeFixedOutputCA(recursive, h); @@ -1100,7 +1103,7 @@ Path LocalStore::addToStoreFromDump(const string & dump, const string & name, } -Path LocalStore::addToStore(const string & name, const Path & _srcPath, +StorePath LocalStore::addToStore(const string & name, const Path & _srcPath, bool recursive, HashType hashAlgo, PathFilter & filter, RepairFlag repair) { Path srcPath(absPath(_srcPath)); @@ -1118,8 +1121,8 @@ Path LocalStore::addToStore(const string & name, const Path & _srcPath, } -Path LocalStore::addTextToStore(const string & name, const string & s, - const PathSet & references, RepairFlag repair) +StorePath LocalStore::addTextToStore(const string & name, const string & s, + const StorePathSet & references, RepairFlag repair) { auto hash = hashString(htSHA256, s); auto dstPath = makeTextPath(name, hash, references); @@ -1128,7 +1131,8 @@ Path LocalStore::addTextToStore(const string & name, const string & s, if (repair || !isValidPath(dstPath)) { - Path realPath = realStoreDir + "/" + baseNameOf(dstPath); + Path realPath = realStoreDir + "/"; + realPath += dstPath.to_string(); PathLocks outputLock({realPath}); @@ -1148,11 +1152,10 @@ Path LocalStore::addTextToStore(const string & name, const string & s, optimisePath(realPath); - ValidPathInfo info; - info.path = dstPath; + ValidPathInfo info(dstPath.clone()); info.narHash = narHash; info.narSize = sink.s->size(); - info.references = references; + info.references = cloneStorePathSet(references); info.ca = "text:" + hash.to_string(); registerValidPath(info); } @@ -1174,27 +1177,25 @@ Path LocalStore::createTempDirInStore() the GC between createTempDir() and addTempRoot(), so repeat until `tmpDir' exists. */ tmpDir = createTempDir(realStoreDir); - addTempRoot(tmpDir); + addTempRoot(parseStorePath(tmpDir)); } while (!pathExists(tmpDir)); return tmpDir; } -void LocalStore::invalidatePathChecked(const Path & path) +void LocalStore::invalidatePathChecked(const StorePath & path) { - assertStorePath(path); - retrySQLite([&]() { auto state(_state.lock()); SQLiteTxn txn(state->db); if (isValidPath_(*state, path)) { - PathSet referrers; queryReferrers(*state, path, referrers); + StorePathSet referrers; queryReferrers(*state, path, referrers); referrers.erase(path); /* ignore self-references */ if (!referrers.empty()) - throw PathInUse(format("cannot delete path '%1%' because it is in use by %2%") - % path % showPaths(referrers)); + throw PathInUse("cannot delete path '%s' because it is in use by %s", + printStorePath(path), showPaths(referrers)); invalidatePath(*state, path); } @@ -1209,27 +1210,49 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) bool errors = false; - /* Acquire the global GC lock to prevent a garbage collection. */ + /* Acquire the global GC lock to get a consistent snapshot of + existing and valid paths. */ AutoCloseFD fdGCLock = openGCLock(ltWrite); - PathSet store; + StringSet store; for (auto & i : readDirectory(realStoreDir)) store.insert(i.name); /* Check whether all valid paths actually exist. 
*/ printInfo("checking path existence..."); - PathSet validPaths2 = queryAllValidPaths(), validPaths, done; + StorePathSet validPaths; + PathSet done; - for (auto & i : validPaths2) - verifyPath(i, store, done, validPaths, repair, errors); - - /* Release the GC lock so that checking content hashes (which can - take ages) doesn't block the GC or builds. */ fdGCLock = -1; + for (auto & i : queryAllValidPaths()) + verifyPath(printStorePath(i), store, done, validPaths, repair, errors); + /* Optionally, check the content hashes (slow). */ if (checkContents) { - printInfo("checking hashes..."); + + printInfo("checking link hashes..."); + + for (auto & link : readDirectory(linksDir)) { + printMsg(lvlTalkative, "checking contents of '%s'", link.name); + Path linkPath = linksDir + "/" + link.name; + string hash = hashPath(htSHA256, linkPath).first.to_string(Base32, false); + if (hash != link.name) { + printError( + "link '%s' was modified! expected hash '%s', got '%s'", + linkPath, link.name, hash); + if (repair) { + if (unlink(linkPath.c_str()) == 0) + printError("removed link '%s'", linkPath); + else + throw SysError("removing corrupt link '%s'", linkPath); + } else { + errors = true; + } + } + } + + printInfo("checking store hashes..."); Hash nullHash(htSHA256); @@ -1238,13 +1261,20 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) auto info = std::const_pointer_cast(std::shared_ptr(queryPathInfo(i))); /* Check the content hash (optionally - slow). */ - printMsg(lvlTalkative, format("checking contents of '%1%'") % i); - HashResult current = hashPath(info->narHash.type, toRealPath(i)); + printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i)); + + std::unique_ptr hashSink; + if (info->ca == "") + hashSink = std::make_unique(info->narHash.type); + else + hashSink = std::make_unique(info->narHash.type, storePathToHash(printStorePath(info->path))); + + dumpPath(toRealPath(printStorePath(i)), *hashSink); + auto current = hashSink->finish(); if (info->narHash != nullHash && info->narHash != current.first) { - printError(format("path '%1%' was modified! " - "expected hash '%2%', got '%3%'") - % i % info->narHash.to_string() % current.first.to_string()); + printError("path '%s' was modified! expected hash '%s', got '%s'", + printStorePath(i), info->narHash.to_string(), current.first.to_string()); if (repair) repairPath(i); else errors = true; } else { @@ -1252,14 +1282,14 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) /* Fill in missing hashes. */ if (info->narHash == nullHash) { - printError(format("fixing missing hash on '%1%'") % i); + printError("fixing missing hash on '%s'", printStorePath(i)); info->narHash = current.first; update = true; } /* Fill in missing narSize fields (from old stores). */ if (info->narSize == 0) { - printError(format("updating size field on '%1%' to %2%") % i % current.second); + printError("updating size field on '%s' to %s", printStorePath(i), current.second); info->narSize = current.second; update = true; } @@ -1275,9 +1305,9 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) /* It's possible that the path got GC'ed, so ignore errors on invalid paths. 
*/ if (isValidPath(i)) - printError(format("error: %1%") % e.msg()); + printError("error: %s", e.msg()); else - printError(format("warning: %1%") % e.msg()); + warn(e.msg()); errors = true; } } @@ -1287,44 +1317,43 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) } -void LocalStore::verifyPath(const Path & path, const PathSet & store, - PathSet & done, PathSet & validPaths, RepairFlag repair, bool & errors) +void LocalStore::verifyPath(const Path & pathS, const StringSet & store, + PathSet & done, StorePathSet & validPaths, RepairFlag repair, bool & errors) { checkInterrupt(); - if (done.find(path) != done.end()) return; - done.insert(path); + if (!done.insert(pathS).second) return; - if (!isStorePath(path)) { - printError(format("path '%1%' is not in the Nix store") % path); - auto state(_state.lock()); - invalidatePath(*state, path); + if (!isStorePath(pathS)) { + printError("path '%s' is not in the Nix store", pathS); return; } - if (store.find(baseNameOf(path)) == store.end()) { + auto path = parseStorePath(pathS); + + if (!store.count(std::string(path.to_string()))) { /* Check any referrers first. If we can invalidate them first, then we can invalidate this path as well. */ bool canInvalidate = true; - PathSet referrers; queryReferrers(path, referrers); + StorePathSet referrers; queryReferrers(path, referrers); for (auto & i : referrers) if (i != path) { - verifyPath(i, store, done, validPaths, repair, errors); - if (validPaths.find(i) != validPaths.end()) + verifyPath(printStorePath(i), store, done, validPaths, repair, errors); + if (validPaths.count(i)) canInvalidate = false; } if (canInvalidate) { - printError(format("path '%1%' disappeared, removing from database...") % path); + printError("path '%s' disappeared, removing from database...", pathS); auto state(_state.lock()); invalidatePath(*state, path); } else { - printError(format("path '%1%' disappeared, but it still has valid referrers!") % path); + printError("path '%s' disappeared, but it still has valid referrers!", pathS); if (repair) try { repairPath(path); } catch (Error & e) { - printError(format("warning: %1%") % e.msg()); + warn(e.msg()); errors = true; } else errors = true; @@ -1333,7 +1362,7 @@ void LocalStore::verifyPath(const Path & path, const PathSet & store, return; } - validPaths.insert(path); + validPaths.insert(std::move(path)); } @@ -1402,7 +1431,7 @@ void LocalStore::vacuumDB() } -void LocalStore::addSignatures(const Path & storePath, const StringSet & sigs) +void LocalStore::addSignatures(const StorePath & storePath, const StringSet & sigs) { retrySQLite([&]() { auto state(_state.lock()); @@ -1428,7 +1457,22 @@ void LocalStore::signPathInfo(ValidPathInfo & info) for (auto & secretKeyFile : secretKeyFiles.get()) { SecretKey secretKey(readFile(secretKeyFile)); - info.sign(secretKey); + info.sign(*this, secretKey); + } +} + + +void LocalStore::createUser(const std::string & userName, uid_t userId) +{ + for (auto & dir : { + fmt("%s/profiles/per-user/%s", stateDir, userName), + fmt("%s/gcroots/per-user/%s", stateDir, userName) + }) { + createDirs(dir); + if (chmod(dir.c_str(), 0755) == -1) + throw SysError("changing permissions of directory '%s'", dir); + if (chown(dir.c_str(), userId, getgid()) == -1) + throw SysError("changing owner of directory '%s'", dir); } } diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index 6b655647b..16aeab0ad 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -107,7 +107,7 @@ private: public: // Hack for 
build-remote.cc. - PathSet locksHeld = tokenizeString(getEnv("NIX_HELD_LOCKS")); + PathSet locksHeld; /* Initialise the local store, upgrading the schema if necessary. */ @@ -119,36 +119,36 @@ public: std::string getUri() override; - bool isValidPathUncached(const Path & path) override; + bool isValidPathUncached(const StorePath & path) override; - PathSet queryValidPaths(const PathSet & paths, + StorePathSet queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute = NoSubstitute) override; - PathSet queryAllValidPaths() override; + StorePathSet queryAllValidPaths() override; - void queryPathInfoUncached(const Path & path, - Callback> callback) override; + void queryPathInfoUncached(const StorePath & path, + Callback> callback) noexcept override; - void queryReferrers(const Path & path, PathSet & referrers) override; + void queryReferrers(const StorePath & path, StorePathSet & referrers) override; - PathSet queryValidDerivers(const Path & path) override; + StorePathSet queryValidDerivers(const StorePath & path) override; - PathSet queryDerivationOutputs(const Path & path) override; + StorePathSet queryDerivationOutputs(const StorePath & path) override; - StringSet queryDerivationOutputNames(const Path & path) override; + StringSet queryDerivationOutputNames(const StorePath & path) override; - Path queryPathFromHashPart(const string & hashPart) override; + std::optional queryPathFromHashPart(const std::string & hashPart) override; - PathSet querySubstitutablePaths(const PathSet & paths) override; + StorePathSet querySubstitutablePaths(const StorePathSet & paths) override; - void querySubstitutablePathInfos(const PathSet & paths, + void querySubstitutablePathInfos(const StorePathSet & paths, SubstitutablePathInfos & infos) override; void addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr accessor) override; - Path addToStore(const string & name, const Path & srcPath, + StorePath addToStore(const string & name, const Path & srcPath, bool recursive, HashType hashAlgo, PathFilter & filter, RepairFlag repair) override; @@ -156,20 +156,22 @@ public: in `dump', which is either a NAR serialisation (if recursive == true) or simply the contents of a regular file (if recursive == false). 
*/ - Path addToStoreFromDump(const string & dump, const string & name, - bool recursive = true, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair); + StorePath addToStoreFromDump(const string & dump, const string & name, + bool recursive = true, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override; - Path addTextToStore(const string & name, const string & s, - const PathSet & references, RepairFlag repair) override; + StorePath addTextToStore(const string & name, const string & s, + const StorePathSet & references, RepairFlag repair) override; - void buildPaths(const PathSet & paths, BuildMode buildMode) override; - - BuildResult buildDerivation(const Path & drvPath, const BasicDerivation & drv, + void buildPaths( + const std::vector & paths, BuildMode buildMode) override; - void ensurePath(const Path & path) override; + BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, + BuildMode buildMode) override; - void addTempRoot(const Path & path) override; + void ensurePath(const StorePath & path) override; + + void addTempRoot(const StorePath & path) override; void addIndirectRoot(const Path & path) override; @@ -215,9 +217,9 @@ public: /* Repair the contents of the given path by redownloading it using a substituter (if available). */ - void repairPath(const Path & path); + void repairPath(const StorePath & path); - void addSignatures(const Path & storePath, const StringSet & sigs) override; + void addSignatures(const StorePath & storePath, const StringSet & sigs) override; /* If free disk space in /nix/store if below minFree, delete garbage until it exceeds maxFree. */ @@ -231,17 +233,17 @@ private: void makeStoreWritable(); - uint64_t queryValidPathId(State & state, const Path & path); + uint64_t queryValidPathId(State & state, const StorePath & path); uint64_t addValidPath(State & state, const ValidPathInfo & info, bool checkOutputs = true); - void invalidatePath(State & state, const Path & path); + void invalidatePath(State & state, const StorePath & path); /* Delete a path from the Nix store. */ - void invalidatePathChecked(const Path & path); + void invalidatePathChecked(const StorePath & path); - void verifyPath(const Path & path, const PathSet & store, - PathSet & done, PathSet & validPaths, RepairFlag repair, bool & errors); + void verifyPath(const Path & path, const StringSet & store, + PathSet & done, StorePathSet & validPaths, RepairFlag repair, bool & errors); void updatePathInfo(State & state, const ValidPathInfo & info); @@ -256,14 +258,14 @@ private: void tryToDelete(GCState & state, const Path & path); - bool canReachRoot(GCState & state, PathSet & visited, const Path & path); + bool canReachRoot(GCState & state, StorePathSet & visited, const StorePath & path); void deletePathRecursive(GCState & state, const Path & path); bool isActiveTempFile(const GCState & state, const Path & path, const string & suffix); - int openGCLock(LockType lockType); + AutoCloseFD openGCLock(LockType lockType); void findRoots(const Path & path, unsigned char type, Roots & roots); @@ -275,7 +277,7 @@ private: Path createTempDirInStore(); - void checkDerivationOutputs(const Path & drvPath, const Derivation & drv); + void checkDerivationOutputs(const StorePath & drvPath, const Derivation & drv); typedef std::unordered_set InodeHash; @@ -284,8 +286,8 @@ private: void optimisePath_(Activity * act, OptimiseStats & stats, const Path & path, InodeHash & inodeHash); // Internal versions that are not wrapped in retry_sqlite. 
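[Editorial sketch, not part of the patch: the comment above distinguishes internal helpers from their public counterparts, which wrap every database operation in retrySQLite<T>() so that a concurrently locked SQLite database is retried rather than reported as an error. The sketch below models that retry pattern; ToySQLiteBusy and the back-off interval are simplified stand-ins for the real helpers.]

#include <chrono>
#include <functional>
#include <iostream>
#include <stdexcept>
#include <thread>

struct ToySQLiteBusy : std::runtime_error { using std::runtime_error::runtime_error; };

// Run a database operation and retry it whenever SQLite reports it is busy.
template<typename T>
T toyRetrySQLite(std::function<T()> fun)
{
    while (true) {
        try {
            return fun();
        } catch (ToySQLiteBusy &) {
            // Another process holds the lock; back off briefly and try again.
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
        }
    }
}

int main()
{
    int attempts = 0;
    int result = toyRetrySQLite<int>([&]() {
        if (++attempts < 3) throw ToySQLiteBusy("database is locked");
        return 42;
    });
    std::cout << "succeeded after " << attempts << " attempts: " << result << '\n';
}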
- bool isValidPath_(State & state, const Path & path); - void queryReferrers(State & state, const Path & path, PathSet & referrers); + bool isValidPath_(State & state, const StorePath & path); + void queryReferrers(State & state, const StorePath & path, StorePathSet & referrers); /* Add signatures to a ValidPathInfo using the secret keys specified by the ‘secret-key-files’ option. */ @@ -293,6 +295,8 @@ private: Path getRealStoreDir() override { return realStoreDir; } + void createUser(const std::string & userName, uid_t userId) override; + friend class DerivationGoal; friend class SubstitutionGoal; }; diff --git a/src/libstore/local.mk b/src/libstore/local.mk index 89fc918c3..ac68c2342 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -6,14 +6,16 @@ libstore_DIR := $(d) libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc) -libstore_LIBS = libutil +libstore_LIBS = libutil libnixrust libstore_LDFLAGS = $(SQLITE3_LIBS) -lbz2 $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread ifneq ($(OS), FreeBSD) libstore_LDFLAGS += -ldl endif +ifeq ($(OS), Darwin) libstore_FILES = sandbox-defaults.sb sandbox-minimal.sb sandbox-network.sb +endif $(foreach file,$(libstore_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/sandbox))) @@ -39,9 +41,12 @@ libstore_CXXFLAGS = \ -DNIX_LIBEXEC_DIR=\"$(libexecdir)\" \ -DNIX_BIN_DIR=\"$(bindir)\" \ -DNIX_MAN_DIR=\"$(mandir)\" \ - -DSANDBOX_SHELL="\"$(sandbox_shell)\"" \ -DLSOF=\"$(lsof)\" +ifneq ($(sandbox_shell),) +libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\"" +endif + $(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/build.cc: diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index adcce026f..9c47fe524 100644 --- a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -1,4 +1,5 @@ #include "derivations.hh" +#include "parsed-derivations.hh" #include "globals.hh" #include "local-store.hh" #include "store-api.hh" @@ -8,13 +9,13 @@ namespace nix { -void Store::computeFSClosure(const PathSet & startPaths, - PathSet & paths_, bool flipDirection, bool includeOutputs, bool includeDerivers) +void Store::computeFSClosure(const StorePathSet & startPaths, + StorePathSet & paths_, bool flipDirection, bool includeOutputs, bool includeDerivers) { struct State { size_t pending; - PathSet & paths; + StorePathSet & paths; std::exception_ptr exc; }; @@ -28,46 +29,47 @@ void Store::computeFSClosure(const PathSet & startPaths, { auto state(state_.lock()); if (state->exc) return; - if (state->paths.count(path)) return; - state->paths.insert(path); + if (!state->paths.insert(parseStorePath(path)).second) return; state->pending++; } - queryPathInfo(path, {[&, path](std::future> fut) { + queryPathInfo(parseStorePath(path), {[&, pathS(path)](std::future> fut) { // FIXME: calls to isValidPath() should be async try { auto info = fut.get(); + auto path = parseStorePath(pathS); + if (flipDirection) { - PathSet referrers; + StorePathSet referrers; queryReferrers(path, referrers); for (auto & ref : referrers) if (ref != path) - enqueue(ref); + enqueue(printStorePath(ref)); if (includeOutputs) for (auto & i : queryValidDerivers(path)) - enqueue(i); + enqueue(printStorePath(i)); - if (includeDerivers && isDerivation(path)) + if (includeDerivers && path.isDerivation()) for (auto & i : queryDerivationOutputs(path)) if (isValidPath(i) && queryPathInfo(i)->deriver == path) - enqueue(i); + enqueue(printStorePath(i)); } else { for (auto & ref : info->references) if (ref != path) - enqueue(ref); + enqueue(printStorePath(ref)); - if (includeOutputs && 
isDerivation(path)) + if (includeOutputs && path.isDerivation()) for (auto & i : queryDerivationOutputs(path)) - if (isValidPath(i)) enqueue(i); + if (isValidPath(i)) enqueue(printStorePath(i)); - if (includeDerivers && isValidPath(info->deriver)) - enqueue(info->deriver); + if (includeDerivers && info->deriver && isValidPath(*info->deriver)) + enqueue(printStorePath(*info->deriver)); } @@ -87,7 +89,7 @@ void Store::computeFSClosure(const PathSet & startPaths, }; for (auto & startPath : startPaths) - enqueue(startPath); + enqueue(printStorePath(startPath)); { auto state(state_.lock()); @@ -97,15 +99,17 @@ void Store::computeFSClosure(const PathSet & startPaths, } -void Store::computeFSClosure(const Path & startPath, - PathSet & paths_, bool flipDirection, bool includeOutputs, bool includeDerivers) +void Store::computeFSClosure(const StorePath & startPath, + StorePathSet & paths_, bool flipDirection, bool includeOutputs, bool includeDerivers) { - computeFSClosure(PathSet{startPath}, paths_, flipDirection, includeOutputs, includeDerivers); + StorePathSet paths; + paths.insert(startPath.clone()); + computeFSClosure(paths, paths_, flipDirection, includeOutputs, includeDerivers); } -void Store::queryMissing(const PathSet & targets, - PathSet & willBuild_, PathSet & willSubstitute_, PathSet & unknown_, +void Store::queryMissing(const std::vector & targets, + StorePathSet & willBuild_, StorePathSet & willSubstitute_, StorePathSet & unknown_, unsigned long long & downloadSize_, unsigned long long & narSize_) { Activity act(*logger, lvlDebug, actUnknown, "querying info about missing paths"); @@ -116,8 +120,8 @@ void Store::queryMissing(const PathSet & targets, struct State { - PathSet done; - PathSet & unknown, & willSubstitute, & willBuild; + std::unordered_set done; + StorePathSet & unknown, & willSubstitute, & willBuild; unsigned long long & downloadSize; unsigned long long & narSize; }; @@ -126,31 +130,36 @@ void Store::queryMissing(const PathSet & targets, { size_t left; bool done = false; - PathSet outPaths; + StorePathSet outPaths; DrvState(size_t left) : left(left) { } }; - Sync state_(State{PathSet(), unknown_, willSubstitute_, willBuild_, downloadSize_, narSize_}); + Sync state_(State{{}, unknown_, willSubstitute_, willBuild_, downloadSize_, narSize_}); - std::function doPath; + std::function doPath; - auto mustBuildDrv = [&](const Path & drvPath, const Derivation & drv) { + auto mustBuildDrv = [&](const StorePath & drvPath, const Derivation & drv) { { auto state(state_.lock()); - state->willBuild.insert(drvPath); + state->willBuild.insert(drvPath.clone()); } for (auto & i : drv.inputDrvs) - pool.enqueue(std::bind(doPath, makeDrvPathWithOutputs(i.first, i.second))); + pool.enqueue(std::bind(doPath, StorePathWithOutputs(i.first, i.second))); }; auto checkOutput = [&]( - const Path & drvPath, ref drv, const Path & outPath, ref> drvState_) + const Path & drvPathS, ref drv, const Path & outPathS, ref> drvState_) { if (drvState_->lock()->done) return; + auto drvPath = parseStorePath(drvPathS); + auto outPath = parseStorePath(outPathS); + SubstitutablePathInfos infos; - querySubstitutablePathInfos({outPath}, infos); + StorePathSet paths; // FIXME + paths.insert(outPath.clone()); + querySubstitutablePathInfos(paths, infos); if (infos.empty()) { drvState_->lock()->done = true; @@ -161,74 +170,74 @@ void Store::queryMissing(const PathSet & targets, if (drvState->done) return; assert(drvState->left); drvState->left--; - drvState->outPaths.insert(outPath); + 
drvState->outPaths.insert(outPath.clone()); if (!drvState->left) { for (auto & path : drvState->outPaths) - pool.enqueue(std::bind(doPath, path)); + pool.enqueue(std::bind(doPath, StorePathWithOutputs(path.clone()))); } } } }; - doPath = [&](const Path & path) { + doPath = [&](const StorePathWithOutputs & path) { { auto state(state_.lock()); - if (state->done.count(path)) return; - state->done.insert(path); + if (!state->done.insert(path.to_string(*this)).second) return; } - DrvPathWithOutputs i2 = parseDrvPathWithOutputs(path); - - if (isDerivation(i2.first)) { - if (!isValidPath(i2.first)) { + if (path.path.isDerivation()) { + if (!isValidPath(path.path)) { // FIXME: we could try to substitute the derivation. auto state(state_.lock()); - state->unknown.insert(path); + state->unknown.insert(path.path.clone()); return; } - Derivation drv = derivationFromPath(i2.first); + auto drv = make_ref(derivationFromPath(path.path)); + ParsedDerivation parsedDrv(path.path.clone(), *drv); PathSet invalid; - for (auto & j : drv.outputs) - if (wantOutput(j.first, i2.second) + for (auto & j : drv->outputs) + if (wantOutput(j.first, path.outputs) && !isValidPath(j.second.path)) - invalid.insert(j.second.path); + invalid.insert(printStorePath(j.second.path)); if (invalid.empty()) return; - if (settings.useSubstitutes && drv.substitutesAllowed()) { + if (settings.useSubstitutes && parsedDrv.substitutesAllowed()) { auto drvState = make_ref>(DrvState(invalid.size())); for (auto & output : invalid) - pool.enqueue(std::bind(checkOutput, i2.first, make_ref(drv), output, drvState)); + pool.enqueue(std::bind(checkOutput, printStorePath(path.path), drv, output, drvState)); } else - mustBuildDrv(i2.first, drv); + mustBuildDrv(path.path, *drv); } else { - if (isValidPath(path)) return; + if (isValidPath(path.path)) return; SubstitutablePathInfos infos; - querySubstitutablePathInfos({path}, infos); + StorePathSet paths; // FIXME + paths.insert(path.path.clone()); + querySubstitutablePathInfos(paths, infos); if (infos.empty()) { auto state(state_.lock()); - state->unknown.insert(path); + state->unknown.insert(path.path.clone()); return; } - auto info = infos.find(path); + auto info = infos.find(path.path); assert(info != infos.end()); { auto state(state_.lock()); - state->willSubstitute.insert(path); + state->willSubstitute.insert(path.path.clone()); state->downloadSize += info->second.downloadSize; state->narSize += info->second.narSize; } for (auto & ref : info->second.references) - pool.enqueue(std::bind(doPath, ref)); + pool.enqueue(std::bind(doPath, StorePathWithOutputs(ref))); } }; @@ -239,40 +248,42 @@ void Store::queryMissing(const PathSet & targets, } -Paths Store::topoSortPaths(const PathSet & paths) +StorePaths Store::topoSortPaths(const StorePathSet & paths) { - Paths sorted; - PathSet visited, parents; + StorePaths sorted; + StorePathSet visited, parents; - std::function dfsVisit; + std::function dfsVisit; - dfsVisit = [&](const Path & path, const Path * parent) { - if (parents.find(path) != parents.end()) - throw BuildError(format("cycle detected in the references of '%1%' from '%2%'") % path % *parent); + dfsVisit = [&](const StorePath & path, const StorePath * parent) { + if (parents.count(path)) + throw BuildError("cycle detected in the references of '%s' from '%s'", + printStorePath(path), printStorePath(*parent)); - if (visited.find(path) != visited.end()) return; - visited.insert(path); - parents.insert(path); + if (!visited.insert(path.clone()).second) return; + parents.insert(path.clone()); - 
PathSet references; + StorePathSet references; try { - references = queryPathInfo(path)->references; + references = cloneStorePathSet(queryPathInfo(path)->references); } catch (InvalidPath &) { } for (auto & i : references) /* Don't traverse into paths that don't exist. That can happen due to substitutes for non-existent paths. */ - if (i != path && paths.find(i) != paths.end()) + if (i != path && paths.count(i)) dfsVisit(i, &path); - sorted.push_front(path); + sorted.push_back(path.clone()); parents.erase(path); }; for (auto & i : paths) dfsVisit(i, nullptr); + std::reverse(sorted.begin(), sorted.end()); + return sorted; } diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index 32ad7f2b2..907645d86 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -147,26 +147,26 @@ public: }); } - bool cacheExists(const std::string & uri, - bool & wantMassQuery, int & priority) override + std::optional cacheExists(const std::string & uri) override { - return retrySQLite([&]() { + return retrySQLite>([&]() -> std::optional { auto state(_state.lock()); auto i = state->caches.find(uri); if (i == state->caches.end()) { auto queryCache(state->queryCache.use()(uri)); - if (!queryCache.next()) return false; + if (!queryCache.next()) + return std::nullopt; state->caches.emplace(uri, Cache{(int) queryCache.getInt(0), queryCache.getStr(1), queryCache.getInt(2) != 0, (int) queryCache.getInt(3)}); } auto & cache(getCache(*state, uri)); - wantMassQuery = cache.wantMassQuery; - priority = cache.priority; - - return true; + return CacheInfo { + .wantMassQuery = cache.wantMassQuery, + .priority = cache.priority + }; }); } @@ -193,11 +193,8 @@ public: if (!queryNAR.getInt(0)) return {oInvalid, 0}; - auto narInfo = make_ref(); - auto namePart = queryNAR.getStr(1); - narInfo->path = cache.storeDir + "/" + - hashPart + (namePart.empty() ? "" : "-" + namePart); + auto narInfo = make_ref(StorePath::fromBaseName(hashPart + "-" + namePart)); narInfo->url = queryNAR.getStr(2); narInfo->compression = queryNAR.getStr(3); if (!queryNAR.isNull(4)) @@ -206,9 +203,9 @@ public: narInfo->narHash = Hash(queryNAR.getStr(6)); narInfo->narSize = queryNAR.getInt(7); for (auto & r : tokenizeString(queryNAR.getStr(8), " ")) - narInfo->references.insert(cache.storeDir + "/" + r); + narInfo->references.insert(StorePath::fromBaseName(r)); if (!queryNAR.isNull(9)) - narInfo->deriver = cache.storeDir + "/" + queryNAR.getStr(9); + narInfo->deriver = StorePath::fromBaseName(queryNAR.getStr(9)); for (auto & sig : tokenizeString(queryNAR.getStr(10), " ")) narInfo->sigs.insert(sig); narInfo->ca = queryNAR.getStr(11); @@ -219,7 +216,7 @@ public: void upsertNarInfo( const std::string & uri, const std::string & hashPart, - std::shared_ptr info) override + std::shared_ptr info) override { retrySQLite([&]() { auto state(_state.lock()); @@ -228,14 +225,14 @@ public: if (info) { - auto narInfo = std::dynamic_pointer_cast(info); + auto narInfo = std::dynamic_pointer_cast(info); - assert(hashPart == storePathToHash(info->path)); + //assert(hashPart == storePathToHash(info->path)); state->insertNAR.use() (cache.id) (hashPart) - (storePathToName(info->path)) + (std::string(info->path.name())) (narInfo ? narInfo->url : "", narInfo != 0) (narInfo ? narInfo->compression : "", narInfo != 0) (narInfo && narInfo->fileHash ? 
narInfo->fileHash.to_string() : "", narInfo && narInfo->fileHash) @@ -243,7 +240,7 @@ public: (info->narHash.to_string()) (info->narSize) (concatStringsSep(" ", info->shortRefs())) - (info->deriver != "" ? baseNameOf(info->deriver) : "", info->deriver != "") + (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver) (concatStringsSep(" ", info->sigs)) (info->ca) (time(0)).exec(); diff --git a/src/libstore/nar-info-disk-cache.hh b/src/libstore/nar-info-disk-cache.hh index 88d909732..878acbb87 100644 --- a/src/libstore/nar-info-disk-cache.hh +++ b/src/libstore/nar-info-disk-cache.hh @@ -10,18 +10,25 @@ class NarInfoDiskCache public: typedef enum { oValid, oInvalid, oUnknown } Outcome; + virtual ~NarInfoDiskCache() { }; + virtual void createCache(const std::string & uri, const Path & storeDir, bool wantMassQuery, int priority) = 0; - virtual bool cacheExists(const std::string & uri, - bool & wantMassQuery, int & priority) = 0; + struct CacheInfo + { + bool wantMassQuery; + int priority; + }; + + virtual std::optional cacheExists(const std::string & uri) = 0; virtual std::pair> lookupNarInfo( const std::string & uri, const std::string & hashPart) = 0; virtual void upsertNarInfo( const std::string & uri, const std::string & hashPart, - std::shared_ptr info) = 0; + std::shared_ptr info) = 0; }; /* Return a singleton cache object that can be used concurrently by diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc index cb568ccdc..1375094b5 100644 --- a/src/libstore/nar-info.cc +++ b/src/libstore/nar-info.cc @@ -4,6 +4,7 @@ namespace nix { NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence) + : ValidPathInfo(StorePath::dummy.clone()) // FIXME: hack { auto corrupt = [&]() { throw Error(format("NAR info file '%1%' is corrupt") % whence); @@ -18,6 +19,8 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & } }; + bool havePath = false; + size_t pos = 0; while (pos < s.size()) { @@ -32,8 +35,8 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & std::string value(s, colon + 2, eol - colon - 2); if (name == "StorePath") { - if (!store.isStorePath(value)) corrupt(); - path = value; + path = store.parseStorePath(value); + havePath = true; } else if (name == "URL") url = value; @@ -52,18 +55,12 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & else if (name == "References") { auto refs = tokenizeString(value, " "); if (!references.empty()) corrupt(); - for (auto & r : refs) { - auto r2 = store.storeDir + "/" + r; - if (!store.isStorePath(r2)) corrupt(); - references.insert(r2); - } + for (auto & r : refs) + references.insert(StorePath::fromBaseName(r)); } else if (name == "Deriver") { - if (value != "unknown-deriver") { - auto p = store.storeDir + "/" + value; - if (!store.isStorePath(p)) corrupt(); - deriver = p; - } + if (value != "unknown-deriver") + deriver = StorePath::fromBaseName(value); } else if (name == "System") system = value; @@ -79,13 +76,13 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & if (compression == "") compression = "bzip2"; - if (path.empty() || url.empty() || narSize == 0 || !narHash) corrupt(); + if (!havePath || url.empty() || narSize == 0 || !narHash) corrupt(); } -std::string NarInfo::to_string() const +std::string NarInfo::to_string(const Store & store) const { std::string res; - res += "StorePath: " + path + "\n"; + res += "StorePath: " + store.printStorePath(path) + 
"\n"; res += "URL: " + url + "\n"; assert(compression != ""); res += "Compression: " + compression + "\n"; @@ -98,8 +95,8 @@ std::string NarInfo::to_string() const res += "References: " + concatStringsSep(" ", shortRefs()) + "\n"; - if (!deriver.empty()) - res += "Deriver: " + baseNameOf(deriver) + "\n"; + if (deriver) + res += "Deriver: " + std::string(deriver->to_string()) + "\n"; if (!system.empty()) res += "System: " + system + "\n"; diff --git a/src/libstore/nar-info.hh b/src/libstore/nar-info.hh index 4995061fb..373c33427 100644 --- a/src/libstore/nar-info.hh +++ b/src/libstore/nar-info.hh @@ -14,11 +14,12 @@ struct NarInfo : ValidPathInfo uint64_t fileSize = 0; std::string system; - NarInfo() { } + NarInfo() = delete; + NarInfo(StorePath && path) : ValidPathInfo(std::move(path)) { } NarInfo(const ValidPathInfo & info) : ValidPathInfo(info) { } NarInfo(const Store & store, const std::string & s, const std::string & whence); - std::string to_string() const; + std::string to_string(const Store & store) const; }; } diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index 991512f21..8ac382e9d 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -254,7 +254,7 @@ void LocalStore::optimiseStore(OptimiseStats & stats) { Activity act(*logger, actOptimiseStore); - PathSet paths = queryAllValidPaths(); + auto paths = queryAllValidPaths(); InodeHash inodeHash = loadInodeHash(); act.progress(0, paths.size()); @@ -265,8 +265,8 @@ void LocalStore::optimiseStore(OptimiseStats & stats) addTempRoot(i); if (!isValidPath(i)) continue; /* path was GC'ed, probably */ { - Activity act(*logger, lvlTalkative, actUnknown, fmt("optimising path '%s'", i)); - optimisePath_(&act, stats, realStoreDir + "/" + baseNameOf(i), inodeHash); + Activity act(*logger, lvlTalkative, actUnknown, fmt("optimising path '%s'", printStorePath(i))); + optimisePath_(&act, stats, realStoreDir + "/" + std::string(i.to_string()), inodeHash); } done++; act.progress(done, paths.size()); diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc index 17fde00a0..d0f289a0f 100644 --- a/src/libstore/parsed-derivations.cc +++ b/src/libstore/parsed-derivations.cc @@ -2,8 +2,8 @@ namespace nix { -ParsedDerivation::ParsedDerivation(const Path & drvPath, BasicDerivation & drv) - : drvPath(drvPath), drv(drv) +ParsedDerivation::ParsedDerivation(StorePath && drvPath, BasicDerivation & drv) + : drvPath(std::move(drvPath)), drv(drv) { /* Parse the __json attribute, if any. 
*/ auto jsonAttr = drv.env.find("__json"); @@ -11,7 +11,7 @@ ParsedDerivation::ParsedDerivation(const Path & drvPath, BasicDerivation & drv) try { structuredAttrs = nlohmann::json::parse(jsonAttr->second); } catch (std::exception & e) { - throw Error("cannot process __json attribute of '%s': %s", drvPath, e.what()); + throw Error("cannot process __json attribute of '%s': %s", drvPath.to_string(), e.what()); } } } @@ -24,7 +24,7 @@ std::optional ParsedDerivation::getStringAttr(const std::string & n return {}; else { if (!i->is_string()) - throw Error("attribute '%s' of derivation '%s' must be a string", name, drvPath); + throw Error("attribute '%s' of derivation '%s' must be a string", name, drvPath.to_string()); return i->get(); } } else { @@ -44,7 +44,7 @@ bool ParsedDerivation::getBoolAttr(const std::string & name, bool def) const return def; else { if (!i->is_boolean()) - throw Error("attribute '%s' of derivation '%s' must be a Boolean", name, drvPath); + throw Error("attribute '%s' of derivation '%s' must be a Boolean", name, drvPath.to_string()); return i->get(); } } else { @@ -64,11 +64,11 @@ std::optional ParsedDerivation::getStringsAttr(const std::string & name return {}; else { if (!i->is_array()) - throw Error("attribute '%s' of derivation '%s' must be a list of strings", name, drvPath); + throw Error("attribute '%s' of derivation '%s' must be a list of strings", name, drvPath.to_string()); Strings res; for (auto j = i->begin(); j != i->end(); ++j) { if (!j->is_string()) - throw Error("attribute '%s' of derivation '%s' must be a list of strings", name, drvPath); + throw Error("attribute '%s' of derivation '%s' must be a list of strings", name, drvPath.to_string()); res.push_back(j->get()); } return res; @@ -108,4 +108,9 @@ bool ParsedDerivation::willBuildLocally() const return getBoolAttr("preferLocalBuild") && canBuildLocally(); } +bool ParsedDerivation::substitutesAllowed() const +{ + return getBoolAttr("allowSubstitutes", true); +} + } diff --git a/src/libstore/parsed-derivations.hh b/src/libstore/parsed-derivations.hh index ed07dc652..cec868754 100644 --- a/src/libstore/parsed-derivations.hh +++ b/src/libstore/parsed-derivations.hh @@ -6,13 +6,13 @@ namespace nix { class ParsedDerivation { - Path drvPath; + StorePath drvPath; BasicDerivation & drv; std::optional structuredAttrs; public: - ParsedDerivation(const Path & drvPath, BasicDerivation & drv); + ParsedDerivation(StorePath && drvPath, BasicDerivation & drv); const std::optional & getStructuredAttrs() const { @@ -30,6 +30,8 @@ public: bool canBuildLocally() const; bool willBuildLocally() const; + + bool substitutesAllowed() const; }; } diff --git a/src/libstore/path.cc b/src/libstore/path.cc new file mode 100644 index 000000000..a33bec3ed --- /dev/null +++ b/src/libstore/path.cc @@ -0,0 +1,117 @@ +#include "store-api.hh" + +namespace nix { + +extern "C" { + rust::Result ffi_StorePath_new(rust::StringSlice path, rust::StringSlice storeDir); + rust::Result ffi_StorePath_new2(unsigned char hash[20], rust::StringSlice storeDir); + rust::Result ffi_StorePath_fromBaseName(rust::StringSlice baseName); + rust::String ffi_StorePath_to_string(const StorePath & _this); + StorePath ffi_StorePath_clone(const StorePath & _this); + rust::StringSlice ffi_StorePath_name(const StorePath & _this); +} + +StorePath StorePath::make(std::string_view path, std::string_view storeDir) +{ + return ffi_StorePath_new((rust::StringSlice) path, (rust::StringSlice) storeDir).unwrap(); +} + +StorePath StorePath::make(unsigned char hash[20], 
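The ParsedDerivation accessors above all follow one pattern: consult the parsed __json ("structured attrs") object when it is present, otherwise fall back to the plain string environment. A condensed sketch of that lookup for a Boolean attribute, using nlohmann::json directly; treating "1" as true in the env fallback is an assumption of this sketch, not something visible in the hunk:

    #include <nlohmann/json.hpp>

    #include <map>
    #include <optional>
    #include <stdexcept>
    #include <string>

    // Prefer the structured-attrs JSON object; otherwise read the plain env.
    bool getBoolAttrSketch(const std::optional<nlohmann::json> & structuredAttrs,
        const std::map<std::string, std::string> & env,
        const std::string & name, bool def)
    {
        if (structuredAttrs) {
            auto i = structuredAttrs->find(name);
            if (i == structuredAttrs->end()) return def;
            if (!i->is_boolean())
                throw std::runtime_error("attribute '" + name + "' must be a Boolean");
            return i->get<bool>();
        } else {
            auto i = env.find(name);
            if (i == env.end()) return def;
            return i->second == "1";   // assumed convention for plain env attrs
        }
    }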
std::string_view name) +{ + return ffi_StorePath_new2(hash, (rust::StringSlice) name).unwrap(); +} + +StorePath StorePath::fromBaseName(std::string_view baseName) +{ + return ffi_StorePath_fromBaseName((rust::StringSlice) baseName).unwrap(); +} + +rust::String StorePath::to_string() const +{ + return ffi_StorePath_to_string(*this); +} + +StorePath StorePath::clone() const +{ + return ffi_StorePath_clone(*this); +} + +bool StorePath::isDerivation() const +{ + return hasSuffix(name(), drvExtension); +} + +std::string_view StorePath::name() const +{ + return ffi_StorePath_name(*this); +} + +StorePath StorePath::dummy( + StorePath::make( + (unsigned char *) "xxxxxxxxxxxxxxxxxxxx", "x")); + +StorePath Store::parseStorePath(std::string_view path) const +{ + return StorePath::make(path, storeDir); +} + +StorePathSet Store::parseStorePathSet(const PathSet & paths) const +{ + StorePathSet res; + for (auto & i : paths) res.insert(parseStorePath(i)); + return res; +} + +std::string Store::printStorePath(const StorePath & path) const +{ + auto s = storeDir + "/"; + s += (std::string_view) path.to_string(); + return s; +} + +PathSet Store::printStorePathSet(const StorePathSet & paths) const +{ + PathSet res; + for (auto & i : paths) res.insert(printStorePath(i)); + return res; +} + +StorePathSet cloneStorePathSet(const StorePathSet & paths) +{ + StorePathSet res; + for (auto & p : paths) + res.insert(p.clone()); + return res; +} + +StorePathSet storePathsToSet(const StorePaths & paths) +{ + StorePathSet res; + for (auto & p : paths) + res.insert(p.clone()); + return res; +} + +StorePathSet singleton(const StorePath & path) +{ + StorePathSet res; + res.insert(path.clone()); + return res; +} + +std::pair parsePathWithOutputs(std::string_view s) +{ + size_t n = s.find("!"); + return n == s.npos + ? std::make_pair(s, std::set()) + : std::make_pair(((std::string_view) s).substr(0, n), + tokenizeString>(((std::string_view) s).substr(n + 1), ",")); +} + +StorePathWithOutputs Store::parsePathWithOutputs(const std::string & s) +{ + auto [path, outputs] = nix::parsePathWithOutputs(s); + return {parseStorePath(path), std::move(outputs)}; +} + +} diff --git a/src/libstore/path.hh b/src/libstore/path.hh new file mode 100644 index 000000000..c31ea2179 --- /dev/null +++ b/src/libstore/path.hh @@ -0,0 +1,107 @@ +#pragma once + +#include "rust-ffi.hh" + +namespace nix { + +/* See path.rs. */ +struct StorePath; + +class Store; + +extern "C" { + void ffi_StorePath_drop(void *); + bool ffi_StorePath_less_than(const StorePath & a, const StorePath & b); + bool ffi_StorePath_eq(const StorePath & a, const StorePath & b); + unsigned char * ffi_StorePath_hash_data(const StorePath & p); +} + +struct StorePath : rust::Value<3 * sizeof(void *) + 24, ffi_StorePath_drop> +{ + static StorePath make(std::string_view path, std::string_view storeDir); + + static StorePath make(unsigned char hash[20], std::string_view name); + + static StorePath fromBaseName(std::string_view baseName); + + rust::String to_string() const; + + bool operator < (const StorePath & other) const + { + return ffi_StorePath_less_than(*this, other); + } + + bool operator == (const StorePath & other) const + { + return ffi_StorePath_eq(*this, other); + } + + bool operator != (const StorePath & other) const + { + return !(*this == other); + } + + StorePath clone() const; + + /* Check whether a file name ends with the extension for + derivations. 
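A short usage sketch of the new value type as declared here and in path.cc above. StorePath is copied explicitly through clone() throughout this patch, and StorePathSet is the std::set alias defined just below; the base name argument stands in for a real "<hash>-<name>" string:

    #include "store-api.hh"   // pulls in path.hh and the Store interface

    #include <iostream>

    void storePathExample(const Store & store, std::string_view baseName)
    {
        auto p = StorePath::fromBaseName(baseName);     // "<hash>-<name>" form
        std::cout << store.printStorePath(p) << "\n";   // storeDir + "/" + base name
        std::cout << p.name() << "\n";                  // name part only
        if (p.isDerivation())
            std::cout << "this is a .drv\n";

        StorePathSet set;
        set.insert(p.clone());                                       // explicit copy
        set.insert(store.parseStorePath(store.printStorePath(p)));   // round trip
    }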
*/ + bool isDerivation() const; + + std::string_view name() const; + + unsigned char * hashData() const + { + return ffi_StorePath_hash_data(*this); + } + + static StorePath dummy; +}; + +typedef std::set StorePathSet; +typedef std::vector StorePaths; + +StorePathSet cloneStorePathSet(const StorePathSet & paths); +StorePathSet storePathsToSet(const StorePaths & paths); + +StorePathSet singleton(const StorePath & path); + +/* Size of the hash part of store paths, in base-32 characters. */ +const size_t storePathHashLen = 32; // i.e. 160 bits + +/* Extension of derivations in the Nix store. */ +const std::string drvExtension = ".drv"; + +struct StorePathWithOutputs +{ + StorePath path; + std::set outputs; + + StorePathWithOutputs(const StorePath & path, const std::set & outputs = {}) + : path(path.clone()), outputs(outputs) + { } + + StorePathWithOutputs(StorePath && path, std::set && outputs) + : path(std::move(path)), outputs(std::move(outputs)) + { } + + StorePathWithOutputs(const StorePathWithOutputs & other) + : path(other.path.clone()), outputs(other.outputs) + { } + + std::string to_string(const Store & store) const; +}; + +std::pair parsePathWithOutputs(std::string_view s); + +} + +namespace std { + +template<> struct hash { + std::size_t operator()(const nix::StorePath & path) const noexcept + { + return * (std::size_t *) path.hashData(); + } +}; + +} diff --git a/src/libstore/pathlocks.cc b/src/libstore/pathlocks.cc index 08d1efdbe..2635e3940 100644 --- a/src/libstore/pathlocks.cc +++ b/src/libstore/pathlocks.cc @@ -5,9 +5,10 @@ #include #include +#include #include #include -#include +#include namespace nix { @@ -40,17 +41,14 @@ void deleteLockFile(const Path & path, int fd) bool lockFile(int fd, LockType lockType, bool wait) { - struct flock lock; - if (lockType == ltRead) lock.l_type = F_RDLCK; - else if (lockType == ltWrite) lock.l_type = F_WRLCK; - else if (lockType == ltNone) lock.l_type = F_UNLCK; + int type; + if (lockType == ltRead) type = LOCK_SH; + else if (lockType == ltWrite) type = LOCK_EX; + else if (lockType == ltNone) type = LOCK_UN; else abort(); - lock.l_whence = SEEK_SET; - lock.l_start = 0; - lock.l_len = 0; /* entire file */ if (wait) { - while (fcntl(fd, F_SETLKW, &lock) != 0) { + while (flock(fd, type) != 0) { checkInterrupt(); if (errno != EINTR) throw SysError(format("acquiring/releasing lock")); @@ -58,9 +56,9 @@ bool lockFile(int fd, LockType lockType, bool wait) return false; } } else { - while (fcntl(fd, F_SETLK, &lock) != 0) { + while (flock(fd, type | LOCK_NB) != 0) { checkInterrupt(); - if (errno == EACCES || errno == EAGAIN) return false; + if (errno == EWOULDBLOCK) return false; if (errno != EINTR) throw SysError(format("acquiring/releasing lock")); } @@ -70,14 +68,6 @@ bool lockFile(int fd, LockType lockType, bool wait) } -/* This enables us to check whether are not already holding a lock on - a file ourselves. POSIX locks (fcntl) suck in this respect: if we - close a descriptor, the previous lock will be closed as well. And - there is no way to query whether we already have a lock (F_GETLK - only works on locks held by other processes). 
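The new implementation above replaces per-process fcntl locks with flock(2), which avoids the pitfall described in the deleted comment: closing any descriptor on the file silently drops a POSIX lock. A minimal standalone version of the helper, without the checkInterrupt() hook:

    #include <cerrno>
    #include <stdexcept>
    #include <sys/file.h>

    enum LockType { ltRead, ltWrite, ltNone };

    // Returns true once the lock is held (or released, for ltNone); returns
    // false only when wait == false and another process holds the lock.
    bool lockFileSketch(int fd, LockType lockType, bool wait)
    {
        int type;
        if (lockType == ltRead) type = LOCK_SH;
        else if (lockType == ltWrite) type = LOCK_EX;
        else type = LOCK_UN;

        while (flock(fd, wait ? type : type | LOCK_NB) != 0) {
            if (!wait && errno == EWOULDBLOCK) return false;
            if (errno != EINTR)
                throw std::runtime_error("acquiring/releasing lock");
        }
        return true;
    }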
*/ -static Sync lockedPaths_; - - PathLocks::PathLocks() : deletePaths(false) { @@ -91,7 +81,7 @@ PathLocks::PathLocks(const PathSet & paths, const string & waitMsg) } -bool PathLocks::lockPaths(const PathSet & _paths, +bool PathLocks::lockPaths(const PathSet & paths, const string & waitMsg, bool wait) { assert(fds.empty()); @@ -99,75 +89,54 @@ bool PathLocks::lockPaths(const PathSet & _paths, /* Note that `fds' is built incrementally so that the destructor will only release those locks that we have already acquired. */ - /* Sort the paths. This assures that locks are always acquired in - the same order, thus preventing deadlocks. */ - Paths paths(_paths.begin(), _paths.end()); - paths.sort(); - - /* Acquire the lock for each path. */ + /* Acquire the lock for each path in sorted order. This ensures + that locks are always acquired in the same order, thus + preventing deadlocks. */ for (auto & path : paths) { checkInterrupt(); Path lockPath = path + ".lock"; debug(format("locking path '%1%'") % path); - { - auto lockedPaths(lockedPaths_.lock()); - if (lockedPaths->count(lockPath)) { - if (!wait) return false; - throw AlreadyLocked("deadlock: trying to re-acquire self-held lock '%s'", lockPath); - } - lockedPaths->insert(lockPath); - } + AutoCloseFD fd; - try { + while (1) { - AutoCloseFD fd; + /* Open/create the lock file. */ + fd = openLockFile(lockPath, true); - while (1) { - - /* Open/create the lock file. */ - fd = openLockFile(lockPath, true); - - /* Acquire an exclusive lock. */ - if (!lockFile(fd.get(), ltWrite, false)) { - if (wait) { - if (waitMsg != "") printError(waitMsg); - lockFile(fd.get(), ltWrite, true); - } else { - /* Failed to lock this path; release all other - locks. */ - unlock(); - lockedPaths_.lock()->erase(lockPath); - return false; - } + /* Acquire an exclusive lock. */ + if (!lockFile(fd.get(), ltWrite, false)) { + if (wait) { + if (waitMsg != "") printError(waitMsg); + lockFile(fd.get(), ltWrite, true); + } else { + /* Failed to lock this path; release all other + locks. */ + unlock(); + return false; } - - debug(format("lock acquired on '%1%'") % lockPath); - - /* Check that the lock file hasn't become stale (i.e., - hasn't been unlinked). */ - struct stat st; - if (fstat(fd.get(), &st) == -1) - throw SysError(format("statting lock file '%1%'") % lockPath); - if (st.st_size != 0) - /* This lock file has been unlinked, so we're holding - a lock on a deleted file. This means that other - processes may create and acquire a lock on - `lockPath', and proceed. So we must retry. */ - debug(format("open lock file '%1%' has become stale") % lockPath); - else - break; } - /* Use borrow so that the descriptor isn't closed. */ - fds.push_back(FDPair(fd.release(), lockPath)); + debug(format("lock acquired on '%1%'") % lockPath); - } catch (...) { - lockedPaths_.lock()->erase(lockPath); - throw; + /* Check that the lock file hasn't become stale (i.e., + hasn't been unlinked). */ + struct stat st; + if (fstat(fd.get(), &st) == -1) + throw SysError(format("statting lock file '%1%'") % lockPath); + if (st.st_size != 0) + /* This lock file has been unlinked, so we're holding + a lock on a deleted file. This means that other + processes may create and acquire a lock on + `lockPath', and proceed. So we must retry. */ + debug(format("open lock file '%1%' has become stale") % lockPath); + else + break; } + /* Use borrow so that the descriptor isn't closed. 
*/ + fds.push_back(FDPair(fd.release(), lockPath)); } return true; @@ -189,8 +158,6 @@ void PathLocks::unlock() for (auto & i : fds) { if (deletePaths) deleteLockFile(i.second, i.first); - lockedPaths_.lock()->erase(i.second); - if (close(i.first) == -1) printError( format("error (ignored): cannot close lock file on '%1%'") % i.second); @@ -208,11 +175,4 @@ void PathLocks::setDeletion(bool deletePaths) } -bool pathIsLockedByMe(const Path & path) -{ - Path lockPath = path + ".lock"; - return lockedPaths_.lock()->count(lockPath); -} - - } diff --git a/src/libstore/pathlocks.hh b/src/libstore/pathlocks.hh index db51f950a..411da0222 100644 --- a/src/libstore/pathlocks.hh +++ b/src/libstore/pathlocks.hh @@ -16,8 +16,6 @@ enum LockType { ltRead, ltWrite, ltNone }; bool lockFile(int fd, LockType lockType, bool wait); -MakeError(AlreadyLocked, Error); - class PathLocks { private: @@ -37,6 +35,4 @@ public: void setDeletion(bool deletePaths); }; -bool pathIsLockedByMe(const Path & path); - } diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc index 4c6af567a..dae3f2d32 100644 --- a/src/libstore/profiles.cc +++ b/src/libstore/profiles.cc @@ -40,7 +40,7 @@ Generations findGenerations(Path profile, int & curGen) Generations gens; Path profileDir = dirOf(profile); - string profileName = baseNameOf(profile); + auto profileName = std::string(baseNameOf(profile)); for (auto & i : readDirectory(profileDir)) { int n; @@ -108,7 +108,7 @@ Path createGeneration(ref store, Path profile, Path outPath) user environment etc. we've just built. */ Path generation; makeName(profile, num + 1, generation); - store->addPermRoot(outPath, generation, false, true); + store->addPermRoot(store->parseStorePath(outPath), generation, false, true); return generation; } diff --git a/src/libstore/references.cc b/src/libstore/references.cc index 5b7eb1f84..102e15921 100644 --- a/src/libstore/references.cc +++ b/src/libstore/references.cc @@ -36,11 +36,10 @@ static void search(const unsigned char * s, size_t len, } if (!match) continue; string ref((const char *) s + i, refLength); - if (hashes.find(ref) != hashes.end()) { + if (hashes.erase(ref)) { debug(format("found reference to '%1%' at offset '%2%'") % ref % i); seen.insert(ref); - hashes.erase(ref); } ++i; } @@ -90,7 +89,7 @@ PathSet scanForReferences(const string & path, hash part of the file name. (This assumes that all references have the form `HASH-bla'). */ for (auto & i : refs) { - string baseName = baseNameOf(i); + auto baseName = std::string(baseNameOf(i)); string::size_type pos = baseName.find('-'); if (pos == string::npos) throw Error(format("bad reference '%1%'") % i); @@ -119,4 +118,66 @@ PathSet scanForReferences(const string & path, } +RewritingSink::RewritingSink(const std::string & from, const std::string & to, Sink & nextSink) + : from(from), to(to), nextSink(nextSink) +{ + assert(from.size() == to.size()); +} + +void RewritingSink::operator () (const unsigned char * data, size_t len) +{ + std::string s(prev); + s.append((const char *) data, len); + + size_t j = 0; + while ((j = s.find(from, j)) != string::npos) { + matches.push_back(pos + j); + s.replace(j, from.size(), to); + } + + prev = s.size() < from.size() ? 
s : std::string(s, s.size() - from.size() + 1, from.size() - 1); + + auto consumed = s.size() - prev.size(); + + pos += consumed; + + if (consumed) nextSink((unsigned char *) s.data(), consumed); +} + +void RewritingSink::flush() +{ + if (prev.empty()) return; + pos += prev.size(); + nextSink((unsigned char *) prev.data(), prev.size()); + prev.clear(); +} + +HashModuloSink::HashModuloSink(HashType ht, const std::string & modulus) + : hashSink(ht) + , rewritingSink(modulus, std::string(modulus.size(), 0), hashSink) +{ +} + +void HashModuloSink::operator () (const unsigned char * data, size_t len) +{ + rewritingSink(data, len); +} + +HashResult HashModuloSink::finish() +{ + rewritingSink.flush(); + + /* Hash the positions of the self-references. This ensures that a + NAR with self-references and a NAR with some of the + self-references already zeroed out do not produce a hash + collision. FIXME: proof. */ + for (auto & pos : rewritingSink.matches) { + auto s = fmt("|%d", pos); + hashSink((unsigned char *) s.data(), s.size()); + } + + auto h = hashSink.finish(); + return {h.first, rewritingSink.pos}; +} + } diff --git a/src/libstore/references.hh b/src/libstore/references.hh index 013809d12..c38bdd720 100644 --- a/src/libstore/references.hh +++ b/src/libstore/references.hh @@ -7,5 +7,32 @@ namespace nix { PathSet scanForReferences(const Path & path, const PathSet & refs, HashResult & hash); - + +struct RewritingSink : Sink +{ + std::string from, to, prev; + Sink & nextSink; + uint64_t pos = 0; + + std::vector matches; + + RewritingSink(const std::string & from, const std::string & to, Sink & nextSink); + + void operator () (const unsigned char * data, size_t len) override; + + void flush(); +}; + +struct HashModuloSink : AbstractHashSink +{ + HashSink hashSink; + RewritingSink rewritingSink; + + HashModuloSink(HashType ht, const std::string & modulus); + + void operator () (const unsigned char * data, size_t len) override; + + HashResult finish() override; +}; + } diff --git a/src/libstore/remote-fs-accessor.cc b/src/libstore/remote-fs-accessor.cc index 5233fb2c2..5a2d103b9 100644 --- a/src/libstore/remote-fs-accessor.cc +++ b/src/libstore/remote-fs-accessor.cc @@ -50,7 +50,7 @@ std::pair, Path> RemoteFSAccessor::fetch(const Path & path_) auto storePath = store->toStorePath(path); std::string restPath = std::string(path, storePath.size()); - if (!store->isValidPath(storePath)) + if (!store->isValidPath(store->parseStorePath(storePath))) throw InvalidPath(format("path '%1%' is not a valid store path") % storePath); auto i = nars.find(storePath); @@ -96,7 +96,7 @@ std::pair, Path> RemoteFSAccessor::fetch(const Path & path_) } catch (SysError &) { } } - store->narFromPath(storePath, sink); + store->narFromPath(store->parseStorePath(storePath), sink); auto narAccessor = makeNarAccessor(sink.s); addToCache(storePath, *sink.s, narAccessor); return {narAccessor, restPath}; diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 15faf78a5..8c55da268 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -22,23 +22,22 @@ namespace nix { -Path readStorePath(Store & store, Source & from) +template<> StorePathSet readStorePaths(const Store & store, Source & from) { - Path path = readString(from); - store.assertStorePath(path); - return path; -} - - -template T readStorePaths(Store & store, Source & from) -{ - T paths = readStrings(from); - for (auto & i : paths) store.assertStorePath(i); + StorePathSet paths; + for (auto & i : readStrings(from)) + 
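HashModuloSink above streams data through RewritingSink, which keeps the last from.size() - 1 bytes of each chunk so that occurrences of the self-hash spanning a chunk boundary are still found and zeroed, and records their offsets for finish(). A brief usage sketch, assuming the htSHA256/HashResult machinery from libutil as used in this patch:

    // Hash 'data' with every occurrence of 'selfHash' treated as zeroes, then
    // fold the offsets of those occurrences into the digest (see finish()).
    HashResult hashModuloSelfReferences(const std::string & selfHash,
        const std::string & data)
    {
        HashModuloSink sink(htSHA256, selfHash);
        sink((const unsigned char *) data.data(), data.size());
        return sink.finish();   // {digest, total number of bytes fed}
    }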
paths.insert(store.parseStorePath(i)); return paths; } -template PathSet readStorePaths(Store & store, Source & from); -template Paths readStorePaths(Store & store, Source & from); + +void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths) +{ + out << paths.size(); + for (auto & i : paths) + out << store.printStorePath(i); +} + /* TODO: Separate these store impls into different files, give them better names */ RemoteStore::RemoteStore(const Params & params) @@ -151,7 +150,7 @@ void RemoteStore::initConnection(Connection & conn) conn.to << PROTOCOL_VERSION; if (GET_PROTOCOL_MINOR(conn.daemonVersion) >= 14) { - int cpu = settings.lockCPU ? lockToCurrentCPU() : -1; + int cpu = sameMachine() && settings.lockCPU ? lockToCurrentCPU() : -1; if (cpu != -1) conn.to << 1 << cpu; else @@ -191,6 +190,14 @@ void RemoteStore::setOptions(Connection & conn) if (GET_PROTOCOL_MINOR(conn.daemonVersion) >= 12) { std::map overrides; globalConfig.getSettings(overrides, true); + overrides.erase(settings.keepFailed.name); + overrides.erase(settings.keepGoing.name); + overrides.erase(settings.tryFallback.name); + overrides.erase(settings.maxBuildJobs.name); + overrides.erase(settings.maxSilentTime.name); + overrides.erase(settings.buildCores.name); + overrides.erase(settings.useSubstitutes.name); + overrides.erase(settings.showTrace.name); conn.to << overrides.size(); for (auto & i : overrides) conn.to << i.first << i.second.value; @@ -246,60 +253,62 @@ ConnectionHandle RemoteStore::getConnection() } -bool RemoteStore::isValidPathUncached(const Path & path) +bool RemoteStore::isValidPathUncached(const StorePath & path) { auto conn(getConnection()); - conn->to << wopIsValidPath << path; + conn->to << wopIsValidPath << printStorePath(path); conn.processStderr(); return readInt(conn->from); } -PathSet RemoteStore::queryValidPaths(const PathSet & paths, SubstituteFlag maybeSubstitute) +StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute) { auto conn(getConnection()); if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) { - PathSet res; + StorePathSet res; for (auto & i : paths) - if (isValidPath(i)) res.insert(i); + if (isValidPath(i)) res.insert(i.clone()); return res; } else { - conn->to << wopQueryValidPaths << paths; + conn->to << wopQueryValidPaths; + writeStorePaths(*this, conn->to, paths); conn.processStderr(); - return readStorePaths(*this, conn->from); + return readStorePaths(*this, conn->from); } } -PathSet RemoteStore::queryAllValidPaths() +StorePathSet RemoteStore::queryAllValidPaths() { auto conn(getConnection()); conn->to << wopQueryAllValidPaths; conn.processStderr(); - return readStorePaths(*this, conn->from); + return readStorePaths(*this, conn->from); } -PathSet RemoteStore::querySubstitutablePaths(const PathSet & paths) +StorePathSet RemoteStore::querySubstitutablePaths(const StorePathSet & paths) { auto conn(getConnection()); if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) { - PathSet res; + StorePathSet res; for (auto & i : paths) { - conn->to << wopHasSubstitutes << i; + conn->to << wopHasSubstitutes << printStorePath(i); conn.processStderr(); - if (readInt(conn->from)) res.insert(i); + if (readInt(conn->from)) res.insert(i.clone()); } return res; } else { - conn->to << wopQuerySubstitutablePaths << paths; + conn->to << wopQuerySubstitutablePaths; + writeStorePaths(*this, conn->to, paths); conn.processStderr(); - return readStorePaths(*this, conn->from); + return readStorePaths(*this, conn->from); } } -void 
RemoteStore::querySubstitutablePathInfos(const PathSet & paths, +void RemoteStore::querySubstitutablePathInfos(const StorePathSet & paths, SubstitutablePathInfos & infos) { if (paths.empty()) return; @@ -310,29 +319,31 @@ void RemoteStore::querySubstitutablePathInfos(const PathSet & paths, for (auto & i : paths) { SubstitutablePathInfo info; - conn->to << wopQuerySubstitutablePathInfo << i; + conn->to << wopQuerySubstitutablePathInfo << printStorePath(i); conn.processStderr(); unsigned int reply = readInt(conn->from); if (reply == 0) continue; - info.deriver = readString(conn->from); - if (info.deriver != "") assertStorePath(info.deriver); - info.references = readStorePaths(*this, conn->from); + auto deriver = readString(conn->from); + if (deriver != "") + info.deriver = parseStorePath(deriver); + info.references = readStorePaths(*this, conn->from); info.downloadSize = readLongLong(conn->from); info.narSize = readLongLong(conn->from); - infos[i] = info; + infos.insert_or_assign(i.clone(), std::move(info)); } } else { - conn->to << wopQuerySubstitutablePathInfos << paths; + conn->to << wopQuerySubstitutablePathInfos; + writeStorePaths(*this, conn->to, paths); conn.processStderr(); size_t count = readNum(conn->from); for (size_t n = 0; n < count; n++) { - Path path = readStorePath(*this, conn->from); - SubstitutablePathInfo & info(infos[path]); - info.deriver = readString(conn->from); - if (info.deriver != "") assertStorePath(info.deriver); - info.references = readStorePaths(*this, conn->from); + SubstitutablePathInfo & info(infos[parseStorePath(readString(conn->from))]); + auto deriver = readString(conn->from); + if (deriver != "") + info.deriver = parseStorePath(deriver); + info.references = readStorePaths(*this, conn->from); info.downloadSize = readLongLong(conn->from); info.narSize = readLongLong(conn->from); } @@ -341,14 +352,14 @@ void RemoteStore::querySubstitutablePathInfos(const PathSet & paths, } -void RemoteStore::queryPathInfoUncached(const Path & path, - Callback> callback) +void RemoteStore::queryPathInfoUncached(const StorePath & path, + Callback> callback) noexcept { try { std::shared_ptr info; { auto conn(getConnection()); - conn->to << wopQueryPathInfo << path; + conn->to << wopQueryPathInfo << printStorePath(path); try { conn.processStderr(); } catch (Error & e) { @@ -359,14 +370,13 @@ void RemoteStore::queryPathInfoUncached(const Path & path, } if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 17) { bool valid; conn->from >> valid; - if (!valid) throw InvalidPath(format("path '%s' is not valid") % path); + if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path)); } - info = std::make_shared(); - info->path = path; - info->deriver = readString(conn->from); - if (info->deriver != "") assertStorePath(info->deriver); + info = std::make_shared(path.clone()); + auto deriver = readString(conn->from); + if (deriver != "") info->deriver = parseStorePath(deriver); info->narHash = Hash(readString(conn->from), htSHA256); - info->references = readStorePaths(*this, conn->from); + info->references = readStorePaths(*this, conn->from); conn->from >> info->registrationTime >> info->narSize; if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) { conn->from >> info->ultimate; @@ -379,52 +389,52 @@ void RemoteStore::queryPathInfoUncached(const Path & path, } -void RemoteStore::queryReferrers(const Path & path, - PathSet & referrers) +void RemoteStore::queryReferrers(const StorePath & path, + StorePathSet & referrers) { auto conn(getConnection()); - conn->to << 
wopQueryReferrers << path; + conn->to << wopQueryReferrers << printStorePath(path); conn.processStderr(); - PathSet referrers2 = readStorePaths(*this, conn->from); - referrers.insert(referrers2.begin(), referrers2.end()); + for (auto & i : readStorePaths(*this, conn->from)) + referrers.insert(i.clone()); } -PathSet RemoteStore::queryValidDerivers(const Path & path) +StorePathSet RemoteStore::queryValidDerivers(const StorePath & path) { auto conn(getConnection()); - conn->to << wopQueryValidDerivers << path; + conn->to << wopQueryValidDerivers << printStorePath(path); conn.processStderr(); - return readStorePaths(*this, conn->from); + return readStorePaths(*this, conn->from); } -PathSet RemoteStore::queryDerivationOutputs(const Path & path) +StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path) { auto conn(getConnection()); - conn->to << wopQueryDerivationOutputs << path; + conn->to << wopQueryDerivationOutputs << printStorePath(path); conn.processStderr(); - return readStorePaths(*this, conn->from); + return readStorePaths(*this, conn->from); } -PathSet RemoteStore::queryDerivationOutputNames(const Path & path) +PathSet RemoteStore::queryDerivationOutputNames(const StorePath & path) { auto conn(getConnection()); - conn->to << wopQueryDerivationOutputNames << path; + conn->to << wopQueryDerivationOutputNames << printStorePath(path); conn.processStderr(); return readStrings(conn->from); } -Path RemoteStore::queryPathFromHashPart(const string & hashPart) +std::optional RemoteStore::queryPathFromHashPart(const std::string & hashPart) { auto conn(getConnection()); conn->to << wopQueryPathFromHashPart << hashPart; conn.processStderr(); Path path = readString(conn->from); - if (!path.empty()) assertStorePath(path); - return path; + if (path.empty()) return {}; + return parseStorePath(path); } @@ -442,9 +452,10 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, copyNAR(source, sink); sink << exportMagic - << info.path - << info.references - << info.deriver + << printStorePath(info.path); + writeStorePaths(*this, sink, info.references); + sink + << (info.deriver ? printStorePath(*info.deriver) : "") << 0 // == no legacy signature << 0 // == no path follows ; @@ -452,14 +463,17 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, conn.processStderr(0, source2.get()); - auto importedPaths = readStorePaths(*this, conn->from); + auto importedPaths = readStorePaths(*this, conn->from); assert(importedPaths.size() <= 1); } else { conn->to << wopAddToStoreNar - << info.path << info.deriver << info.narHash.to_string(Base16, false) - << info.references << info.registrationTime << info.narSize + << printStorePath(info.path) + << (info.deriver ? 
printStorePath(*info.deriver) : "") + << info.narHash.to_string(Base16, false); + writeStorePaths(*this, conn->to, info.references); + conn->to << info.registrationTime << info.narSize << info.ultimate << info.sigs << info.ca << repair << !checkSigs; bool tunnel = GET_PROTOCOL_MINOR(conn->daemonVersion) >= 21; @@ -469,7 +483,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, } -Path RemoteStore::addToStore(const string & name, const Path & _srcPath, +StorePath RemoteStore::addToStore(const string & name, const Path & _srcPath, bool recursive, HashType hashAlgo, PathFilter & filter, RepairFlag repair) { if (repair) throw Error("repairing is not supported when building through the Nix daemon"); @@ -503,54 +517,52 @@ Path RemoteStore::addToStore(const string & name, const Path & _srcPath, throw; } - return readStorePath(*this, conn->from); + return parseStorePath(readString(conn->from)); } -Path RemoteStore::addTextToStore(const string & name, const string & s, - const PathSet & references, RepairFlag repair) +StorePath RemoteStore::addTextToStore(const string & name, const string & s, + const StorePathSet & references, RepairFlag repair) { if (repair) throw Error("repairing is not supported when building through the Nix daemon"); auto conn(getConnection()); - conn->to << wopAddTextToStore << name << s << references; + conn->to << wopAddTextToStore << name << s; + writeStorePaths(*this, conn->to, references); conn.processStderr(); - return readStorePath(*this, conn->from); + return parseStorePath(readString(conn->from)); } -void RemoteStore::buildPaths(const PathSet & drvPaths, BuildMode buildMode) +void RemoteStore::buildPaths(const std::vector & drvPaths, BuildMode buildMode) { auto conn(getConnection()); conn->to << wopBuildPaths; - if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 13) { - conn->to << drvPaths; - if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 15) - conn->to << buildMode; - else - /* Old daemons did not take a 'buildMode' parameter, so we - need to validate it here on the client side. */ - if (buildMode != bmNormal) - throw Error("repairing or checking is not supported when building through the Nix daemon"); - } else { - /* For backwards compatibility with old daemons, strip output - identifiers. */ - PathSet drvPaths2; - for (auto & i : drvPaths) - drvPaths2.insert(string(i, 0, i.find('!'))); - conn->to << drvPaths2; - } + assert(GET_PROTOCOL_MINOR(conn->daemonVersion) >= 13); + Strings ss; + for (auto & p : drvPaths) + ss.push_back(p.to_string(*this)); + conn->to << ss; + if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 15) + conn->to << buildMode; + else + /* Old daemons did not take a 'buildMode' parameter, so we + need to validate it here on the client side. 
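buildPaths above serialises each StorePathWithOutputs as a single string of the form "<store path>!<out1>,<out2>" (no '!' part when no specific outputs are requested), the same encoding that nix::parsePathWithOutputs in path.cc splits apart. A self-contained sketch of the decoding on plain strings:

    #include <set>
    #include <string>
    #include <utility>

    // Split "store-path!out1,out2" into the path and its output names;
    // an absent '!' means the default outputs (empty set here).
    std::pair<std::string, std::set<std::string>> splitPathWithOutputs(const std::string & s)
    {
        auto n = s.find('!');
        std::set<std::string> outputs;
        if (n == std::string::npos) return {s, outputs};
        std::string rest = s.substr(n + 1);
        size_t pos = 0;
        while (pos < rest.size()) {
            auto comma = rest.find(',', pos);
            if (comma == std::string::npos) comma = rest.size();
            if (comma > pos) outputs.insert(rest.substr(pos, comma - pos));
            pos = comma + 1;
        }
        return {s.substr(0, n), outputs};
    }

    // e.g. splitPathWithOutputs("/nix/store/<hash>-foo.drv!out,dev")
    //        -> {"/nix/store/<hash>-foo.drv", {"out", "dev"}}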
*/ + if (buildMode != bmNormal) + throw Error("repairing or checking is not supported when building through the Nix daemon"); conn.processStderr(); readInt(conn->from); } -BuildResult RemoteStore::buildDerivation(const Path & drvPath, const BasicDerivation & drv, +BuildResult RemoteStore::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode) { auto conn(getConnection()); - conn->to << wopBuildDerivation << drvPath << drv << buildMode; + conn->to << wopBuildDerivation << printStorePath(drvPath); + writeDerivation(conn->to, *this, drv); + conn->to << buildMode; conn.processStderr(); BuildResult res; unsigned int status; @@ -560,19 +572,19 @@ BuildResult RemoteStore::buildDerivation(const Path & drvPath, const BasicDeriva } -void RemoteStore::ensurePath(const Path & path) +void RemoteStore::ensurePath(const StorePath & path) { auto conn(getConnection()); - conn->to << wopEnsurePath << path; + conn->to << wopEnsurePath << printStorePath(path); conn.processStderr(); readInt(conn->from); } -void RemoteStore::addTempRoot(const Path & path) +void RemoteStore::addTempRoot(const StorePath & path) { auto conn(getConnection()); - conn->to << wopAddTempRoot << path; + conn->to << wopAddTempRoot << printStorePath(path); conn.processStderr(); readInt(conn->from); } @@ -605,8 +617,8 @@ Roots RemoteStore::findRoots(bool censor) Roots result; while (count--) { Path link = readString(conn->from); - Path target = readStorePath(*this, conn->from); - result[target].emplace(link); + auto target = parseStorePath(readString(conn->from)); + result[std::move(target)].emplace(link); } return result; } @@ -617,7 +629,9 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results) auto conn(getConnection()); conn->to - << wopCollectGarbage << options.action << options.pathsToDelete << options.ignoreLiveness + << wopCollectGarbage << options.action; + writeStorePaths(*this, conn->to, options.pathsToDelete); + conn->to << options.ignoreLiveness << options.maxFreed /* removed options */ << 0 << 0 << 0; @@ -653,17 +667,17 @@ bool RemoteStore::verifyStore(bool checkContents, RepairFlag repair) } -void RemoteStore::addSignatures(const Path & storePath, const StringSet & sigs) +void RemoteStore::addSignatures(const StorePath & storePath, const StringSet & sigs) { auto conn(getConnection()); - conn->to << wopAddSignatures << storePath << sigs; + conn->to << wopAddSignatures << printStorePath(storePath) << sigs; conn.processStderr(); readInt(conn->from); } -void RemoteStore::queryMissing(const PathSet & targets, - PathSet & willBuild, PathSet & willSubstitute, PathSet & unknown, +void RemoteStore::queryMissing(const std::vector & targets, + StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown, unsigned long long & downloadSize, unsigned long long & narSize) { { @@ -672,11 +686,15 @@ void RemoteStore::queryMissing(const PathSet & targets, // Don't hold the connection handle in the fallback case // to prevent a deadlock. 
goto fallback; - conn->to << wopQueryMissing << targets; + conn->to << wopQueryMissing; + Strings ss; + for (auto & p : targets) + ss.push_back(p.to_string(*this)); + conn->to << ss; conn.processStderr(); - willBuild = readStorePaths(*this, conn->from); - willSubstitute = readStorePaths(*this, conn->from); - unknown = readStorePaths(*this, conn->from); + willBuild = readStorePaths(*this, conn->from); + willSubstitute = readStorePaths(*this, conn->from); + unknown = readStorePaths(*this, conn->from); conn->from >> downloadSize >> narSize; return; } diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index 80f18ab71..f301a97d8 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -29,54 +29,56 @@ public: const Setting maxConnectionAge{(Store*) this, std::numeric_limits::max(), "max-connection-age", "number of seconds to reuse a connection"}; + virtual bool sameMachine() = 0; + RemoteStore(const Params & params); /* Implementations of abstract store API methods. */ - bool isValidPathUncached(const Path & path) override; + bool isValidPathUncached(const StorePath & path) override; - PathSet queryValidPaths(const PathSet & paths, + StorePathSet queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute = NoSubstitute) override; - PathSet queryAllValidPaths() override; + StorePathSet queryAllValidPaths() override; - void queryPathInfoUncached(const Path & path, - Callback> callback) override; + void queryPathInfoUncached(const StorePath & path, + Callback> callback) noexcept override; - void queryReferrers(const Path & path, PathSet & referrers) override; + void queryReferrers(const StorePath & path, StorePathSet & referrers) override; - PathSet queryValidDerivers(const Path & path) override; + StorePathSet queryValidDerivers(const StorePath & path) override; - PathSet queryDerivationOutputs(const Path & path) override; + StorePathSet queryDerivationOutputs(const StorePath & path) override; - StringSet queryDerivationOutputNames(const Path & path) override; + StringSet queryDerivationOutputNames(const StorePath & path) override; - Path queryPathFromHashPart(const string & hashPart) override; + std::optional queryPathFromHashPart(const std::string & hashPart) override; - PathSet querySubstitutablePaths(const PathSet & paths) override; + StorePathSet querySubstitutablePaths(const StorePathSet & paths) override; - void querySubstitutablePathInfos(const PathSet & paths, + void querySubstitutablePathInfos(const StorePathSet & paths, SubstitutablePathInfos & infos) override; void addToStore(const ValidPathInfo & info, Source & nar, RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr accessor) override; - Path addToStore(const string & name, const Path & srcPath, + StorePath addToStore(const string & name, const Path & srcPath, bool recursive = true, HashType hashAlgo = htSHA256, PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override; - Path addTextToStore(const string & name, const string & s, - const PathSet & references, RepairFlag repair) override; + StorePath addTextToStore(const string & name, const string & s, + const StorePathSet & references, RepairFlag repair) override; - void buildPaths(const PathSet & paths, BuildMode buildMode) override; + void buildPaths(const std::vector & paths, BuildMode buildMode) override; - BuildResult buildDerivation(const Path & drvPath, const BasicDerivation & drv, + BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode 
buildMode) override; - void ensurePath(const Path & path) override; + void ensurePath(const StorePath & path) override; - void addTempRoot(const Path & path) override; + void addTempRoot(const StorePath & path) override; void addIndirectRoot(const Path & path) override; @@ -90,10 +92,10 @@ public: bool verifyStore(bool checkContents, RepairFlag repair) override; - void addSignatures(const Path & storePath, const StringSet & sigs) override; + void addSignatures(const StorePath & storePath, const StringSet & sigs) override; - void queryMissing(const PathSet & targets, - PathSet & willBuild, PathSet & willSubstitute, PathSet & unknown, + void queryMissing(const std::vector & targets, + StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown, unsigned long long & downloadSize, unsigned long long & narSize) override; void connect() override; @@ -146,6 +148,9 @@ public: std::string getUri() override; + bool sameMachine() override + { return true; } + private: ref openConnection() override; diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index cd547a964..f2e4b63e0 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -205,11 +205,12 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore void init() override { - if (!diskCache->cacheExists(getUri(), wantMassQuery_, priority)) { - + if (auto cacheInfo = diskCache->cacheExists(getUri())) { + wantMassQuery.setDefault(cacheInfo->wantMassQuery ? "true" : "false"); + priority.setDefault(fmt("%d", cacheInfo->priority)); + } else { BinaryCacheStore::init(); - - diskCache->createCache(getUri(), storeDir, wantMassQuery_, priority); + diskCache->createCache(getUri(), storeDir, wantMassQuery, priority); } } @@ -222,7 +223,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore fetches the .narinfo file, rather than first checking for its existence via a HEAD request. Since .narinfos are small, doing a GET is unlikely to be slower than HEAD. */ - bool isValidPathUncached(const Path & storePath) override + bool isValidPathUncached(const StorePath & storePath) override { try { queryPathInfo(storePath); @@ -382,9 +383,9 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache '%s'", path, getUri()); } - PathSet queryAllValidPaths() override + StorePathSet queryAllValidPaths() override { - PathSet paths; + StorePathSet paths; std::string marker; do { @@ -405,7 +406,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore for (auto object : contents) { auto & key = object.GetKey(); if (key.size() != 40 || !hasSuffix(key, ".narinfo")) continue; - paths.insert(storeDir + "/" + key.substr(0, key.size() - 8)); + paths.insert(parseStorePath(storeDir + "/" + key.substr(0, key.size() - 8) + "-unknown")); } marker = res.GetNextMarker(); diff --git a/src/libstore/sqlite.hh b/src/libstore/sqlite.hh index 115679b84..bd012d9b9 100644 --- a/src/libstore/sqlite.hh +++ b/src/libstore/sqlite.hh @@ -99,8 +99,8 @@ void handleSQLiteBusy(const SQLiteBusy & e); /* Convenience function for retrying a SQLite transaction when the database is busy. 
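The new signature that follows accepts any callable instead of a std::function, so lambdas are passed without type erasure. A sketch of the loop it wraps, assuming it retries on the SQLiteBusy exception via the handleSQLiteBusy() helper declared just above (the catch clause itself is not visible in this hunk):

    template<typename T, typename F>
    T retrySQLiteSketch(F && fun)
    {
        while (true) {
            try {
                return fun();             // run the transaction
            } catch (SQLiteBusy & e) {
                handleSQLiteBusy(e);      // log/back off, then retry
            }
        }
    }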
*/ -template -T retrySQLite(std::function fun) +template +T retrySQLite(F && fun) { while (true) { try { diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index 39205ae2c..caae6b596 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -16,6 +16,8 @@ public: const Setting sshKey{(Store*) this, "", "ssh-key", "path to an SSH private key"}; const Setting compress{(Store*) this, false, "compress", "whether to compress the connection"}; + const Setting remoteProgram{(Store*) this, "nix-daemon", "remote-program", "path to the nix-daemon executable on the remote system"}; + const Setting remoteStore{(Store*) this, "", "remote-store", "URI of the store on the remote system"}; SSHStore(const std::string & host, const Params & params) : Store(params) @@ -35,7 +37,10 @@ public: return uriScheme + host; } - void narFromPath(const Path & path, Sink & sink) override; + bool sameMachine() override + { return false; } + + void narFromPath(const StorePath & path, Sink & sink) override; ref getFSAccessor() override; @@ -63,10 +68,10 @@ private: }; }; -void SSHStore::narFromPath(const Path & path, Sink & sink) +void SSHStore::narFromPath(const StorePath & path, Sink & sink) { auto conn(connections->get()); - conn->to << wopNarFromPath << path; + conn->to << wopNarFromPath << printStorePath(path); conn->processStderr(); copyNAR(conn->from, sink); } @@ -79,7 +84,9 @@ ref SSHStore::getFSAccessor() ref SSHStore::openConnection() { auto conn = make_ref(); - conn->sshConn = master.startCommand("nix-daemon --stdio"); + conn->sshConn = master.startCommand( + fmt("%s --stdio", remoteProgram) + + (remoteStore.get() == "" ? "" : " --store " + shellEscape(remoteStore.get()))); conn->to = FdSink(conn->sshConn->in.get()); conn->from = FdSource(conn->sshConn->out.get()); initConnection(*conn); diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index 5e0e44935..2ee7115c5 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -16,7 +16,7 @@ SSHMaster::SSHMaster(const std::string & host, const std::string & keyFile, bool void SSHMaster::addCommonSSHOpts(Strings & args) { - for (auto & i : tokenizeString(getEnv("NIX_SSHOPTS"))) + for (auto & i : tokenizeString(getEnv("NIX_SSHOPTS").value_or(""))) args.push_back(i); if (!keyFile.empty()) args.insert(args.end(), {"-i", keyFile}); @@ -62,7 +62,8 @@ std::unique_ptr SSHMaster::startCommand(const std::string args.push_back(command); execvp(args.begin()->c_str(), stringsToCharPtrs(args).data()); - throw SysError("executing '%s' on '%s'", command, host); + // could not exec ssh/bash + throw SysError("unable to execute '%s'", args.front()); }); @@ -108,7 +109,7 @@ Path SSHMaster::startMaster() addCommonSSHOpts(args); execvp(args.begin()->c_str(), stringsToCharPtrs(args).data()); - throw SysError("starting SSH master"); + throw SysError("unable to execute '%s'", args.front()); }); out.writeSide = -1; diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index c13ff1156..d8f6c22bc 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -27,13 +27,6 @@ bool Store::isStorePath(const Path & path) const } -void Store::assertStorePath(const Path & path) const -{ - if (!isStorePath(path)) - throw Error(format("path '%1%' is not in the Nix store") % path); -} - - Path Store::toStorePath(const Path & path) const { if (!isInStore(path)) @@ -46,9 +39,9 @@ Path Store::toStorePath(const Path & path) const } -Path Store::followLinksToStore(const Path & _path) const +Path Store::followLinksToStore(std::string_view _path) 
const { - Path path = absPath(_path); + Path path = absPath(std::string(_path)); while (!isInStore(path)) { if (!isLink(path)) break; string target = readLink(path); @@ -60,17 +53,16 @@ Path Store::followLinksToStore(const Path & _path) const } -Path Store::followLinksToStorePath(const Path & path) const +StorePath Store::followLinksToStorePath(std::string_view path) const { - return toStorePath(followLinksToStore(path)); + return parseStorePath(toStorePath(followLinksToStore(path))); } -string storePathToName(const Path & path) +StorePathWithOutputs Store::followLinksToStorePathWithOutputs(std::string_view path) const { - auto base = baseNameOf(path); - assert(base.size() == storePathHashLen || (base.size() > storePathHashLen && base[storePathHashLen] == '-')); - return base.size() == storePathHashLen ? "" : string(base, storePathHashLen + 1); + auto [path2, outputs] = nix::parsePathWithOutputs(path); + return StorePathWithOutputs(followLinksToStorePath(path2), std::move(outputs)); } @@ -82,25 +74,6 @@ string storePathToHash(const Path & path) } -void checkStoreName(const string & name) -{ - string validChars = "+-._?="; - /* Disallow names starting with a dot for possible security - reasons (e.g., "." and ".."). */ - if (string(name, 0, 1) == ".") - throw Error(format("illegal name: '%1%'") % name); - for (auto & i : name) - if (!((i >= 'A' && i <= 'Z') || - (i >= 'a' && i <= 'z') || - (i >= '0' && i <= '9') || - validChars.find(i) != string::npos)) - { - throw Error(format("invalid character '%1%' in name '%2%'") - % i % name); - } -} - - /* Store paths have the following form: /- @@ -172,67 +145,78 @@ void checkStoreName(const string & name) */ -Path Store::makeStorePath(const string & type, - const Hash & hash, const string & name) const +StorePath Store::makeStorePath(const string & type, + const Hash & hash, std::string_view name) const { /* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */ - string s = type + ":" + hash.to_string(Base16) + ":" + storeDir + ":" + name; - - checkStoreName(name); - - return storeDir + "/" - + compressHash(hashString(htSHA256, s), 20).to_string(Base32, false) - + "-" + name; + string s = type + ":" + hash.to_string(Base16) + ":" + storeDir + ":" + std::string(name); + auto h = compressHash(hashString(htSHA256, s), 20); + return StorePath::make(h.hash, name); } -Path Store::makeOutputPath(const string & id, - const Hash & hash, const string & name) const +StorePath Store::makeOutputPath(const string & id, + const Hash & hash, std::string_view name) const { return makeStorePath("output:" + id, hash, - name + (id == "out" ? "" : "-" + id)); + std::string(name) + (id == "out" ? "" : "-" + id)); } -Path Store::makeFixedOutputPath(bool recursive, - const Hash & hash, const string & name) const +static std::string makeType( + const Store & store, + string && type, + const StorePathSet & references, + bool hasSelfReference = false) { - return hash.type == htSHA256 && recursive - ? makeStorePath("source", hash, name) - : makeStorePath("output:out", hashString(htSHA256, - "fixed:out:" + (recursive ? 
(string) "r:" : "") + - hash.to_string(Base16) + ":"), - name); + for (auto & i : references) { + type += ":"; + type += store.printStorePath(i); + } + if (hasSelfReference) type += ":self"; + return std::move(type); } -Path Store::makeTextPath(const string & name, const Hash & hash, - const PathSet & references) const +StorePath Store::makeFixedOutputPath( + bool recursive, + const Hash & hash, + std::string_view name, + const StorePathSet & references, + bool hasSelfReference) const +{ + if (hash.type == htSHA256 && recursive) { + return makeStorePath(makeType(*this, "source", references, hasSelfReference), hash, name); + } else { + assert(references.empty()); + return makeStorePath("output:out", hashString(htSHA256, + "fixed:out:" + (recursive ? (string) "r:" : "") + + hash.to_string(Base16) + ":"), name); + } +} + + +StorePath Store::makeTextPath(std::string_view name, const Hash & hash, + const StorePathSet & references) const { assert(hash.type == htSHA256); /* Stuff the references (if any) into the type. This is a bit hacky, but we can't put them in `s' since that would be ambiguous. */ - string type = "text"; - for (auto & i : references) { - type += ":"; - type += i; - } - return makeStorePath(type, hash, name); + return makeStorePath(makeType(*this, "text", references), hash, name); } -std::pair Store::computeStorePathForPath(const string & name, +std::pair Store::computeStorePathForPath(std::string_view name, const Path & srcPath, bool recursive, HashType hashAlgo, PathFilter & filter) const { Hash h = recursive ? hashPath(hashAlgo, srcPath, filter).first : hashFile(hashAlgo, srcPath); - Path dstPath = makeFixedOutputPath(recursive, h, name); - return std::pair(dstPath, h); + return std::make_pair(makeFixedOutputPath(recursive, h, name), h); } -Path Store::computeStorePathForText(const string & name, const string & s, - const PathSet & references) const +StorePath Store::computeStorePathForText(const string & name, const string & s, + const StorePathSet & references) const { return makeTextPath(name, hashString(htSHA256, s), references); } @@ -251,11 +235,9 @@ std::string Store::getUri() } -bool Store::isValidPath(const Path & storePath) +bool Store::isValidPath(const StorePath & storePath) { - assertStorePath(storePath); - - auto hashPart = storePathToHash(storePath); + auto hashPart = storePathToHash(printStorePath(storePath)); { auto state_(state.lock()); @@ -289,7 +271,7 @@ bool Store::isValidPath(const Path & storePath) /* Default implementation for stores that only implement queryPathInfoUncached(). */ -bool Store::isValidPathUncached(const Path & path) +bool Store::isValidPathUncached(const StorePath & path) { try { queryPathInfo(path); @@ -300,12 +282,12 @@ bool Store::isValidPathUncached(const Path & path) } -ref Store::queryPathInfo(const Path & storePath) +ref Store::queryPathInfo(const StorePath & storePath) { - std::promise> promise; + std::promise> promise; queryPathInfo(storePath, - {[&](std::future> result) { + {[&](std::future> result) { try { promise.set_value(result.get()); } catch (...) 
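makeStorePath above hashes a textual fingerprint, "<type>:sha256:<base-16 digest>:<store dir>:<name>", rather than the content itself, and compresses the SHA-256 result to the 160-bit hash part of the path. A condensed restatement using the same helpers that appear in this hunk:

    StorePath makeStorePathSketch(const Store & store, const std::string & type,
        const Hash & innerHash, std::string_view name)
    {
        std::string fingerprint = type + ":" + innerHash.to_string(Base16)
            + ":" + store.storeDir + ":" + std::string(name);
        auto h = compressHash(hashString(htSHA256, fingerprint), 20);
        return StorePath::make(h.hash, name);   // 20 bytes become the base-32 hash part
    }

    // Fixed-output paths chain two fingerprints: the inner string
    // "fixed:out:[r:]<base-16 content hash>:" is hashed first and then fed
    // back in as 'innerHash' with type "output:out", as makeFixedOutputPath
    // shows above.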
{ @@ -317,22 +299,21 @@ ref Store::queryPathInfo(const Path & storePath) } -void Store::queryPathInfo(const Path & storePath, - Callback> callback) +void Store::queryPathInfo(const StorePath & storePath, + Callback> callback) noexcept { - assertStorePath(storePath); - - auto hashPart = storePathToHash(storePath); + std::string hashPart; try { + hashPart = storePathToHash(printStorePath(storePath)); { auto res = state.lock()->pathInfoCache.get(hashPart); if (res) { stats.narInfoReadAverted++; if (!*res) - throw InvalidPath(format("path '%s' is not valid") % storePath); - return callback(ref(*res)); + throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); + return callback(ref(*res)); } } @@ -345,17 +326,19 @@ void Store::queryPathInfo(const Path & storePath, state_->pathInfoCache.upsert(hashPart, res.first == NarInfoDiskCache::oInvalid ? 0 : res.second); if (res.first == NarInfoDiskCache::oInvalid || - (res.second->path != storePath && storePathToName(storePath) != "")) - throw InvalidPath(format("path '%s' is not valid") % storePath); + res.second->path != storePath) + throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); } - return callback(ref(res.second)); + return callback(ref(res.second)); } } } catch (...) { return callback.rethrow(); } + auto callbackPtr = std::make_shared(std::move(callback)); + queryPathInfoUncached(storePath, - {[this, storePath, hashPart, callback](std::future> fut) { + {[this, storePath{printStorePath(storePath)}, hashPart, callbackPtr](std::future> fut) { try { auto info = fut.get(); @@ -368,40 +351,38 @@ void Store::queryPathInfo(const Path & storePath, state_->pathInfoCache.upsert(hashPart, info); } - if (!info - || (info->path != storePath && storePathToName(storePath) != "")) - { + if (!info || info->path != parseStorePath(storePath)) { stats.narInfoMissing++; throw InvalidPath("path '%s' is not valid", storePath); } - callback(ref(info)); - } catch (...) { callback.rethrow(); } + (*callbackPtr)(ref(info)); + } catch (...) { callbackPtr->rethrow(); } }}); } -PathSet Store::queryValidPaths(const PathSet & paths, SubstituteFlag maybeSubstitute) +StorePathSet Store::queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute) { struct State { size_t left; - PathSet valid; + StorePathSet valid; std::exception_ptr exc; }; - Sync state_(State{paths.size(), PathSet()}); + Sync state_(State{paths.size(), StorePathSet()}); std::condition_variable wakeup; ThreadPool pool; - auto doQuery = [&](const Path & path ) { + auto doQuery = [&](const Path & path) { checkInterrupt(); - queryPathInfo(path, {[path, &state_, &wakeup](std::future> fut) { + queryPathInfo(parseStorePath(path), {[path, this, &state_, &wakeup](std::future> fut) { auto state(state_.lock()); try { auto info = fut.get(); - state->valid.insert(path); + state->valid.insert(parseStorePath(path)); } catch (InvalidPath &) { } catch (...) 
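The blocking queryPathInfo overload above turns the callback-based API into a synchronous call by completing a std::promise from inside the callback and waiting on its future. The same pattern in isolation, generic over the result type:

    #include <future>
    #include <utility>

    // 'startAsync' kicks off an operation and eventually invokes the callback
    // it is given with a std::future<T> carrying the result or an exception.
    template<typename T, typename StartAsync>
    T waitForSketch(StartAsync && startAsync)
    {
        std::promise<T> promise;
        startAsync([&](std::future<T> result) {
            try {
                promise.set_value(result.get());
            } catch (...) {
                promise.set_exception(std::current_exception());
            }
        });
        return promise.get_future().get();   // rethrows any stored exception
    }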
{ state->exc = std::current_exception(); @@ -413,7 +394,7 @@ PathSet Store::queryValidPaths(const PathSet & paths, SubstituteFlag maybeSubsti }; for (auto & path : paths) - pool.enqueue(std::bind(doQuery, path)); + pool.enqueue(std::bind(doQuery, printStorePath(path))); // FIXME pool.process(); @@ -421,7 +402,7 @@ PathSet Store::queryValidPaths(const PathSet & paths, SubstituteFlag maybeSubsti auto state(state_.lock()); if (!state->left) { if (state->exc) std::rethrow_exception(state->exc); - return state->valid; + return std::move(state->valid); } state.wait(wakeup); } @@ -431,13 +412,13 @@ PathSet Store::queryValidPaths(const PathSet & paths, SubstituteFlag maybeSubsti /* Return a string accepted by decodeValidPathInfo() that registers the specified paths as valid. Note: it's the responsibility of the caller to provide a closure. */ -string Store::makeValidityRegistration(const PathSet & paths, +string Store::makeValidityRegistration(const StorePathSet & paths, bool showDerivers, bool showHash) { string s = ""; for (auto & i : paths) { - s += i + "\n"; + s += printStorePath(i) + "\n"; auto info = queryPathInfo(i); @@ -446,31 +427,30 @@ string Store::makeValidityRegistration(const PathSet & paths, s += (format("%1%\n") % info->narSize).str(); } - Path deriver = showDerivers ? info->deriver : ""; + auto deriver = showDerivers && info->deriver ? printStorePath(*info->deriver) : ""; s += deriver + "\n"; s += (format("%1%\n") % info->references.size()).str(); for (auto & j : info->references) - s += j + "\n"; + s += printStorePath(j) + "\n"; } return s; } -void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const PathSet & storePaths, +void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & storePaths, bool includeImpureInfo, bool showClosureSize, AllowInvalidFlag allowInvalid) { auto jsonList = jsonOut.list(); - for (auto storePath : storePaths) { + for (auto & storePath : storePaths) { auto jsonPath = jsonList.object(); - jsonPath.attr("path", storePath); + jsonPath.attr("path", printStorePath(storePath)); try { auto info = queryPathInfo(storePath); - storePath = info->path; jsonPath .attr("narHash", info->narHash.to_string()) @@ -479,7 +459,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const PathSet & storePaths { auto jsonRefs = jsonPath.list("references"); for (auto & ref : info->references) - jsonRefs.elem(ref); + jsonRefs.elem(printStorePath(ref)); } if (info->ca != "") @@ -488,14 +468,14 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const PathSet & storePaths std::pair closureSizes; if (showClosureSize) { - closureSizes = getClosureSize(storePath); + closureSizes = getClosureSize(info->path); jsonPath.attr("closureSize", closureSizes.first); } if (includeImpureInfo) { - if (info->deriver != "") - jsonPath.attr("deriver", info->deriver); + if (info->deriver) + jsonPath.attr("deriver", printStorePath(*info->deriver)); if (info->registrationTime) jsonPath.attr("registrationTime", info->registrationTime); @@ -531,10 +511,10 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const PathSet & storePaths } -std::pair Store::getClosureSize(const Path & storePath) +std::pair Store::getClosureSize(const StorePath & storePath) { uint64_t totalNarSize = 0, totalDownloadSize = 0; - PathSet closure; + StorePathSet closure; computeFSClosure(storePath, closure, false, false); for (auto & p : closure) { auto info = queryPathInfo(p); @@ -558,30 +538,34 @@ const Store::Stats & Store::getStats() } -void Store::buildPaths(const PathSet & paths, BuildMode 
buildMode) +void Store::buildPaths(const std::vector & paths, BuildMode buildMode) { - for (auto & path : paths) - if (isDerivation(path)) - unsupported("buildPaths"); + StorePathSet paths2; - if (queryValidPaths(paths).size() != paths.size()) + for (auto & path : paths) { + if (path.path.isDerivation()) + unsupported("buildPaths"); + paths2.insert(path.path.clone()); + } + + if (queryValidPaths(paths2).size() != paths2.size()) unsupported("buildPaths"); } void copyStorePath(ref srcStore, ref dstStore, - const Path & storePath, RepairFlag repair, CheckSigsFlag checkSigs) + const StorePath & storePath, RepairFlag repair, CheckSigsFlag checkSigs) { auto srcUri = srcStore->getUri(); auto dstUri = dstStore->getUri(); Activity act(*logger, lvlInfo, actCopyPath, srcUri == "local" || srcUri == "daemon" - ? fmt("copying path '%s' to '%s'", storePath, dstUri) + ? fmt("copying path '%s' to '%s'", srcStore->printStorePath(storePath), dstUri) : dstUri == "local" || dstUri == "daemon" - ? fmt("copying path '%s' from '%s'", storePath, srcUri) - : fmt("copying path '%s' from '%s' to '%s'", storePath, srcUri, dstUri), - {storePath, srcUri, dstUri}); + ? fmt("copying path '%s' from '%s'", srcStore->printStorePath(storePath), srcUri) + : fmt("copying path '%s' from '%s' to '%s'", srcStore->printStorePath(storePath), srcUri, dstUri), + {srcStore->printStorePath(storePath), srcUri, dstUri}); PushActivity pact(act.id); auto info = srcStore->queryPathInfo(storePath); @@ -614,23 +598,23 @@ void copyStorePath(ref srcStore, ref dstStore, total += len; act.progress(total, info->narSize); }); - srcStore->narFromPath({storePath}, wrapperSink); + srcStore->narFromPath(storePath, wrapperSink); }, [&]() { - throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", storePath, srcStore->getUri()); + throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", srcStore->printStorePath(storePath), srcStore->getUri()); }); dstStore->addToStore(*info, *source, repair, checkSigs); } -void copyPaths(ref srcStore, ref dstStore, const PathSet & storePaths, +void copyPaths(ref srcStore, ref dstStore, const StorePathSet & storePaths, RepairFlag repair, CheckSigsFlag checkSigs, SubstituteFlag substitute) { - PathSet valid = dstStore->queryValidPaths(storePaths, substitute); + auto valid = dstStore->queryValidPaths(storePaths, substitute); PathSet missing; for (auto & path : storePaths) - if (!valid.count(path)) missing.insert(path); + if (!valid.count(path)) missing.insert(srcStore->printStorePath(path)); if (missing.empty()) return; @@ -651,23 +635,25 @@ void copyPaths(ref srcStore, ref dstStore, const PathSet & storePa PathSet(missing.begin(), missing.end()), [&](const Path & storePath) { - if (dstStore->isValidPath(storePath)) { + if (dstStore->isValidPath(dstStore->parseStorePath(storePath))) { nrDone++; showProgress(); return PathSet(); } - auto info = srcStore->queryPathInfo(storePath); + auto info = srcStore->queryPathInfo(srcStore->parseStorePath(storePath)); bytesExpected += info->narSize; act.setExpected(actCopyPath, bytesExpected); - return info->references; + return srcStore->printStorePathSet(info->references); }, - [&](const Path & storePath) { + [&](const Path & storePathS) { checkInterrupt(); + auto storePath = dstStore->parseStorePath(storePathS); + if (!dstStore->isValidPath(storePath)) { MaintainCount mc(nrRunning); showProgress(); @@ -677,7 +663,7 @@ void copyPaths(ref srcStore, ref dstStore, const PathSet & storePa nrFailed++; if (!settings.keepGoing) throw e; - logger->log(lvlError, format("could not 
copy %s: %s") % storePath % e.what()); + logger->log(lvlError, fmt("could not copy %s: %s", storePathS, e.what())); showProgress(); return; } @@ -690,20 +676,36 @@ void copyPaths(ref srcStore, ref dstStore, const PathSet & storePa void copyClosure(ref srcStore, ref dstStore, - const PathSet & storePaths, RepairFlag repair, CheckSigsFlag checkSigs, + const StorePathSet & storePaths, RepairFlag repair, CheckSigsFlag checkSigs, SubstituteFlag substitute) { - PathSet closure; - srcStore->computeFSClosure({storePaths}, closure); + StorePathSet closure; + srcStore->computeFSClosure(storePaths, closure); copyPaths(srcStore, dstStore, closure, repair, checkSigs, substitute); } -ValidPathInfo decodeValidPathInfo(std::istream & str, bool hashGiven) +ValidPathInfo::ValidPathInfo(const ValidPathInfo & other) + : path(other.path.clone()) + , deriver(other.deriver ? other.deriver->clone(): std::optional{}) + , narHash(other.narHash) + , references(cloneStorePathSet(other.references)) + , registrationTime(other.registrationTime) + , narSize(other.narSize) + , id(other.id) + , ultimate(other.ultimate) + , sigs(other.sigs) + , ca(other.ca) { - ValidPathInfo info; - getline(str, info.path); - if (str.eof()) { info.path = ""; return info; } +} + + +std::optional decodeValidPathInfo(const Store & store, std::istream & str, bool hashGiven) +{ + std::string path; + getline(str, path); + if (str.eof()) { return {}; } + ValidPathInfo info(store.parseStorePath(path)); if (hashGiven) { string s; getline(str, s); @@ -711,16 +713,29 @@ ValidPathInfo decodeValidPathInfo(std::istream & str, bool hashGiven) getline(str, s); if (!string2Int(s, info.narSize)) throw Error("number expected"); } - getline(str, info.deriver); + std::string deriver; + getline(str, deriver); + if (deriver != "") info.deriver = store.parseStorePath(deriver); string s; int n; getline(str, s); if (!string2Int(s, n)) throw Error("number expected"); while (n--) { getline(str, s); - info.references.insert(s); + info.references.insert(store.parseStorePath(s)); } if (!str || str.eof()) throw Error("missing input"); - return info; + return std::optional(std::move(info)); +} + + +std::string Store::showPaths(const StorePathSet & paths) +{ + std::string s; + for (auto & i : paths) { + if (s.size() != 0) s += ", "; + s += "'" + printStorePath(i) + "'"; + } + return s; } @@ -735,34 +750,34 @@ string showPaths(const PathSet & paths) } -std::string ValidPathInfo::fingerprint() const +std::string ValidPathInfo::fingerprint(const Store & store) const { if (narSize == 0 || !narHash) - throw Error(format("cannot calculate fingerprint of path '%s' because its size/hash is not known") - % path); + throw Error("cannot calculate fingerprint of path '%s' because its size/hash is not known", + store.printStorePath(path)); return - "1;" + path + ";" + "1;" + store.printStorePath(path) + ";" + narHash.to_string(Base32) + ";" + std::to_string(narSize) + ";" - + concatStringsSep(",", references); + + concatStringsSep(",", store.printStorePathSet(references)); } -void ValidPathInfo::sign(const SecretKey & secretKey) +void ValidPathInfo::sign(const Store & store, const SecretKey & secretKey) { - sigs.insert(secretKey.signDetached(fingerprint())); + sigs.insert(secretKey.signDetached(fingerprint(store))); } bool ValidPathInfo::isContentAddressed(const Store & store) const { auto warn = [&]() { - printError(format("warning: path '%s' claims to be content-addressed but isn't") % path); + printError("warning: path '%s' claims to be content-addressed but isn't", 
store.printStorePath(path)); }; if (hasPrefix(ca, "text:")) { Hash hash(std::string(ca, 5)); - if (store.makeTextPath(storePathToName(path), hash, references) == path) + if (store.makeTextPath(path.name(), hash, references) == path) return true; else warn(); @@ -771,8 +786,13 @@ bool ValidPathInfo::isContentAddressed(const Store & store) const else if (hasPrefix(ca, "fixed:")) { bool recursive = ca.compare(6, 2, "r:") == 0; Hash hash(std::string(ca, recursive ? 8 : 6)); - if (references.empty() && - store.makeFixedOutputPath(recursive, hash, storePathToName(path)) == path) + auto refs = cloneStorePathSet(references); + bool hasSelfReference = false; + if (refs.count(path)) { + hasSelfReference = true; + refs.erase(path); + } + if (store.makeFixedOutputPath(recursive, hash, path.name(), refs, hasSelfReference) == path) return true; else warn(); @@ -788,15 +808,15 @@ size_t ValidPathInfo::checkSignatures(const Store & store, const PublicKeys & pu size_t good = 0; for (auto & sig : sigs) - if (checkSignature(publicKeys, sig)) + if (checkSignature(store, publicKeys, sig)) good++; return good; } -bool ValidPathInfo::checkSignature(const PublicKeys & publicKeys, const std::string & sig) const +bool ValidPathInfo::checkSignature(const Store & store, const PublicKeys & publicKeys, const std::string & sig) const { - return verifyDetached(fingerprint(), sig, publicKeys); + return verifyDetached(fingerprint(store), sig, publicKeys); } @@ -804,7 +824,7 @@ Strings ValidPathInfo::shortRefs() const { Strings refs; for (auto & r : references) - refs.push_back(baseNameOf(r)); + refs.push_back(std::string(r.to_string())); return refs; } @@ -917,7 +937,7 @@ static RegisterStoreImplementation regStore([]( const std::string & uri, const Store::Params & params) -> std::shared_ptr { - switch (getStoreType(uri, get(params, "state", settings.nixStateDir))) { + switch (getStoreType(uri, get(params, "state").value_or(settings.nixStateDir))) { case tDaemon: return std::shared_ptr(std::make_shared(params)); case tLocal: { @@ -940,8 +960,7 @@ std::list> getDefaultSubstituters() StringSet done; auto addStore = [&](const std::string & uri) { - if (done.count(uri)) return; - done.insert(uri); + if (!done.insert(uri).second) return; try { stores.push_back(openStore(uri)); } catch (Error & e) { @@ -956,7 +975,7 @@ std::list> getDefaultSubstituters() addStore(uri); stores.sort([](ref & a, ref & b) { - return a->getPriority() < b->getPriority(); + return a->priority < b->priority; }); return stores; diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 7a1b31d0f..861b96930 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -1,5 +1,6 @@ #pragma once +#include "path.hh" #include "hash.hh" #include "serialise.hh" #include "crypto.hh" @@ -20,12 +21,12 @@ namespace nix { -MakeError(SubstError, Error) -MakeError(BuildError, Error) /* denotes a permanent build failure */ -MakeError(InvalidPath, Error) -MakeError(Unsupported, Error) -MakeError(SubstituteGone, Error) -MakeError(SubstituterDisabled, Error) +MakeError(SubstError, Error); +MakeError(BuildError, Error); // denotes a permanent build failure +MakeError(InvalidPath, Error); +MakeError(Unsupported, Error); +MakeError(SubstituteGone, Error); +MakeError(SubstituterDisabled, Error); struct BasicDerivation; @@ -42,14 +43,11 @@ enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true }; enum AllowInvalidFlag : bool { DisallowInvalid = false, AllowInvalid = true }; -/* Size of the hash part of store paths, in base-32 characters. 
*/ -const size_t storePathHashLen = 32; // i.e. 160 bits - /* Magic header of exportPath() output (obsolete). */ const uint32_t exportMagic = 0x4558494e; -typedef std::unordered_map> Roots; +typedef std::unordered_map> Roots; struct GCOptions @@ -83,7 +81,7 @@ struct GCOptions bool ignoreLiveness{false}; /* For `gcDeleteSpecific', the paths to delete. */ - PathSet pathsToDelete; + StorePathSet pathsToDelete; /* Stop after at least `maxFreed' bytes have been freed. */ unsigned long long maxFreed{std::numeric_limits::max()}; @@ -104,21 +102,21 @@ struct GCResults struct SubstitutablePathInfo { - Path deriver; - PathSet references; + std::optional deriver; + StorePathSet references; unsigned long long downloadSize; /* 0 = unknown or inapplicable */ unsigned long long narSize; /* 0 = unknown */ }; -typedef std::map SubstitutablePathInfos; +typedef std::map SubstitutablePathInfos; struct ValidPathInfo { - Path path; - Path deriver; + StorePath path; + std::optional deriver; Hash narHash; - PathSet references; + StorePathSet references; time_t registrationTime = 0; uint64_t narSize = 0; // 0 = unknown uint64_t id; // internal use only @@ -143,7 +141,7 @@ struct ValidPathInfo Ideally, the content-addressability assertion would just be a Boolean, and the store path would be computed from - ‘storePathToName(path)’, ‘narHash’ and ‘references’. However, + the name component, ‘narHash’ and ‘references’. However, 1) we've accumulated several types of content-addressed paths over the years; and 2) fixed-output derivations support multiple hash algorithms and serialisation methods (flat file @@ -171,9 +169,9 @@ struct ValidPathInfo the NAR, and the sorted references. The size field is strictly speaking superfluous, but might prevent endless/excessive data attacks. */ - std::string fingerprint() const; + std::string fingerprint(const Store & store) const; - void sign(const SecretKey & secretKey); + void sign(const Store & store, const SecretKey & secretKey); /* Return true iff the path is verifiably content-addressed. */ bool isContentAddressed(const Store & store) const; @@ -186,10 +184,13 @@ struct ValidPathInfo size_t checkSignatures(const Store & store, const PublicKeys & publicKeys) const; /* Verify a single signature. 
*/ - bool checkSignature(const PublicKeys & publicKeys, const std::string & sig) const; + bool checkSignature(const Store & store, const PublicKeys & publicKeys, const std::string & sig) const; Strings shortRefs() const; + ValidPathInfo(StorePath && path) : path(std::move(path)) { } + explicit ValidPathInfo(const ValidPathInfo & other); + virtual ~ValidPathInfo() { } }; @@ -254,11 +255,16 @@ public: const Setting isTrusted{this, false, "trusted", "whether paths from this store can be used as substitutes even when they lack trusted signatures"}; + Setting priority{this, 0, "priority", "priority of this substituter (lower value means higher priority)"}; + + Setting wantMassQuery{this, false, "want-mass-query", "whether this substituter can be queried efficiently for path validity"}; + protected: struct State { - LRUCache> pathInfoCache; + // FIXME: fix key + LRUCache> pathInfoCache; }; Sync state; @@ -273,6 +279,24 @@ public: virtual std::string getUri() = 0; + StorePath parseStorePath(std::string_view path) const; + + std::string printStorePath(const StorePath & path) const; + + // FIXME: remove + StorePathSet parseStorePathSet(const PathSet & paths) const; + + PathSet printStorePathSet(const StorePathSet & path) const; + + /* Split a string specifying a derivation and a set of outputs + (/nix/store/hash-foo!out1,out2,...) into the derivation path + and the outputs. */ + StorePathWithOutputs parsePathWithOutputs(const string & s); + + /* Display a set of paths in human-readable form (i.e., between quotes + and separated by commas). */ + std::string showPaths(const StorePathSet & paths); + /* Return true if ‘path’ is in the Nix store (but not the Nix store itself). */ bool isInStore(const Path & path) const; @@ -281,37 +305,38 @@ public: the Nix store. */ bool isStorePath(const Path & path) const; - /* Throw an exception if ‘path’ is not a store path. */ - void assertStorePath(const Path & path) const; - /* Chop off the parts after the top-level store name, e.g., /nix/store/abcd-foo/bar => /nix/store/abcd-foo. */ Path toStorePath(const Path & path) const; /* Follow symlinks until we end up with a path in the Nix store. */ - Path followLinksToStore(const Path & path) const; + Path followLinksToStore(std::string_view path) const; /* Same as followLinksToStore(), but apply toStorePath() to the result. */ - Path followLinksToStorePath(const Path & path) const; + StorePath followLinksToStorePath(std::string_view path) const; + + StorePathWithOutputs followLinksToStorePathWithOutputs(std::string_view path) const; /* Constructs a unique store path name. */ - Path makeStorePath(const string & type, - const Hash & hash, const string & name) const; + StorePath makeStorePath(const string & type, + const Hash & hash, std::string_view name) const; - Path makeOutputPath(const string & id, - const Hash & hash, const string & name) const; + StorePath makeOutputPath(const string & id, + const Hash & hash, std::string_view name) const; - Path makeFixedOutputPath(bool recursive, - const Hash & hash, const string & name) const; + StorePath makeFixedOutputPath(bool recursive, + const Hash & hash, std::string_view name, + const StorePathSet & references = {}, + bool hasSelfReference = false) const; - Path makeTextPath(const string & name, const Hash & hash, - const PathSet & references) const; + StorePath makeTextPath(std::string_view name, const Hash & hash, + const StorePathSet & references) const; /* This is the preparatory part of addToStore(); it computes the store path to which srcPath is to be copied. 
Returns the store path and the cryptographic hash of the contents of srcPath. */ - std::pair computeStorePathForPath(const string & name, + std::pair computeStorePathForPath(std::string_view name, const Path & srcPath, bool recursive = true, HashType hashAlgo = htSHA256, PathFilter & filter = defaultPathFilter) const; @@ -329,21 +354,21 @@ public: simply yield a different store path, so other users wouldn't be affected), but it has some backwards compatibility issues (the hashing scheme changes), so I'm not doing that for now. */ - Path computeStorePathForText(const string & name, const string & s, - const PathSet & references) const; + StorePath computeStorePathForText(const string & name, const string & s, + const StorePathSet & references) const; /* Check whether a path is valid. */ - bool isValidPath(const Path & path); + bool isValidPath(const StorePath & path); protected: - virtual bool isValidPathUncached(const Path & path); + virtual bool isValidPathUncached(const StorePath & path); public: /* Query which of the given paths is valid. Optionally, try to substitute missing paths. */ - virtual PathSet queryValidPaths(const PathSet & paths, + virtual StorePathSet queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute = NoSubstitute); /* Query the set of all valid paths. Note that for some store @@ -351,58 +376,56 @@ public: (i.e. you'll get /nix/store/ rather than /nix/store/-). Use queryPathInfo() to obtain the full store path. */ - virtual PathSet queryAllValidPaths() + virtual StorePathSet queryAllValidPaths() { unsupported("queryAllValidPaths"); } /* Query information about a valid path. It is permitted to omit the name part of the store path. */ - ref queryPathInfo(const Path & path); + ref queryPathInfo(const StorePath & path); /* Asynchronous version of queryPathInfo(). */ - void queryPathInfo(const Path & path, - Callback> callback); + void queryPathInfo(const StorePath & path, + Callback> callback) noexcept; protected: - virtual void queryPathInfoUncached(const Path & path, - Callback> callback) = 0; + virtual void queryPathInfoUncached(const StorePath & path, + Callback> callback) noexcept = 0; public: /* Queries the set of incoming FS references for a store path. The result is not cleared. */ - virtual void queryReferrers(const Path & path, PathSet & referrers) + virtual void queryReferrers(const StorePath & path, StorePathSet & referrers) { unsupported("queryReferrers"); } /* Return all currently valid derivations that have `path' as an output. (Note that the result of `queryDeriver()' is the derivation that was actually used to produce `path', which may not exist anymore.) */ - virtual PathSet queryValidDerivers(const Path & path) { return {}; }; + virtual StorePathSet queryValidDerivers(const StorePath & path) { return {}; }; /* Query the outputs of the derivation denoted by `path'. */ - virtual PathSet queryDerivationOutputs(const Path & path) + virtual StorePathSet queryDerivationOutputs(const StorePath & path) { unsupported("queryDerivationOutputs"); } /* Query the output names of the derivation denoted by `path'. */ - virtual StringSet queryDerivationOutputNames(const Path & path) + virtual StringSet queryDerivationOutputNames(const StorePath & path) { unsupported("queryDerivationOutputNames"); } /* Query the full store path given the hash part of a valid store - path, or "" if the path doesn't exist. */ - virtual Path queryPathFromHashPart(const string & hashPart) = 0; + path, or empty if the path doesn't exist. 
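//
// --- Editorial aside: illustrative sketch, not part of this patch ----------
// A minimal example of driving the StorePath-based query API declared above,
// assuming a store obtained via openStore() and a caller-supplied path
// string. Only members shown in this header (followLinksToStorePath,
// isValidPath, queryPathInfo, printStorePath) and ValidPathInfo::narSize are
// used; the function itself is hypothetical.

#include "store-api.hh"
#include <iostream>

void printNarSize(const std::string & arg)
{
    auto store = nix::openStore();
    // Resolve a user-supplied path (possibly a symlink) to a StorePath.
    auto path = store->followLinksToStorePath(arg);
    if (!store->isValidPath(path)) return;
    // queryPathInfo() consults the in-memory and on-disk caches first.
    auto info = store->queryPathInfo(path);
    std::cout << store->printStorePath(info->path)
              << ": " << info->narSize << " bytes (NAR)\n";
}
// --- end of aside -----------------------------------------------------------
//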
*/ + virtual std::optional queryPathFromHashPart(const std::string & hashPart) = 0; /* Query which of the given paths have substitutes. */ - virtual PathSet querySubstitutablePaths(const PathSet & paths) { return {}; }; + virtual StorePathSet querySubstitutablePaths(const StorePathSet & paths) { return {}; }; /* Query substitute info (i.e. references, derivers and download sizes) of a set of paths. If a path does not have substitute info, it's omitted from the resulting ‘infos’ map. */ - virtual void querySubstitutablePathInfos(const PathSet & paths, + virtual void querySubstitutablePathInfos(const StorePathSet & paths, SubstitutablePathInfos & infos) { return; }; - virtual bool wantMassQuery() { return false; } - /* Import a path into the store. */ virtual void addToStore(const ValidPathInfo & info, Source & narSource, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs, @@ -417,17 +440,24 @@ public: validity the resulting path. The resulting path is returned. The function object `filter' can be used to exclude files (see libutil/archive.hh). */ - virtual Path addToStore(const string & name, const Path & srcPath, + virtual StorePath addToStore(const string & name, const Path & srcPath, bool recursive = true, HashType hashAlgo = htSHA256, PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) = 0; + // FIXME: remove? + virtual StorePath addToStoreFromDump(const string & dump, const string & name, + bool recursive = true, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) + { + throw Error("addToStoreFromDump() is not supported by this store"); + } + /* Like addToStore, but the contents written to the output path is a regular file containing the given string. */ - virtual Path addTextToStore(const string & name, const string & s, - const PathSet & references, RepairFlag repair = NoRepair) = 0; + virtual StorePath addTextToStore(const string & name, const string & s, + const StorePathSet & references, RepairFlag repair = NoRepair) = 0; /* Write a NAR dump of a store path. */ - virtual void narFromPath(const Path & path, Sink & sink) = 0; + virtual void narFromPath(const StorePath & path, Sink & sink) = 0; /* For each path, if it's a derivation, build it. Building a derivation means ensuring that the output paths are valid. If @@ -437,22 +467,24 @@ public: output paths can be created by running the builder, after recursively building any sub-derivations. For inputs that are not derivations, substitute them. */ - virtual void buildPaths(const PathSet & paths, BuildMode buildMode = bmNormal); + virtual void buildPaths( + const std::vector & paths, + BuildMode buildMode = bmNormal); /* Build a single non-materialized derivation (i.e. not from an on-disk .drv file). Note that ‘drvPath’ is only used for informational purposes. */ - virtual BuildResult buildDerivation(const Path & drvPath, const BasicDerivation & drv, + virtual BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode = bmNormal) = 0; /* Ensure that a path is valid. If it is not currently valid, it may be made valid by running a substitute (if defined for the path). */ - virtual void ensurePath(const Path & path) = 0; + virtual void ensurePath(const StorePath & path) = 0; /* Add a store path as a temporary root of the garbage collector. The root disappears as soon as we exit. 
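//
// --- Editorial aside: illustrative sketch, not part of this patch ----------
// Sketch of requesting a build through the new buildPaths() overload above.
// StorePathWithOutputs comes from path.hh (not reproduced in this hunk), and
// parsePathWithOutputs() is the splitter declared earlier in this header; the
// argument string and the function name are hypothetical.

#include "store-api.hh"
#include <vector>

void buildOne(nix::Store & store, const std::string & drvWithOutputs)
{
    // e.g. "/nix/store/<hash>-foo.drv!out" selects the 'out' output.
    auto target = store.parsePathWithOutputs(drvWithOutputs);
    std::vector<nix::StorePathWithOutputs> targets;
    targets.push_back(std::move(target));
    // Builds or substitutes the requested outputs; throws on failure.
    store.buildPaths(targets);
}
// --- end of aside -----------------------------------------------------------
//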
*/ - virtual void addTempRoot(const Path & path) + virtual void addTempRoot(const StorePath & path) { unsupported("addTempRoot"); } /* Add an indirect root, which is merely a symlink to `path' from @@ -498,7 +530,7 @@ public: /* Return a string representing information about the path that can be loaded into the database using `nix-store --load-db' or `nix-store --register-validity'. */ - string makeValidityRegistration(const PathSet & paths, + string makeValidityRegistration(const StorePathSet & paths, bool showDerivers, bool showHash); /* Write a JSON representation of store path metadata, such as the @@ -506,14 +538,14 @@ public: variable elements such as the registration time are included. If ‘showClosureSize’ is true, the closure size of each path is included. */ - void pathInfoToJSON(JSONPlaceholder & jsonOut, const PathSet & storePaths, + void pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & storePaths, bool includeImpureInfo, bool showClosureSize, AllowInvalidFlag allowInvalid = DisallowInvalid); /* Return the size of the closure of the specified path, that is, the sum of the size of the NAR serialisation of each path in the closure. */ - std::pair getClosureSize(const Path & storePath); + std::pair getClosureSize(const StorePath & storePath); /* Optimise the disk space usage of the Nix store by hard-linking files with the same contents. */ @@ -529,14 +561,14 @@ public: /* Add signatures to the specified store path. The signatures are not verified. */ - virtual void addSignatures(const Path & storePath, const StringSet & sigs) + virtual void addSignatures(const StorePath & storePath, const StringSet & sigs) { unsupported("addSignatures"); } /* Utility functions. */ /* Read a derivation, after ensuring its existence through ensurePath(). */ - Derivation derivationFromPath(const Path & drvPath); + Derivation derivationFromPath(const StorePath & drvPath); /* Place in `out' the set of all store paths in the file system closure of `storePath'; that is, all paths than can be directly @@ -545,36 +577,36 @@ public: `storePath' is returned; that is, the closures under the `referrers' relation instead of the `references' relation is returned. */ - virtual void computeFSClosure(const PathSet & paths, - PathSet & out, bool flipDirection = false, + virtual void computeFSClosure(const StorePathSet & paths, + StorePathSet & out, bool flipDirection = false, bool includeOutputs = false, bool includeDerivers = false); - void computeFSClosure(const Path & path, - PathSet & out, bool flipDirection = false, + void computeFSClosure(const StorePath & path, + StorePathSet & out, bool flipDirection = false, bool includeOutputs = false, bool includeDerivers = false); /* Given a set of paths that are to be built, return the set of derivations that will be built, and the set of output paths that will be substituted. */ - virtual void queryMissing(const PathSet & targets, - PathSet & willBuild, PathSet & willSubstitute, PathSet & unknown, + virtual void queryMissing(const std::vector & targets, + StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown, unsigned long long & downloadSize, unsigned long long & narSize); /* Sort a set of paths topologically under the references - relation. If p refers to q, then p preceeds q in this list. */ - Paths topoSortPaths(const PathSet & paths); + relation. If p refers to q, then p precedes q in this list. */ + StorePaths topoSortPaths(const StorePathSet & paths); /* Export multiple paths in the format expected by ‘nix-store --import’. 
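//
// --- Editorial aside: illustrative sketch, not part of this patch ----------
// Sketch of combining the closure helpers above: collect the runtime closure
// of a path and stream it in the format read by 'nix-store --import', via the
// exportPaths() declared just below. The function name and the Sink argument
// are placeholders.

#include "store-api.hh"

void exportClosure(nix::Store & store, const nix::StorePath & root, nix::Sink & sink)
{
    nix::StorePathSet closure;
    // Everything reachable from 'root' under the references relation.
    store.computeFSClosure(root, closure);
    // Serialise the whole set; importPaths() on the other side reads it back.
    store.exportPaths(closure, sink);
}
// --- end of aside -----------------------------------------------------------
//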
*/ - void exportPaths(const Paths & paths, Sink & sink); + void exportPaths(const StorePathSet & paths, Sink & sink); - void exportPath(const Path & path, Sink & sink); + void exportPath(const StorePath & path, Sink & sink); /* Import a sequence of NAR dumps created by exportPaths() into the Nix store. Optionally, the contents of the NARs are preloaded into the specified FS accessor to speed up subsequent access. */ - Paths importPaths(Source & source, std::shared_ptr accessor, + StorePaths importPaths(Source & source, std::shared_ptr accessor, CheckSigsFlag checkSigs = CheckSigs); struct Stats @@ -598,7 +630,7 @@ public: /* Return the build log of the specified store path, if available, or null otherwise. */ - virtual std::shared_ptr getBuildLog(const Path & path) + virtual std::shared_ptr getBuildLog(const StorePath & path) { return nullptr; } /* Hack to allow long-running processes like hydra-queue-runner to @@ -618,16 +650,14 @@ public: return 0; }; - /* Get the priority of the store, used to order substituters. In - particular, binary caches can specify a priority field in their - "nix-cache-info" file. Lower value means higher priority. */ - virtual int getPriority() { return 0; } - virtual Path toRealPath(const Path & storePath) { return storePath; } + virtual void createUser(const std::string & userName, uid_t userId) + { } + protected: Stats stats; @@ -646,7 +676,7 @@ class LocalFSStore : public virtual Store public: // FIXME: the (Store*) cast works around a bug in gcc that causes - // it to emit the call to the Option constructor. Clang works fine + // it to omit the call to the Setting constructor. Clang works fine // either way. const PathSetting rootDir{(Store*) this, true, "", "root", "directory prefixed to all other paths"}; @@ -661,11 +691,11 @@ public: LocalFSStore(const Params & params); - void narFromPath(const Path & path, Sink & sink) override; + void narFromPath(const StorePath & path, Sink & sink) override; ref getFSAccessor() override; /* Register a permanent GC root. */ - Path addPermRoot(const Path & storePath, + Path addPermRoot(const StorePath & storePath, const Path & gcRoot, bool indirect, bool allowOutsideRootsDir = false); virtual Path getRealStoreDir() { return storeDir; } @@ -676,25 +706,17 @@ public: return getRealStoreDir() + "/" + std::string(storePath, storeDir.size() + 1); } - std::shared_ptr getBuildLog(const Path & path) override; + std::shared_ptr getBuildLog(const StorePath & path) override; }; -/* Extract the name part of the given store path. */ -string storePathToName(const Path & path); - /* Extract the hash part of the given store path. */ string storePathToHash(const Path & path); -/* Check whether ‘name’ is a valid store path name part, i.e. contains - only the characters [a-zA-Z0-9\+\-\.\_\?\=] and doesn't start with - a dot. */ -void checkStoreName(const string & name); - /* Copy a path from one store to another. */ void copyStorePath(ref srcStore, ref dstStore, - const Path & storePath, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs); + const StorePath & storePath, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs); /* Copy store paths from one store to another. The paths may be copied @@ -702,7 +724,7 @@ void copyStorePath(ref srcStore, ref dstStore, (i.e. if A is a reference of B, then A is copied before B), but the set of store paths is not automatically closed; use copyClosure() for that. 
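//
// --- Editorial aside: illustrative sketch, not part of this patch ----------
// Sketch of pushing a path plus its full closure from one store to another
// with the copyClosure() declared below. The destination URI is hypothetical;
// the explicit clone() reflects StorePath being move-only in this API.

#include "store-api.hh"

void pushToCache(const nix::StorePath & root)
{
    auto src = nix::openStore();                     // the default local store
    auto dst = nix::openStore("file:///tmp/cache");  // hypothetical binary cache
    nix::StorePathSet roots;
    roots.insert(root.clone());
    // Copies 'root' and everything it references, skipping paths dst already has.
    nix::copyClosure(src, dst, roots);
}
// --- end of aside -----------------------------------------------------------
//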
*/ -void copyPaths(ref srcStore, ref dstStore, const PathSet & storePaths, +void copyPaths(ref srcStore, ref dstStore, const StorePathSet & storePaths, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs, SubstituteFlag substitute = NoSubstitute); @@ -710,7 +732,7 @@ void copyPaths(ref srcStore, ref dstStore, const PathSet & storePa /* Copy the closure of the specified paths from one store to another. */ void copyClosure(ref srcStore, ref dstStore, - const PathSet & storePaths, + const StorePathSet & storePaths, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs, SubstituteFlag substitute = NoSubstitute); @@ -766,8 +788,7 @@ StoreType getStoreType(const std::string & uri = settings.storeUri.get(), const std::string & stateDir = settings.nixStateDir); /* Return the default substituter stores, defined by the - ‘substituters’ option and various legacy options like - ‘binary-caches’. */ + ‘substituters’ option and various legacy options. */ std::list> getDefaultSubstituters(); @@ -794,7 +815,9 @@ struct RegisterStoreImplementation string showPaths(const PathSet & paths); -ValidPathInfo decodeValidPathInfo(std::istream & str, +std::optional decodeValidPathInfo( + const Store & store, + std::istream & str, bool hashGiven = false); diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh index 5ebdfaf13..857d54d99 100644 --- a/src/libstore/worker-protocol.hh +++ b/src/libstore/worker-protocol.hh @@ -62,8 +62,12 @@ typedef enum { #define STDERR_RESULT 0x52534c54 -Path readStorePath(Store & store, Source & from); -template T readStorePaths(Store & store, Source & from); +class Store; +struct Source; + +template T readStorePaths(const Store & store, Source & from); + +void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths); } diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 3aa120270..db544a212 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -375,4 +375,13 @@ void copyNAR(Source & source, Sink & sink) } +void copyPath(const Path & from, const Path & to) +{ + auto source = sinkToSource([&](Sink & sink) { + dumpPath(from, sink); + }); + restorePath(to, *source); +} + + } diff --git a/src/libutil/archive.hh b/src/libutil/archive.hh index 25be426c1..768fe2536 100644 --- a/src/libutil/archive.hh +++ b/src/libutil/archive.hh @@ -77,6 +77,8 @@ void restorePath(const Path & path, Source & source); /* Read a NAR from 'source' and write it to 'sink'. 
*/ void copyNAR(Source & source, Sink & sink); +void copyPath(const Path & from, const Path & to); + extern const std::string narVersionMagic1; diff --git a/src/libutil/args.cc b/src/libutil/args.cc index 7af2a1bf7..ba15ea571 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -200,4 +200,74 @@ void printTable(std::ostream & out, const Table2 & table) } } +void Command::printHelp(const string & programName, std::ostream & out) +{ + Args::printHelp(programName, out); + + auto exs = examples(); + if (!exs.empty()) { + out << "\n"; + out << "Examples:\n"; + for (auto & ex : exs) + out << "\n" + << " " << ex.description << "\n" // FIXME: wrap + << " $ " << ex.command << "\n"; + } +} + +MultiCommand::MultiCommand(const Commands & commands) + : commands(commands) +{ + expectedArgs.push_back(ExpectedArg{"command", 1, true, [=](std::vector ss) { + assert(!command); + auto i = commands.find(ss[0]); + if (i == commands.end()) + throw UsageError("'%s' is not a recognised command", ss[0]); + command = i->second(); + command->_name = ss[0]; + }}); +} + +void MultiCommand::printHelp(const string & programName, std::ostream & out) +{ + if (command) { + command->printHelp(programName + " " + command->name(), out); + return; + } + + out << "Usage: " << programName << " ... ...\n"; + + out << "\n"; + out << "Common flags:\n"; + printFlags(out); + + out << "\n"; + out << "Available commands:\n"; + + Table2 table; + for (auto & i : commands) { + auto command = i.second(); + command->_name = i.first; + auto descr = command->description(); + if (!descr.empty()) + table.push_back(std::make_pair(command->name(), descr)); + } + printTable(out, table); +} + +bool MultiCommand::processFlag(Strings::iterator & pos, Strings::iterator end) +{ + if (Args::processFlag(pos, end)) return true; + if (command && command->processFlag(pos, end)) return true; + return false; +} + +bool MultiCommand::processArgs(const Strings & args, bool finish) +{ + if (command) + return command->processArgs(args, finish); + else + return Args::processArgs(args, finish); +} + } diff --git a/src/libutil/args.hh b/src/libutil/args.hh index ad5fcca39..967efbe1c 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -70,23 +70,23 @@ public: Args & args; Flag::ptr flag; friend class Args; - FlagMaker(Args & args) : args(args), flag(std::make_shared()) { }; + FlagMaker(Args & args) : args(args), flag(std::make_shared()) { } public: ~FlagMaker(); - FlagMaker & longName(const std::string & s) { flag->longName = s; return *this; }; - FlagMaker & shortName(char s) { flag->shortName = s; return *this; }; - FlagMaker & description(const std::string & s) { flag->description = s; return *this; }; - FlagMaker & label(const std::string & l) { flag->arity = 1; flag->labels = {l}; return *this; }; - FlagMaker & labels(const Strings & ls) { flag->arity = ls.size(); flag->labels = ls; return *this; }; - FlagMaker & arity(size_t arity) { flag->arity = arity; return *this; }; - FlagMaker & handler(std::function)> handler) { flag->handler = handler; return *this; }; - FlagMaker & handler(std::function handler) { flag->handler = [handler](std::vector) { handler(); }; return *this; }; + FlagMaker & longName(const std::string & s) { flag->longName = s; return *this; } + FlagMaker & shortName(char s) { flag->shortName = s; return *this; } + FlagMaker & description(const std::string & s) { flag->description = s; return *this; } + FlagMaker & label(const std::string & l) { flag->arity = 1; flag->labels = {l}; return *this; } + FlagMaker & labels(const 
Strings & ls) { flag->arity = ls.size(); flag->labels = ls; return *this; } + FlagMaker & arity(size_t arity) { flag->arity = arity; return *this; } + FlagMaker & handler(std::function)> handler) { flag->handler = handler; return *this; } + FlagMaker & handler(std::function handler) { flag->handler = [handler](std::vector) { handler(); }; return *this; } FlagMaker & handler(std::function handler) { flag->arity = 1; flag->handler = [handler](std::vector ss) { handler(std::move(ss[0])); }; return *this; - }; - FlagMaker & category(const std::string & s) { flag->category = s; return *this; }; + } + FlagMaker & category(const std::string & s) { flag->category = s; return *this; } template FlagMaker & dest(T * dest) @@ -94,7 +94,7 @@ public: flag->arity = 1; flag->handler = [=](std::vector ss) { *dest = ss[0]; }; return *this; - }; + } template FlagMaker & set(T * dest, const T & val) @@ -102,7 +102,7 @@ public: flag->arity = 0; flag->handler = [=](std::vector ss) { *dest = val; }; return *this; - }; + } FlagMaker & mkHashTypeFlag(HashType * ht); }; @@ -188,6 +188,57 @@ public: friend class MultiCommand; }; +/* A command is an argument parser that can be executed by calling its + run() method. */ +struct Command : virtual Args +{ +private: + std::string _name; + + friend class MultiCommand; + +public: + + virtual ~Command() { } + + std::string name() { return _name; } + + virtual void prepare() { }; + virtual void run() = 0; + + struct Example + { + std::string description; + std::string command; + }; + + typedef std::list Examples; + + virtual Examples examples() { return Examples(); } + + void printHelp(const string & programName, std::ostream & out) override; +}; + +typedef std::map()>> Commands; + +/* An argument parser that supports multiple subcommands, + i.e. ‘ ’. */ +class MultiCommand : virtual Args +{ +public: + Commands commands; + + std::shared_ptr command; + + MultiCommand(const Commands & commands); + + void printHelp(const string & programName, std::ostream & out) override; + + bool processFlag(Strings::iterator & pos, Strings::iterator end) override; + + bool processArgs(const Strings & args, bool finish) override; +}; + Strings argvToStrings(int argc, char * * argv); /* Helper function for rendering argument labels. 
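//
// --- Editorial aside: illustrative sketch, not part of this patch ----------
// Sketch of a subcommand built on the Command machinery declared above. The
// "hello" command is made up; description() is assumed to be the usual
// virtual inherited from Args, while Example/Examples and run() are the
// members declared in Command above.

#include "args.hh"
#include <iostream>

struct CmdHello : nix::Command
{
    std::string description() override
    {
        return "print a friendly greeting";
    }

    Examples examples() override
    {
        return {
            Example{"To print the greeting:", "nix hello"},
        };
    }

    void run() override
    {
        std::cout << "hello\n";
    }
};
// --- end of aside -----------------------------------------------------------
//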
*/ diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 0dd84e320..860b04adb 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -11,6 +11,8 @@ #include #include +#include + #include namespace nix { @@ -42,6 +44,66 @@ struct NoneSink : CompressionSink void write(const unsigned char * data, size_t len) override { nextSink(data, len); } }; +struct GzipDecompressionSink : CompressionSink +{ + Sink & nextSink; + z_stream strm; + bool finished = false; + uint8_t outbuf[BUFSIZ]; + + GzipDecompressionSink(Sink & nextSink) : nextSink(nextSink) + { + strm.zalloc = Z_NULL; + strm.zfree = Z_NULL; + strm.opaque = Z_NULL; + strm.avail_in = 0; + strm.next_in = Z_NULL; + strm.next_out = outbuf; + strm.avail_out = sizeof(outbuf); + + // Enable gzip and zlib decoding (+32) with 15 windowBits + int ret = inflateInit2(&strm,15+32); + if (ret != Z_OK) + throw CompressionError("unable to initialise gzip encoder"); + } + + ~GzipDecompressionSink() + { + inflateEnd(&strm); + } + + void finish() override + { + CompressionSink::flush(); + write(nullptr, 0); + } + + void write(const unsigned char * data, size_t len) override + { + assert(len <= std::numeric_limits::max()); + + strm.next_in = (Bytef *) data; + strm.avail_in = len; + + while (!finished && (!data || strm.avail_in)) { + checkInterrupt(); + + int ret = inflate(&strm,Z_SYNC_FLUSH); + if (ret != Z_OK && ret != Z_STREAM_END) + throw CompressionError("error while decompressing gzip file: %d (%d, %d)", + zError(ret), len, strm.avail_in); + + finished = ret == Z_STREAM_END; + + if (strm.avail_out < sizeof(outbuf) || strm.avail_in == 0) { + nextSink(outbuf, sizeof(outbuf) - strm.avail_out); + strm.next_out = (Bytef *) outbuf; + strm.avail_out = sizeof(outbuf); + } + } + } +}; + struct XzDecompressionSink : CompressionSink { Sink & nextSink; @@ -215,6 +277,8 @@ ref makeDecompressionSink(const std::string & method, Sink & ne return make_ref(nextSink); else if (method == "bzip2") return make_ref(nextSink); + else if (method == "gzip") + return make_ref(nextSink); else if (method == "br") return make_ref(nextSink); else diff --git a/src/libutil/config.cc b/src/libutil/config.cc index 9023cb1bb..7551d97d1 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -154,6 +154,11 @@ AbstractSetting::AbstractSetting( { } +void AbstractSetting::setDefault(const std::string & str) +{ + if (!overriden) set(str); +} + void AbstractSetting::toJSON(JSONPlaceholder & out) { out.write(to_string()); @@ -185,7 +190,7 @@ template<> void BaseSetting::set(const std::string & str) value = str; } -template<> std::string BaseSetting::to_string() +template<> std::string BaseSetting::to_string() const { return value; } @@ -199,7 +204,7 @@ void BaseSetting::set(const std::string & str) } template -std::string BaseSetting::to_string() +std::string BaseSetting::to_string() const { static_assert(std::is_integral::value, "Integer required."); return std::to_string(value); @@ -215,7 +220,7 @@ template<> void BaseSetting::set(const std::string & str) throw UsageError("Boolean setting '%s' has invalid value '%s'", name, str); } -template<> std::string BaseSetting::to_string() +template<> std::string BaseSetting::to_string() const { return value ? 
"true" : "false"; } @@ -239,7 +244,7 @@ template<> void BaseSetting::set(const std::string & str) value = tokenizeString(str); } -template<> std::string BaseSetting::to_string() +template<> std::string BaseSetting::to_string() const { return concatStringsSep(" ", value); } @@ -256,7 +261,7 @@ template<> void BaseSetting::set(const std::string & str) value = tokenizeString(str); } -template<> std::string BaseSetting::to_string() +template<> std::string BaseSetting::to_string() const { return concatStringsSep(" ", value); } diff --git a/src/libutil/config.hh b/src/libutil/config.hh index d86c65ff0..7ea78fdaf 100644 --- a/src/libutil/config.hh +++ b/src/libutil/config.hh @@ -115,6 +115,8 @@ public: bool overriden = false; + void setDefault(const std::string & str); + protected: AbstractSetting( @@ -131,13 +133,13 @@ protected: virtual void set(const std::string & value) = 0; - virtual std::string to_string() = 0; + virtual std::string to_string() const = 0; virtual void toJSON(JSONPlaceholder & out); virtual void convertToArg(Args & args, const std::string & category); - bool isOverriden() { return overriden; } + bool isOverriden() const { return overriden; } }; /* A setting of type T. */ @@ -174,7 +176,7 @@ public: value = v; } - std::string to_string() override; + std::string to_string() const override; void convertToArg(Args & args, const std::string & category) override; diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 1c14ebb18..7caee1da7 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -256,23 +256,9 @@ Hash hashString(HashType ht, const string & s) Hash hashFile(HashType ht, const Path & path) { - Ctx ctx; - Hash hash(ht); - start(ht, ctx); - - AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC); - if (!fd) throw SysError(format("opening file '%1%'") % path); - - std::vector buf(8192); - ssize_t n; - while ((n = read(fd.get(), buf.data(), buf.size()))) { - checkInterrupt(); - if (n == -1) throw SysError(format("reading file '%1%'") % path); - update(ht, ctx, buf.data(), n); - } - - finish(ht, ctx, hash.hash); - return hash; + HashSink sink(ht); + readFile(path, sink); + return sink.finish().first; } diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh index 2dbc3b630..ffa43ecf5 100644 --- a/src/libutil/hash.hh +++ b/src/libutil/hash.hh @@ -111,7 +111,12 @@ string printHashType(HashType ht); union Ctx; -class HashSink : public BufferedSink +struct AbstractHashSink : virtual Sink +{ + virtual HashResult finish() = 0; +}; + +class HashSink : public BufferedSink, public AbstractHashSink { private: HashType ht; @@ -122,8 +127,8 @@ public: HashSink(HashType ht); HashSink(const HashSink & h); ~HashSink(); - void write(const unsigned char * data, size_t len); - HashResult finish(); + void write(const unsigned char * data, size_t len) override; + HashResult finish() override; HashResult currentHash(); }; diff --git a/src/libutil/json.cc b/src/libutil/json.cc index 0a6fb65f0..74e37b4c4 100644 --- a/src/libutil/json.cc +++ b/src/libutil/json.cc @@ -171,4 +171,9 @@ JSONObject JSONPlaceholder::object() return JSONObject(state); } +JSONPlaceholder::~JSONPlaceholder() +{ + assert(!first || std::uncaught_exception()); +} + } diff --git a/src/libutil/json.hh b/src/libutil/json.hh index 02a39917f..83213ca66 100644 --- a/src/libutil/json.hh +++ b/src/libutil/json.hh @@ -168,10 +168,7 @@ public: { } - ~JSONPlaceholder() - { - assert(!first || std::uncaught_exception()); - } + ~JSONPlaceholder(); template void write(const T & v) diff --git a/src/libutil/local.mk 
b/src/libutil/local.mk index 3ccc23fd5..16c1fa03f 100644 --- a/src/libutil/local.mk +++ b/src/libutil/local.mk @@ -6,4 +6,6 @@ libutil_DIR := $(d) libutil_SOURCES := $(wildcard $(d)/*.cc) -libutil_LDFLAGS = $(LIBLZMA_LIBS) -lbz2 -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) -lboost_context +libutil_LDFLAGS = $(LIBLZMA_LIBS) -lbz2 -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context + +libutil_LIBS = libnixrust diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 799c6e1ae..fa5c84a27 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -21,7 +21,7 @@ Logger * logger = makeDefaultLogger(); void Logger::warn(const std::string & msg) { - log(lvlInfo, ANSI_RED "warning:" ANSI_NORMAL " " + msg); + log(lvlWarn, ANSI_YELLOW "warning:" ANSI_NORMAL " " + msg); } class SimpleLogger : public Logger @@ -46,6 +46,7 @@ public: char c; switch (lvl) { case lvlError: c = '3'; break; + case lvlWarn: c = '4'; break; case lvlInfo: c = '5'; break; case lvlTalkative: case lvlChatty: c = '6'; break; default: c = '7'; diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index 678703102..beb5e6b64 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -6,6 +6,7 @@ namespace nix { typedef enum { lvlError = 0, + lvlWarn, lvlInfo, lvlTalkative, lvlChatty, @@ -25,6 +26,7 @@ typedef enum { actVerifyPaths = 107, actSubstitute = 108, actQueryPathInfo = 109, + actPostBuildHook = 110, } ActivityType; typedef enum { @@ -35,6 +37,7 @@ typedef enum { resSetPhase = 104, resProgress = 105, resSetExpected = 106, + resPostBuildLogLine = 107, } ResultType; typedef uint64_t ActivityId; @@ -155,10 +158,10 @@ extern Verbosity verbosity; /* suppress msgs > this */ #define vomit(args...) printMsg(lvlVomit, args) template -inline void warn(const std::string & fs, Args... args) +inline void warn(const std::string & fs, const Args & ... args) { boost::format f(fs); - nop{boost::io::detail::feed(f, args)...}; + formatHelper(f, args...); logger->warn(f.str()); } diff --git a/src/libutil/rust-ffi.cc b/src/libutil/rust-ffi.cc new file mode 100644 index 000000000..6f36b3192 --- /dev/null +++ b/src/libutil/rust-ffi.cc @@ -0,0 +1,22 @@ +#include "logging.hh" +#include "rust-ffi.hh" + +extern "C" std::exception_ptr * make_error(rust::StringSlice s) +{ + return new std::exception_ptr(std::make_exception_ptr(nix::Error(std::string(s.ptr, s.size)))); +} + +extern "C" void destroy_error(std::exception_ptr * ex) +{ + free(ex); +} + +namespace rust { + +std::ostream & operator << (std::ostream & str, const String & s) +{ + str << (std::string_view) s; + return str; +} + +} diff --git a/src/libutil/rust-ffi.hh b/src/libutil/rust-ffi.hh new file mode 100644 index 000000000..469a5fba3 --- /dev/null +++ b/src/libutil/rust-ffi.hh @@ -0,0 +1,185 @@ +#pragma once + +#include "serialise.hh" + +#include +#include +#include + +namespace rust { + +typedef void (*DropFun)(void *); + +/* A Rust value of N bytes. It can be moved but not copied. When it + goes out of scope, the C++ destructor will run the drop + function. */ +template +struct Value +{ +protected: + + std::array raw; + + ~Value() + { + if (!isEvacuated()) { + drop(this); + evacuate(); + } + } + + // Must not be called directly. 
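//
// --- Editorial aside: illustrative sketch, not part of this patch ----------
// Usage sketch for the gzip support added to makeDecompressionSink() in the
// compression.cc hunk further up. StringSink comes from serialise.hh (not
// part of this diff); the helper function itself is hypothetical.

#include "compression.hh"
#include "serialise.hh"
#include <string>

std::string gunzip(const std::string & compressed)
{
    nix::StringSink out;                      // collects the decompressed bytes
    auto sink = nix::makeDecompressionSink("gzip", out);
    (*sink)((const unsigned char *) compressed.data(), compressed.size());
    sink->finish();                           // flushes the zlib stream
    return *out.s;
}
// --- end of aside -----------------------------------------------------------
//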
+ Value() + { } + + Value(Value && other) + : raw(other.raw) + { + other.evacuate(); + } + + void operator =(Value && other) + { + if (!isEvacuated()) + drop(this); + raw = other.raw; + other.evacuate(); + } + +private: + + /* FIXME: optimize these (ideally in such a way that the compiler + can elide most calls to evacuate() / isEvacuated(). */ + inline void evacuate() + { + for (auto & i : raw) i = 0; + } + + inline bool isEvacuated() + { + for (auto & i : raw) + if (i != 0) return false; + return true; + } +}; + +/* A Rust vector. */ +template +struct Vec : Value<3 * sizeof(void *), drop> +{ + inline size_t size() const + { + return ((const size_t *) &this->raw)[2]; + } + + const T * data() const + { + return ((const T * *) &this->raw)[0]; + } +}; + +/* A Rust slice. */ +template +struct Slice +{ + const T * ptr; + size_t size; + + Slice(const T * ptr, size_t size) : ptr(ptr), size(size) + { + assert(ptr); + } +}; + +struct StringSlice : Slice +{ + StringSlice(const std::string & s): Slice(s.data(), s.size()) {} + explicit StringSlice(std::string_view s): Slice(s.data(), s.size()) {} + StringSlice(const char * s): Slice(s, strlen(s)) {} + + operator std::string_view() const + { + return std::string_view(ptr, size); + } +}; + +/* A Rust string. */ +struct String; + +extern "C" { + void ffi_String_new(StringSlice s, String * out); + void ffi_String_drop(void * s); +} + +struct String : Vec +{ + String(std::string_view s) + { + ffi_String_new(StringSlice(s), this); + } + + String(const char * s) + : String({s, std::strlen(s)}) + { + } + + operator std::string_view() const + { + return std::string_view(data(), size()); + } +}; + +std::ostream & operator << (std::ostream & str, const String & s); + +/* C++ representation of Rust's Result. */ +template +struct Result +{ + enum { Ok = 0, Err = 1, Uninit = 2 } tag; + + union { + T data; + std::exception_ptr * exc; + }; + + Result() : tag(Uninit) { }; // FIXME: remove + + Result(const Result &) = delete; + + Result(Result && other) + : tag(other.tag) + { + other.tag = Uninit; + if (tag == Ok) + data = std::move(other.data); + else if (tag == Err) + exc = other.exc; + } + + ~Result() + { + if (tag == Ok) + data.~T(); + else if (tag == Err) + free(exc); + else if (tag == Uninit) + ; + else + abort(); + } + + /* Rethrow the wrapped exception or return the wrapped value. */ + T unwrap() + { + if (tag == Ok) { + tag = Uninit; + return std::move(data); + } + else if (tag == Err) + std::rethrow_exception(*exc); + else + abort(); + } +}; + +} diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index 969e4dff3..5780c93a6 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -24,7 +24,7 @@ struct Sink /* A buffered abstract sink. */ -struct BufferedSink : Sink +struct BufferedSink : virtual Sink { size_t bufSize, bufPos; std::unique_ptr buffer; @@ -77,7 +77,6 @@ struct BufferedSource : Source size_t read(unsigned char * data, size_t len) override; - bool hasData(); protected: @@ -179,6 +178,36 @@ struct TeeSource : Source } }; +/* A reader that consumes the original Source until 'size'. 
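//
// --- Editorial aside: illustrative sketch, not part of this patch ----------
// Tiny demonstration of holding a Rust-owned value from C++ via the rust-ffi
// wrappers above. Linking against the nix-rust library (which provides
// ffi_String_new / ffi_String_drop) is assumed; the function is made up.

#include "rust-ffi.hh"
#include <iostream>

void demoRustString()
{
    rust::String s("hello from C++");  // allocated and owned on the Rust side
    std::cout << s << "\n";            // operator<< declared in rust-ffi.hh
}                                      // ~Value runs the drop function here
// --- end of aside -----------------------------------------------------------
//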
*/ +struct SizedSource : Source +{ + Source & orig; + size_t remain; + SizedSource(Source & orig, size_t size) + : orig(orig), remain(size) { } + size_t read(unsigned char * data, size_t len) + { + if (this->remain <= 0) { + throw EndOfFile("sized: unexpected end-of-file"); + } + len = std::min(len, this->remain); + size_t n = this->orig.read(data, len); + this->remain -= n; + return n; + } + + /* Consume the original source until no remain data is left to consume. */ + size_t drainAll() + { + std::vector buf(8192); + size_t sum = 0; + while (this->remain > 0) { + size_t n = read(buf.data(), buf.size()); + sum += n; + } + return sum; + } +}; /* Convert a function into a sink. */ struct LambdaSink : Sink @@ -244,7 +273,7 @@ Sink & operator << (Sink & sink, const Strings & s); Sink & operator << (Sink & sink, const StringSet & s); -MakeError(SerialisationError, Error) +MakeError(SerialisationError, Error); template diff --git a/src/libutil/tarfile.cc b/src/libutil/tarfile.cc new file mode 100644 index 000000000..c4d8a4f91 --- /dev/null +++ b/src/libutil/tarfile.cc @@ -0,0 +1,125 @@ +#include +#include + +#include "serialise.hh" + +namespace nix { + +struct TarArchive { + struct archive * archive; + Source * source; + std::vector buffer; + + void check(int err, const char * reason = "failed to extract archive: %s") + { + if (err == ARCHIVE_EOF) + throw EndOfFile("reached end of archive"); + else if (err != ARCHIVE_OK) + throw Error(reason, archive_error_string(this->archive)); + } + + TarArchive(Source & source) : buffer(4096) + { + this->archive = archive_read_new(); + this->source = &source; + + archive_read_support_filter_all(archive); + archive_read_support_format_all(archive); + check(archive_read_open(archive, + (void *)this, + TarArchive::callback_open, + TarArchive::callback_read, + TarArchive::callback_close), + "failed to open archive: %s"); + } + + TarArchive(const Path & path) + { + this->archive = archive_read_new(); + + archive_read_support_filter_all(archive); + archive_read_support_format_all(archive); + check(archive_read_open_filename(archive, path.c_str(), 16384), "failed to open archive: %s"); + } + + TarArchive(const TarArchive &) = delete; + + void close() + { + check(archive_read_close(archive), "failed to close archive: %s"); + } + + ~TarArchive() + { + if (this->archive) archive_read_free(this->archive); + } + +private: + + static int callback_open(struct archive *, void * self) { + return ARCHIVE_OK; + } + + static ssize_t callback_read(struct archive * archive, void * _self, const void * * buffer) + { + auto self = (TarArchive *)_self; + *buffer = self->buffer.data(); + + try { + return self->source->read(self->buffer.data(), 4096); + } catch (EndOfFile &) { + return 0; + } catch (std::exception & err) { + archive_set_error(archive, EIO, "source threw exception: %s", err.what()); + return -1; + } + } + + static int callback_close(struct archive *, void * self) { + return ARCHIVE_OK; + } +}; + +static void extract_archive(TarArchive & archive, const Path & destDir) +{ + int flags = ARCHIVE_EXTRACT_FFLAGS + | ARCHIVE_EXTRACT_PERM + | ARCHIVE_EXTRACT_TIME + | ARCHIVE_EXTRACT_SECURE_SYMLINKS + | ARCHIVE_EXTRACT_SECURE_NODOTDOT; + + for (;;) { + struct archive_entry * entry; + int r = archive_read_next_header(archive.archive, &entry); + if (r == ARCHIVE_EOF) break; + else if (r == ARCHIVE_WARN) + warn(archive_error_string(archive.archive)); + else + archive.check(r); + + archive_entry_set_pathname(entry, + (destDir + "/" + archive_entry_pathname(entry)).c_str()); + + 
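//
// --- Editorial aside: illustrative sketch, not part of this patch ----------
// Usage sketch for SizedSource from the serialise.hh hunk above: wrap a
// protocol stream so a reader cannot run past a length-prefixed payload, then
// drain whatever it left unread so the underlying Source stays in sync.
// 'readPayload' and 'payloadSize' are hypothetical.

#include "serialise.hh"

void readFramed(nix::Source & from, size_t payloadSize,
    void (*readPayload)(nix::Source &))
{
    nix::SizedSource sized(from, payloadSize);
    readPayload(sized);   // may consume fewer than payloadSize bytes
    sized.drainAll();     // skip the rest of the frame
}
// --- end of aside -----------------------------------------------------------
//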
archive.check(archive_read_extract(archive.archive, entry, flags)); + } + + archive.close(); +} + +void unpackTarfile(Source & source, const Path & destDir) +{ + auto archive = TarArchive(source); + + createDirs(destDir); + extract_archive(archive, destDir); +} + +void unpackTarfile(const Path & tarFile, const Path & destDir) +{ + auto archive = TarArchive(tarFile); + + createDirs(destDir); + extract_archive(archive, destDir); +} + +} diff --git a/src/libutil/tarfile.hh b/src/libutil/tarfile.hh new file mode 100644 index 000000000..89a024f1d --- /dev/null +++ b/src/libutil/tarfile.hh @@ -0,0 +1,9 @@ +#include "serialise.hh" + +namespace nix { + +void unpackTarfile(Source & source, const Path & destDir); + +void unpackTarfile(const Path & tarFile, const Path & destDir); + +} diff --git a/src/libutil/thread-pool.hh b/src/libutil/thread-pool.hh index bb16b639a..b22e0d162 100644 --- a/src/libutil/thread-pool.hh +++ b/src/libutil/thread-pool.hh @@ -11,7 +11,7 @@ namespace nix { -MakeError(ThreadPoolShutDown, Error) +MakeError(ThreadPoolShutDown, Error); /* A simple thread pool that executes a queue of work items (lambdas). */ diff --git a/src/libutil/types.hh b/src/libutil/types.hh index 92bf469b5..20b96a85c 100644 --- a/src/libutil/types.hh +++ b/src/libutil/types.hh @@ -51,6 +51,16 @@ struct FormatOrString ... a_n’. However, ‘fmt(s)’ is equivalent to ‘s’ (so no %-expansion takes place). */ +inline void formatHelper(boost::format & f) +{ +} + +template +inline void formatHelper(boost::format & f, const T & x, const Args & ... args) +{ + formatHelper(f % x, args...); +} + inline std::string fmt(const std::string & s) { return s; @@ -67,11 +77,11 @@ inline std::string fmt(const FormatOrString & fs) } template -inline std::string fmt(const std::string & fs, Args... args) +inline std::string fmt(const std::string & fs, const Args & ... args) { boost::format f(fs); f.exceptions(boost::io::all_error_bits ^ boost::io::too_many_args_bit); - nop{boost::io::detail::feed(f, args)...}; + formatHelper(f, args...); return f.str(); } @@ -87,14 +97,14 @@ public: unsigned int status = 1; // exit status template - BaseError(unsigned int status, Args... args) + BaseError(unsigned int status, const Args & ... args) : err(fmt(args...)) , status(status) { } template - BaseError(Args... args) + BaseError(const Args & ... args) : err(fmt(args...)) { } @@ -116,9 +126,9 @@ public: { \ public: \ using superClass::superClass; \ - }; + } -MakeError(Error, BaseError) +MakeError(Error, BaseError); class SysError : public Error { @@ -126,7 +136,7 @@ public: int errNo; template - SysError(Args... args) + SysError(const Args & ... args) : Error(addErrno(fmt(args...))) { } diff --git a/src/libutil/util.cc b/src/libutil/util.cc index e3dcd246c..012f1d071 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -10,17 +10,20 @@ #include #include #include +#include #include #include #include #include #include -#include +#include #include #include #include +#include #include +#include #include #ifdef __APPLE__ @@ -38,6 +41,9 @@ extern char * * environ; namespace nix { +const std::string nativeSystem = SYSTEM; + + BaseError & BaseError::addPrefix(const FormatOrString & fs) { prefix_ = fs.s + prefix_; @@ -52,10 +58,11 @@ std::string SysError::addErrno(const std::string & s) } -string getEnv(const string & key, const string & def) +std::optional getEnv(const std::string & key) { char * value = getenv(key.c_str()); - return value ? 
string(value) : def; + if (!value) return {}; + return std::string(value); } @@ -80,6 +87,15 @@ void clearEnv() unsetenv(name.first.c_str()); } +void replaceEnv(std::map newEnv) +{ + clearEnv(); + for (auto newEnvVar : newEnv) + { + setenv(newEnvVar.first.c_str(), newEnvVar.second.c_str(), 1); + } +} + Path absPath(Path path, Path dir) { @@ -172,22 +188,22 @@ Path dirOf(const Path & path) } -string baseNameOf(const Path & path) +std::string_view baseNameOf(std::string_view path) { if (path.empty()) return ""; - Path::size_type last = path.length() - 1; + auto last = path.size() - 1; if (path[last] == '/' && last > 0) last -= 1; - Path::size_type pos = path.rfind('/', last); + auto pos = path.rfind('/', last); if (pos == string::npos) pos = 0; else pos += 1; - return string(path, pos, last - pos + 1); + return path.substr(pos, last - pos + 1); } @@ -384,7 +400,7 @@ static void _deletePath(const Path & path, unsigned long long & bytesFreed) } if (!S_ISDIR(st.st_mode) && st.st_nlink == 1) - bytesFreed += st.st_blocks * 512; + bytesFreed += st.st_size; if (S_ISDIR(st.st_mode)) { /* Make the directory accessible. */ @@ -423,7 +439,7 @@ void deletePath(const Path & path, unsigned long long & bytesFreed) static Path tempName(Path tmpRoot, const Path & prefix, bool includePid, int & counter) { - tmpRoot = canonPath(tmpRoot.empty() ? getEnv("TMPDIR", "/tmp") : tmpRoot, true); + tmpRoot = canonPath(tmpRoot.empty() ? getEnv("TMPDIR").value_or("/tmp") : tmpRoot, true); if (includePid) return (format("%1%/%2%-%3%-%4%") % tmpRoot % prefix % getpid() % counter++).str(); else @@ -462,9 +478,19 @@ Path createTempDir(const Path & tmpRoot, const Path & prefix, } +std::string getUserName() +{ + auto pw = getpwuid(geteuid()); + std::string name = pw ? pw->pw_name : getEnv("USER").value_or(""); + if (name.empty()) + throw Error("cannot figure out user name"); + return name; +} + + static Lazy getHome2([]() { - Path homeDir = getEnv("HOME"); - if (homeDir.empty()) { + auto homeDir = getEnv("HOME"); + if (!homeDir) { std::vector buf(16384); struct passwd pwbuf; struct passwd * pw; @@ -473,7 +499,7 @@ static Lazy getHome2([]() { throw Error("cannot determine user's home directory"); homeDir = pw->pw_dir; } - return homeDir; + return *homeDir; }); Path getHome() { return getHome2(); } @@ -481,25 +507,21 @@ Path getHome() { return getHome2(); } Path getCacheDir() { - Path cacheDir = getEnv("XDG_CACHE_HOME"); - if (cacheDir.empty()) - cacheDir = getHome() + "/.cache"; - return cacheDir; + auto cacheDir = getEnv("XDG_CACHE_HOME"); + return cacheDir ? *cacheDir : getHome() + "/.cache"; } Path getConfigDir() { - Path configDir = getEnv("XDG_CONFIG_HOME"); - if (configDir.empty()) - configDir = getHome() + "/.config"; - return configDir; + auto configDir = getEnv("XDG_CONFIG_HOME"); + return configDir ? *configDir : getHome() + "/.config"; } std::vector getConfigDirs() { Path configHome = getConfigDir(); - string configDirs = getEnv("XDG_CONFIG_DIRS"); + string configDirs = getEnv("XDG_CONFIG_DIRS").value_or(""); std::vector result = tokenizeString>(configDirs, ":"); result.insert(result.begin(), configHome); return result; @@ -508,10 +530,8 @@ std::vector getConfigDirs() Path getDataDir() { - Path dataDir = getEnv("XDG_DATA_HOME"); - if (dataDir.empty()) - dataDir = getHome() + "/.local/share"; - return dataDir; + auto dataDir = getEnv("XDG_DATA_HOME"); + return dataDir ? 
*dataDir : getHome() + "/.local/share"; } @@ -1015,13 +1035,35 @@ void runProgram2(const RunOptions & options) if (options.standardOut) out.create(); if (source) in.create(); + ProcessOptions processOptions; + // vfork implies that the environment of the main process and the fork will + // be shared (technically this is undefined, but in practice that's the + // case), so we can't use it if we alter the environment + if (options.environment) + processOptions.allowVfork = false; + /* Fork. */ Pid pid = startProcess([&]() { + if (options.environment) + replaceEnv(*options.environment); if (options.standardOut && dup2(out.writeSide.get(), STDOUT_FILENO) == -1) throw SysError("dupping stdout"); + if (options.mergeStderrToStdout) + if (dup2(STDOUT_FILENO, STDERR_FILENO) == -1) + throw SysError("cannot dup stdout into stderr"); if (source && dup2(in.readSide.get(), STDIN_FILENO) == -1) throw SysError("dupping stdin"); + if (options.chdir && chdir((*options.chdir).c_str()) == -1) + throw SysError("chdir failed"); + if (options.gid && setgid(*options.gid) == -1) + throw SysError("setgid failed"); + /* Drop all other groups if we're setgid. */ + if (options.gid && setgroups(0, 0) == -1) + throw SysError("setgroups failed"); + if (options.uid && setuid(*options.uid) == -1) + throw SysError("setuid failed"); + Strings args_(options.args); args_.push_front(options.program); @@ -1033,7 +1075,7 @@ void runProgram2(const RunOptions & options) execv(options.program.c_str(), stringsToCharPtrs(args_).data()); throw SysError("executing '%1%'", options.program); - }); + }, processOptions); out.writeSide = -1; @@ -1144,7 +1186,7 @@ void _interrupted() ////////////////////////////////////////////////////////////////////// -template C tokenizeString(const string & s, const string & separators) +template C tokenizeString(std::string_view s, const string & separators) { C result; string::size_type pos = s.find_first_not_of(separators, 0); @@ -1158,9 +1200,9 @@ template C tokenizeString(const string & s, const string & separators) return result; } -template Strings tokenizeString(const string & s, const string & separators); -template StringSet tokenizeString(const string & s, const string & separators); -template vector tokenizeString(const string & s, const string & separators); +template Strings tokenizeString(std::string_view s, const string & separators); +template StringSet tokenizeString(std::string_view s, const string & separators); +template vector tokenizeString(std::string_view s, const string & separators); string concatStringsSep(const string & sep, const Strings & ss) @@ -1215,6 +1257,19 @@ string replaceStrings(const std::string & s, } +std::string rewriteStrings(const std::string & _s, const StringMap & rewrites) +{ + auto s = _s; + for (auto & i : rewrites) { + if (i.first == i.second) continue; + size_t j = 0; + while ((j = s.find(i.first, j)) != string::npos) + s.replace(j, i.first.size(), i.second); + } + return s; +} + + string statusToString(int status) { if (!WIFEXITED(status) || WEXITSTATUS(status) != 0) { @@ -1247,9 +1302,10 @@ bool hasPrefix(const string & s, const string & prefix) } -bool hasSuffix(const string & s, const string & suffix) +bool hasSuffix(std::string_view s, std::string_view suffix) { - return s.size() >= suffix.size() && string(s, s.size() - suffix.size()) == suffix; + return s.size() >= suffix.size() + && s.substr(s.size() - suffix.size()) == suffix; } @@ -1407,7 +1463,7 @@ static Sync> windowSize{{0, 0}}; static void updateWindowSize() { struct winsize ws; - if 
(ioctl(1, TIOCGWINSZ, &ws) == 0) { + if (ioctl(2, TIOCGWINSZ, &ws) == 0) { auto windowSize_(windowSize.lock()); windowSize_->first = ws.ws_row; windowSize_->second = ws.ws_col; @@ -1504,4 +1560,37 @@ std::unique_ptr createInterruptCallback(std::function return std::unique_ptr(res.release()); } + +AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode) +{ + AutoCloseFD fdSocket = socket(PF_UNIX, SOCK_STREAM + #ifdef SOCK_CLOEXEC + | SOCK_CLOEXEC + #endif + , 0); + if (!fdSocket) + throw SysError("cannot create Unix domain socket"); + + closeOnExec(fdSocket.get()); + + struct sockaddr_un addr; + addr.sun_family = AF_UNIX; + if (path.size() >= sizeof(addr.sun_path)) + throw Error("socket path '%1%' is too long", path); + strcpy(addr.sun_path, path.c_str()); + + unlink(path.c_str()); + + if (bind(fdSocket.get(), (struct sockaddr *) &addr, sizeof(addr)) == -1) + throw SysError("cannot bind to socket '%1%'", path); + + if (chmod(path.c_str(), mode) == -1) + throw SysError("changing permissions on '%1%'", path); + + if (listen(fdSocket.get(), 5) == -1) + throw SysError("cannot listen on socket '%1%'", path); + + return fdSocket; +} + } diff --git a/src/libutil/util.hh b/src/libutil/util.hh index fd09f8f2b..bbc2429d6 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -30,8 +30,12 @@ struct Sink; struct Source; +/* The system for which Nix is compiled. */ +extern const std::string nativeSystem; + + /* Return an environment variable. */ -string getEnv(const string & key, const string & def = ""); +std::optional getEnv(const std::string & key); /* Get the entire environment. */ std::map getEnv(); @@ -58,7 +62,7 @@ Path dirOf(const Path & path); /* Return the base name of the given canonical path, i.e., everything following the final `/'. */ -string baseNameOf(const Path & path); +std::string_view baseNameOf(std::string_view path); /* Check whether 'path' is a descendant of 'dir'. */ bool isInDir(const Path & path, const Path & dir); @@ -122,6 +126,8 @@ void deletePath(const Path & path, unsigned long long & bytesFreed); Path createTempDir(const Path & tmpRoot = "", const Path & prefix = "nix", bool includePid = true, bool useGlobalCounter = true, mode_t mode = 0755); +std::string getUserName(); + /* Return $HOME or the user's home directory from /etc/passwd. */ Path getHome(); @@ -154,7 +160,7 @@ void readFull(int fd, unsigned char * buf, size_t count); void writeFull(int fd, const unsigned char * buf, size_t count, bool allowInterrupts = true); void writeFull(int fd, const string & s, bool allowInterrupts = true); -MakeError(EndOfFile, Error) +MakeError(EndOfFile, Error); /* Read a file descriptor until EOF occurs. */ @@ -263,12 +269,17 @@ string runProgram(Path program, bool searchPath = false, struct RunOptions { + std::optional uid; + std::optional gid; + std::optional chdir; + std::optional> environment; Path program; bool searchPath = true; Strings args; std::optional input; Source * standardIn = nullptr; Sink * standardOut = nullptr; + bool mergeStderrToStdout = false; bool _killStderr = false; RunOptions(const Path & program, const Strings & args) @@ -288,7 +299,7 @@ public: int status; template - ExecError(int status, Args... args) + ExecError(int status, const Args & ... args) : Error(args...), status(status) { } }; @@ -322,14 +333,14 @@ void inline checkInterrupt() _interrupted(); } -MakeError(Interrupted, BaseError) +MakeError(Interrupted, BaseError); -MakeError(FormatError, Error) +MakeError(FormatError, Error); /* String tokenizer. 
*/ -template C tokenizeString(const string & s, const string & separators = " \t\n\r"); +template C tokenizeString(std::string_view s, const string & separators = " \t\n\r"); /* Concatenate the given strings with a separator between the @@ -351,6 +362,20 @@ string replaceStrings(const std::string & s, const std::string & from, const std::string & to); +std::string rewriteStrings(const std::string & s, const StringMap & rewrites); + + +/* If a set contains 'from', remove it and insert 'to'. */ +template +void replaceInSet(std::set & set, const T & from, const T & to) +{ + auto i = set.find(from); + if (i == set.end()) return; + set.erase(i); + set.insert(to); +} + + /* Convert the exit status of a child as returned by wait() into an error string. */ string statusToString(int status); @@ -403,7 +428,7 @@ bool hasPrefix(const string & s, const string & prefix); /* Return true iff `s' ends in `suffix'. */ -bool hasSuffix(const string & s, const string & suffix); +bool hasSuffix(std::string_view s, std::string_view suffix); /* Convert a string to lower case. */ @@ -422,8 +447,10 @@ void ignoreException(); /* Some ANSI escape sequences. */ #define ANSI_NORMAL "\e[0m" #define ANSI_BOLD "\e[1m" +#define ANSI_FAINT "\e[2m" #define ANSI_RED "\e[31;1m" #define ANSI_GREEN "\e[32;1m" +#define ANSI_YELLOW "\e[33;1m" #define ANSI_BLUE "\e[34;1m" @@ -445,10 +472,10 @@ string base64Decode(const string & s); /* Get a value for the specified key from an associate container, or a default value if the key doesn't exist. */ template -string get(const T & map, const string & key, const string & def = "") +std::optional get(const T & map, const std::string & key) { auto i = map.find(key); - return i == map.end() ? def : i->second; + return i == map.end() ? std::optional() : i->second; } @@ -456,21 +483,34 @@ string get(const T & map, const string & key, const string & def = "") type T or an exception. (We abuse std::future to pass the value or exception.) */ template -struct Callback +class Callback { std::function)> fun; + std::atomic_flag done = ATOMIC_FLAG_INIT; + +public: Callback(std::function)> fun) : fun(fun) { } - void operator()(T && t) const + Callback(Callback && callback) : fun(std::move(callback.fun)) { + auto prev = callback.done.test_and_set(); + if (prev) done.test_and_set(); + } + + void operator()(T && t) noexcept + { + auto prev = done.test_and_set(); + assert(!prev); std::promise promise; promise.set_value(std::move(t)); fun(promise.get_future()); } - void rethrow(const std::exception_ptr & exc = std::current_exception()) const + void rethrow(const std::exception_ptr & exc = std::current_exception()) noexcept { + auto prev = done.test_and_set(); + assert(!prev); std::promise promise; promise.set_exception(exc); fun(promise.get_future()); @@ -535,4 +575,8 @@ typedef std::function PathFilter; extern PathFilter defaultPathFilter; +/* Create a Unix domain socket in listen mode. 
*/ +AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode); + + } diff --git a/src/libutil/xml-writer.cc b/src/libutil/xml-writer.cc index e5cc2e9fc..68857e34d 100644 --- a/src/libutil/xml-writer.cc +++ b/src/libutil/xml-writer.cc @@ -1,10 +1,10 @@ -#include +#include #include "xml-writer.hh" namespace nix { - + XMLWriter::XMLWriter(bool indent, std::ostream & output) : output(output), indent(indent) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 8fd5acca8..2a91d8500 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -106,7 +106,7 @@ static void _main(int argc, char * * argv) // Heuristic to see if we're invoked as a shebang script, namely, // if we have at least one argument, it's the name of an // executable file, and it starts with "#!". - if (runEnv && argc > 1 && !std::regex_search(argv[1], std::regex("nix-shell"))) { + if (runEnv && argc > 1) { script = argv[1]; try { auto lines = tokenizeString(readFile(script), "\n"); @@ -247,6 +247,16 @@ static void _main(int argc, char * * argv) auto autoArgs = myArgs.getAutoArgs(*state); + if (runEnv) { + auto newArgs = Bindings::allocBindings(autoArgs->size() + 1); + auto tru = state->allocValue(); + mkBool(*tru, true); + newArgs->push_back(Attr(state->symbols.create("inNixShell"), tru)); + for (auto & i : *autoArgs) newArgs->push_back(i); + newArgs->sort(); + autoArgs = newArgs; + } + if (packages) { std::ostringstream joined; joined << "with import { }; (pkgs.runCommandCC or pkgs.runCommand) \"shell\" { buildInputs = [ "; @@ -274,19 +284,21 @@ static void _main(int argc, char * * argv) exprs = {state->parseStdin()}; else for (auto i : left) { - auto absolute = i; - try { - absolute = canonPath(absPath(i), true); - } catch (Error e) {}; if (fromArgs) exprs.push_back(state->parseExprFromString(i, absPath("."))); - else if (store->isStorePath(absolute) && std::regex_match(absolute, std::regex(".*\\.drv(!.*)?"))) - drvs.push_back(DrvInfo(*state, store, absolute)); - else - /* If we're in a #! script, interpret filenames - relative to the script. */ - exprs.push_back(state->parseExprFromFile(resolveExprPath(state->checkSourcePath(lookupFileArg(*state, - inShebang && !packages ? absPath(i, absPath(dirOf(script))) : i))))); + else { + auto absolute = i; + try { + absolute = canonPath(absPath(i), true); + } catch (Error & e) {}; + if (store->isStorePath(absolute) && std::regex_match(absolute, std::regex(".*\\.drv(!.*)?"))) + drvs.push_back(DrvInfo(*state, store, absolute)); + else + /* If we're in a #! script, interpret filenames + relative to the script. */ + exprs.push_back(state->parseExprFromFile(resolveExprPath(state->checkSourcePath(lookupFileArg(*state, + inShebang && !packages ? absPath(i, absPath(dirOf(script))) : i))))); + } } /* Evaluate them into derivations. */ @@ -305,11 +317,11 @@ static void _main(int argc, char * * argv) state->printStats(); - auto buildPaths = [&](const PathSet & paths) { + auto buildPaths = [&](const std::vector & paths) { /* Note: we do this even when !printMissing to efficiently fetch binary cache data. 
*/ unsigned long long downloadSize, narSize; - PathSet willBuild, willSubstitute, unknown; + StorePathSet willBuild, willSubstitute, unknown; store->queryMissing(paths, willBuild, willSubstitute, unknown, downloadSize, narSize); @@ -325,16 +337,16 @@ static void _main(int argc, char * * argv) throw UsageError("nix-shell requires a single derivation"); auto & drvInfo = drvs.front(); - auto drv = store->derivationFromPath(drvInfo.queryDrvPath()); + auto drv = store->derivationFromPath(store->parseStorePath(drvInfo.queryDrvPath())); - PathSet pathsToBuild; + std::vector pathsToBuild; /* Figure out what bash shell to use. If $NIX_BUILD_SHELL is not set, then build bashInteractive from . */ - auto shell = getEnv("NIX_BUILD_SHELL", ""); + auto shell = getEnv("NIX_BUILD_SHELL"); - if (shell == "") { + if (!shell) { try { auto expr = state->parseExprFromString("(import {}).bashInteractive", absPath(".")); @@ -346,7 +358,7 @@ static void _main(int argc, char * * argv) if (!drv) throw Error("the 'bashInteractive' attribute in did not evaluate to a derivation"); - pathsToBuild.insert(drv->queryDrvPath()); + pathsToBuild.emplace_back(store->parseStorePath(drv->queryDrvPath())); shell = drv->queryOutPath() + "/bin/bash"; @@ -358,10 +370,11 @@ static void _main(int argc, char * * argv) // Build or fetch all dependencies of the derivation. for (const auto & input : drv.inputDrvs) - if (std::all_of(envExclude.cbegin(), envExclude.cend(), [&](const string & exclude) { return !std::regex_search(input.first, std::regex(exclude)); })) - pathsToBuild.insert(makeDrvPathWithOutputs(input.first, input.second)); + if (std::all_of(envExclude.cbegin(), envExclude.cend(), + [&](const string & exclude) { return !std::regex_search(store->printStorePath(input.first), std::regex(exclude)); })) + pathsToBuild.emplace_back(input.first, input.second); for (const auto & src : drv.inputSrcs) - pathsToBuild.insert(src); + pathsToBuild.emplace_back(src); buildPaths(pathsToBuild); @@ -370,7 +383,8 @@ static void _main(int argc, char * * argv) // Set the environment. auto env = getEnv(); - auto tmp = getEnv("TMPDIR", getEnv("XDG_RUNTIME_DIR", "/tmp")); + auto tmp = getEnv("TMPDIR"); + if (!tmp) tmp = getEnv("XDG_RUNTIME_DIR").value_or("/tmp"); if (pure) { decltype(env) newEnv; @@ -382,11 +396,11 @@ static void _main(int argc, char * * argv) env["__ETC_PROFILE_SOURCED"] = "1"; } - env["NIX_BUILD_TOP"] = env["TMPDIR"] = env["TEMPDIR"] = env["TMP"] = env["TEMP"] = tmp; + env["NIX_BUILD_TOP"] = env["TMPDIR"] = env["TEMPDIR"] = env["TMP"] = env["TEMP"] = *tmp; env["NIX_STORE"] = store->storeDir; env["NIX_BUILD_CORES"] = std::to_string(settings.buildCores); - auto passAsFile = tokenizeString(get(drv.env, "passAsFile", "")); + auto passAsFile = tokenizeString(get(drv.env, "passAsFile").value_or("")); bool keepTmp = false; int fileNr = 0; @@ -410,7 +424,7 @@ static void _main(int argc, char * * argv) auto rcfile = (Path) tmpDir + "/rc"; writeFile(rcfile, fmt( (keepTmp ? "" : "rm -rf '%1%'; "s) + - "[ -n \"$PS1\" ] && [ -e ~/.bashrc ] && source ~/.bashrc; " + (pure ? "" : "[ -n \"$PS1\" ] && [ -e ~/.bashrc ] && source ~/.bashrc;") + "%2%" "dontAddDisableDepTrack=1; " "[ -e $stdenv/setup ] && source $stdenv/setup; " @@ -427,8 +441,8 @@ static void _main(int argc, char * * argv) (Path) tmpDir, (pure ? "" : "p=$PATH; "), (pure ? "" : "PATH=$PATH:$p; unset p; "), - dirOf(shell), - shell, + dirOf(*shell), + *shell, (getenv("TZ") ? 
(string("export TZ='") + getenv("TZ") + "'; ") : ""), envCommand)); @@ -448,14 +462,14 @@ static void _main(int argc, char * * argv) restoreSignals(); - execvp(shell.c_str(), argPtrs.data()); + execvp(shell->c_str(), argPtrs.data()); - throw SysError("executing shell '%s'", shell); + throw SysError("executing shell '%s'", *shell); } else { - PathSet pathsToBuild; + std::vector pathsToBuild; std::map drvPrefixes; std::map resultSymlinks; @@ -469,7 +483,7 @@ static void _main(int argc, char * * argv) if (outputName == "") throw Error("derivation '%s' lacks an 'outputName' attribute", drvPath); - pathsToBuild.insert(drvPath + "!" + outputName); + pathsToBuild.emplace_back(store->parseStorePath(drvPath), StringSet{outputName}); std::string drvPrefix; auto i = drvPrefixes.find(drvPath); @@ -495,7 +509,7 @@ static void _main(int argc, char * * argv) for (auto & symlink : resultSymlinks) if (auto store2 = store.dynamic_pointer_cast()) - store2->addPermRoot(symlink.second, absPath(symlink.first), true); + store2->addPermRoot(store->parseStorePath(symlink.second), absPath(symlink.first), true); for (auto & path : outPaths) std::cout << path << '\n'; diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc index 8b66cc7e3..1b337a712 100755 --- a/src/nix-channel/nix-channel.cc +++ b/src/nix-channel/nix-channel.cc @@ -27,7 +27,7 @@ static void readChannels() continue; auto split = tokenizeString>(line, " "); auto url = std::regex_replace(split[0], std::regex("/*$"), ""); - auto name = split.size() > 1 ? split[1] : baseNameOf(url); + auto name = split.size() > 1 ? split[1] : std::string(baseNameOf(url)); channels[name] = url; } } @@ -86,20 +86,21 @@ static void update(const StringSet & channelNames) // We want to download the url to a file to see if it's a tarball while also checking if we // got redirected in the process, so that we can grab the various parts of a nix channel // definition from a consistent location if the redirect changes mid-download. - std::string effectiveUrl; + CachedDownloadRequest request(url); + request.ttl = 0; auto dl = getDownloader(); - auto filename = dl->downloadCached(store, url, false, "", Hash(), &effectiveUrl, 0); - url = chomp(std::move(effectiveUrl)); + auto result = dl->downloadCached(store, request); + auto filename = result.path; + url = chomp(result.effectiveUri); // If the URL contains a version number, append it to the name // attribute (so that "nix-env -q" on the channels profile // shows something useful). auto cname = name; std::smatch match; - auto urlBase = baseNameOf(url); - if (std::regex_search(urlBase, match, std::regex("(-\\d.*)$"))) { + auto urlBase = std::string(baseNameOf(url)); + if (std::regex_search(urlBase, match, std::regex("(-\\d.*)$"))) cname = cname + (string) match[1]; - } std::string extraAttrs; @@ -111,22 +112,11 @@ static void update(const StringSet & channelNames) } if (!unpacked) { - // The URL doesn't unpack directly, so let's try treating it like a full channel folder with files in it - // Check if the channel advertises a binary cache. - DownloadRequest request(url + "/binary-cache-url"); - try { - auto dlRes = dl->download(request); - extraAttrs = "binaryCacheURL = \"" + *dlRes.data + "\";"; - } catch (DownloadError & e) { - } - // Download the channel tarball. 
- auto fullURL = url + "/nixexprs.tar.xz"; try { - filename = dl->downloadCached(store, fullURL, false); + filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.xz")).path; } catch (DownloadError & e) { - fullURL = url + "/nixexprs.tar.bz2"; - filename = dl->downloadCached(store, fullURL, false); + filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.bz2")).path; } chomp(filename); } @@ -168,13 +158,7 @@ static int _main(int argc, char ** argv) nixDefExpr = home + "/.nix-defexpr"; // Figure out the name of the channels profile. - ; - auto pw = getpwuid(geteuid()); - std::string name = pw ? pw->pw_name : getEnv("USER", ""); - if (name.empty()) - throw Error("cannot figure out user name"); - profile = settings.nixStateDir + "/profiles/per-user/" + name + "/channels"; - createDirs(dirOf(profile)); + profile = fmt("%s/profiles/per-user/%s/channels", settings.nixStateDir, getUserName()); enum { cNone, diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix-copy-closure/nix-copy-closure.cc index fdcde8b07..f87035760 100755 --- a/src/nix-copy-closure/nix-copy-closure.cc +++ b/src/nix-copy-closure/nix-copy-closure.cc @@ -52,11 +52,11 @@ static int _main(int argc, char ** argv) auto to = toMode ? openStore(remoteUri) : openStore(); auto from = toMode ? openStore() : openStore(remoteUri); - PathSet storePaths2; + StorePathSet storePaths2; for (auto & path : storePaths) storePaths2.insert(from->followLinksToStorePath(path)); - PathSet closure; + StorePathSet closure; from->computeFSClosure(storePaths2, closure, false, includeOutputs); copyPaths(from, to, closure, NoRepair, NoCheckSigs, useSubstitutes); diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 8d63b8f36..134898561 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -2,18 +2,17 @@ #include "local-store.hh" #include "util.hh" #include "serialise.hh" -#include "worker-protocol.hh" #include "archive.hh" -#include "affinity.hh" #include "globals.hh" -#include "monitor-fd.hh" #include "derivations.hh" #include "finally.hh" #include "legacy.hh" +#include "daemon.hh" #include - +#include #include + #include #include #include @@ -25,13 +24,13 @@ #include #include #include -#include #if __APPLE__ || __FreeBSD__ #include #endif using namespace nix; +using namespace nix::daemon; #ifndef __linux__ #define SPLICE_F_MOVE 0 @@ -53,790 +52,6 @@ static ssize_t splice(int fd_in, void *off_in, int fd_out, void *off_out, size_t } #endif -static FdSource from(STDIN_FILENO); -static FdSink to(STDOUT_FILENO); - - -Sink & operator << (Sink & sink, const Logger::Fields & fields) -{ - sink << fields.size(); - for (auto & f : fields) { - sink << f.type; - if (f.type == Logger::Field::tInt) - sink << f.i; - else if (f.type == Logger::Field::tString) - sink << f.s; - else abort(); - } - return sink; -} - - -/* Logger that forwards log messages to the client, *if* we're in a - state where the protocol allows it (i.e., when canSendStderr is - true). */ -struct TunnelLogger : public Logger -{ - struct State - { - bool canSendStderr = false; - std::vector pendingMsgs; - }; - - Sync state_; - - unsigned int clientVersion; - - TunnelLogger(unsigned int clientVersion) : clientVersion(clientVersion) { } - - void enqueueMsg(const std::string & s) - { - auto state(state_.lock()); - - if (state->canSendStderr) { - assert(state->pendingMsgs.empty()); - try { - to(s); - to.flush(); - } catch (...) { - /* Write failed; that means that the other side is - gone. 
*/ - state->canSendStderr = false; - throw; - } - } else - state->pendingMsgs.push_back(s); - } - - void log(Verbosity lvl, const FormatOrString & fs) override - { - if (lvl > verbosity) return; - - StringSink buf; - buf << STDERR_NEXT << (fs.s + "\n"); - enqueueMsg(*buf.s); - } - - /* startWork() means that we're starting an operation for which we - want to send out stderr to the client. */ - void startWork() - { - auto state(state_.lock()); - state->canSendStderr = true; - - for (auto & msg : state->pendingMsgs) - to(msg); - - state->pendingMsgs.clear(); - - to.flush(); - } - - /* stopWork() means that we're done; stop sending stderr to the - client. */ - void stopWork(bool success = true, const string & msg = "", unsigned int status = 0) - { - auto state(state_.lock()); - - state->canSendStderr = false; - - if (success) - to << STDERR_LAST; - else { - to << STDERR_ERROR << msg; - if (status != 0) to << status; - } - } - - void startActivity(ActivityId act, Verbosity lvl, ActivityType type, - const std::string & s, const Fields & fields, ActivityId parent) override - { - if (GET_PROTOCOL_MINOR(clientVersion) < 20) { - if (!s.empty()) - log(lvl, s + "..."); - return; - } - - StringSink buf; - buf << STDERR_START_ACTIVITY << act << lvl << type << s << fields << parent; - enqueueMsg(*buf.s); - } - - void stopActivity(ActivityId act) override - { - if (GET_PROTOCOL_MINOR(clientVersion) < 20) return; - StringSink buf; - buf << STDERR_STOP_ACTIVITY << act; - enqueueMsg(*buf.s); - } - - void result(ActivityId act, ResultType type, const Fields & fields) override - { - if (GET_PROTOCOL_MINOR(clientVersion) < 20) return; - StringSink buf; - buf << STDERR_RESULT << act << type << fields; - enqueueMsg(*buf.s); - } -}; - - -struct TunnelSink : Sink -{ - Sink & to; - TunnelSink(Sink & to) : to(to) { } - virtual void operator () (const unsigned char * data, size_t len) - { - to << STDERR_WRITE; - writeString(data, len, to); - } -}; - - -struct TunnelSource : BufferedSource -{ - Source & from; - TunnelSource(Source & from) : from(from) { } -protected: - size_t readUnbuffered(unsigned char * data, size_t len) override - { - to << STDERR_READ << len; - to.flush(); - size_t n = readString(data, len, from); - if (n == 0) throw EndOfFile("unexpected end-of-file"); - return n; - } -}; - - -/* If the NAR archive contains a single file at top-level, then save - the contents of the file to `s'. Otherwise barf. */ -struct RetrieveRegularNARSink : ParseSink -{ - bool regular; - string s; - - RetrieveRegularNARSink() : regular(true) { } - - void createDirectory(const Path & path) - { - regular = false; - } - - void receiveContents(unsigned char * data, unsigned int len) - { - s.append((const char *) data, len); - } - - void createSymlink(const Path & path, const string & target) - { - regular = false; - } -}; - - -static void performOp(TunnelLogger * logger, ref store, - bool trusted, unsigned int clientVersion, - Source & from, Sink & to, unsigned int op) -{ - switch (op) { - - case wopIsValidPath: { - /* 'readStorePath' could raise an error leading to the connection - being closed. To be able to recover from an invalid path error, - call 'startWork' early, and do 'assertStorePath' afterwards so - that the 'Error' exception handler doesn't close the - connection. 
*/ - Path path = readString(from); - logger->startWork(); - store->assertStorePath(path); - bool result = store->isValidPath(path); - logger->stopWork(); - to << result; - break; - } - - case wopQueryValidPaths: { - PathSet paths = readStorePaths(*store, from); - logger->startWork(); - PathSet res = store->queryValidPaths(paths); - logger->stopWork(); - to << res; - break; - } - - case wopHasSubstitutes: { - Path path = readStorePath(*store, from); - logger->startWork(); - PathSet res = store->querySubstitutablePaths({path}); - logger->stopWork(); - to << (res.find(path) != res.end()); - break; - } - - case wopQuerySubstitutablePaths: { - PathSet paths = readStorePaths(*store, from); - logger->startWork(); - PathSet res = store->querySubstitutablePaths(paths); - logger->stopWork(); - to << res; - break; - } - - case wopQueryPathHash: { - Path path = readStorePath(*store, from); - logger->startWork(); - auto hash = store->queryPathInfo(path)->narHash; - logger->stopWork(); - to << hash.to_string(Base16, false); - break; - } - - case wopQueryReferences: - case wopQueryReferrers: - case wopQueryValidDerivers: - case wopQueryDerivationOutputs: { - Path path = readStorePath(*store, from); - logger->startWork(); - PathSet paths; - if (op == wopQueryReferences) - paths = store->queryPathInfo(path)->references; - else if (op == wopQueryReferrers) - store->queryReferrers(path, paths); - else if (op == wopQueryValidDerivers) - paths = store->queryValidDerivers(path); - else paths = store->queryDerivationOutputs(path); - logger->stopWork(); - to << paths; - break; - } - - case wopQueryDerivationOutputNames: { - Path path = readStorePath(*store, from); - logger->startWork(); - StringSet names; - names = store->queryDerivationOutputNames(path); - logger->stopWork(); - to << names; - break; - } - - case wopQueryDeriver: { - Path path = readStorePath(*store, from); - logger->startWork(); - auto deriver = store->queryPathInfo(path)->deriver; - logger->stopWork(); - to << deriver; - break; - } - - case wopQueryPathFromHashPart: { - string hashPart = readString(from); - logger->startWork(); - Path path = store->queryPathFromHashPart(hashPart); - logger->stopWork(); - to << path; - break; - } - - case wopAddToStore: { - bool fixed, recursive; - std::string s, baseName; - from >> baseName >> fixed /* obsolete */ >> recursive >> s; - /* Compatibility hack. */ - if (!fixed) { - s = "sha256"; - recursive = true; - } - HashType hashAlgo = parseHashType(s); - - TeeSource savedNAR(from); - RetrieveRegularNARSink savedRegular; - - if (recursive) { - /* Get the entire NAR dump from the client and save it to - a string so that we can pass it to - addToStoreFromDump(). */ - ParseSink sink; /* null sink; just parse the NAR */ - parseDump(sink, savedNAR); - } else - parseDump(savedRegular, from); - - logger->startWork(); - if (!savedRegular.regular) throw Error("regular file expected"); - - auto store2 = store.dynamic_pointer_cast(); - if (!store2) throw Error("operation is only supported by LocalStore"); - - Path path = store2->addToStoreFromDump(recursive ? 
*savedNAR.data : savedRegular.s, baseName, recursive, hashAlgo); - logger->stopWork(); - - to << path; - break; - } - - case wopAddTextToStore: { - string suffix = readString(from); - string s = readString(from); - PathSet refs = readStorePaths(*store, from); - logger->startWork(); - Path path = store->addTextToStore(suffix, s, refs, NoRepair); - logger->stopWork(); - to << path; - break; - } - - case wopExportPath: { - Path path = readStorePath(*store, from); - readInt(from); // obsolete - logger->startWork(); - TunnelSink sink(to); - store->exportPath(path, sink); - logger->stopWork(); - to << 1; - break; - } - - case wopImportPaths: { - logger->startWork(); - TunnelSource source(from); - Paths paths = store->importPaths(source, nullptr, - trusted ? NoCheckSigs : CheckSigs); - logger->stopWork(); - to << paths; - break; - } - - case wopBuildPaths: { - PathSet drvs = readStorePaths(*store, from); - BuildMode mode = bmNormal; - if (GET_PROTOCOL_MINOR(clientVersion) >= 15) { - mode = (BuildMode) readInt(from); - - /* Repairing is not atomic, so disallowed for "untrusted" - clients. */ - if (mode == bmRepair && !trusted) - throw Error("repairing is not allowed because you are not in 'trusted-users'"); - } - logger->startWork(); - store->buildPaths(drvs, mode); - logger->stopWork(); - to << 1; - break; - } - - case wopBuildDerivation: { - Path drvPath = readStorePath(*store, from); - BasicDerivation drv; - readDerivation(from, *store, drv); - BuildMode buildMode = (BuildMode) readInt(from); - logger->startWork(); - if (!trusted) - throw Error("you are not privileged to build derivations"); - auto res = store->buildDerivation(drvPath, drv, buildMode); - logger->stopWork(); - to << res.status << res.errorMsg; - break; - } - - case wopEnsurePath: { - Path path = readStorePath(*store, from); - logger->startWork(); - store->ensurePath(path); - logger->stopWork(); - to << 1; - break; - } - - case wopAddTempRoot: { - Path path = readStorePath(*store, from); - logger->startWork(); - store->addTempRoot(path); - logger->stopWork(); - to << 1; - break; - } - - case wopAddIndirectRoot: { - Path path = absPath(readString(from)); - logger->startWork(); - store->addIndirectRoot(path); - logger->stopWork(); - to << 1; - break; - } - - case wopSyncWithGC: { - logger->startWork(); - store->syncWithGC(); - logger->stopWork(); - to << 1; - break; - } - - case wopFindRoots: { - logger->startWork(); - Roots roots = store->findRoots(!trusted); - logger->stopWork(); - - size_t size = 0; - for (auto & i : roots) - size += i.second.size(); - - to << size; - - for (auto & [target, links] : roots) - for (auto & link : links) - to << link << target; - - break; - } - - case wopCollectGarbage: { - GCOptions options; - options.action = (GCOptions::GCAction) readInt(from); - options.pathsToDelete = readStorePaths(*store, from); - from >> options.ignoreLiveness >> options.maxFreed; - // obsolete fields - readInt(from); - readInt(from); - readInt(from); - - GCResults results; - - logger->startWork(); - if (options.ignoreLiveness) - throw Error("you are not allowed to ignore liveness"); - store->collectGarbage(options, results); - logger->stopWork(); - - to << results.paths << results.bytesFreed << 0 /* obsolete */; - - break; - } - - case wopSetOptions: { - settings.keepFailed = readInt(from); - settings.keepGoing = readInt(from); - settings.tryFallback = readInt(from); - verbosity = (Verbosity) readInt(from); - settings.maxBuildJobs.assign(readInt(from)); - settings.maxSilentTime = readInt(from); - readInt(from); // obsolete 
useBuildHook - settings.verboseBuild = lvlError == (Verbosity) readInt(from); - readInt(from); // obsolete logType - readInt(from); // obsolete printBuildTrace - settings.buildCores = readInt(from); - settings.useSubstitutes = readInt(from); - - StringMap overrides; - if (GET_PROTOCOL_MINOR(clientVersion) >= 12) { - unsigned int n = readInt(from); - for (unsigned int i = 0; i < n; i++) { - string name = readString(from); - string value = readString(from); - overrides.emplace(name, value); - } - } - - logger->startWork(); - - for (auto & i : overrides) { - auto & name(i.first); - auto & value(i.second); - - auto setSubstituters = [&](Setting & res) { - if (name != res.name && res.aliases.count(name) == 0) - return false; - StringSet trusted = settings.trustedSubstituters; - for (auto & s : settings.substituters.get()) - trusted.insert(s); - Strings subs; - auto ss = tokenizeString(value); - for (auto & s : ss) - if (trusted.count(s)) - subs.push_back(s); - else - warn("ignoring untrusted substituter '%s'", s); - res = subs; - return true; - }; - - try { - if (name == "ssh-auth-sock") // obsolete - ; - else if (trusted - || name == settings.buildTimeout.name - || name == "connect-timeout" - || (name == "builders" && value == "")) - settings.set(name, value); - else if (setSubstituters(settings.substituters)) - ; - else if (setSubstituters(settings.extraSubstituters)) - ; - else - debug("ignoring untrusted setting '%s'", name); - } catch (UsageError & e) { - warn(e.what()); - } - } - - logger->stopWork(); - break; - } - - case wopQuerySubstitutablePathInfo: { - Path path = absPath(readString(from)); - logger->startWork(); - SubstitutablePathInfos infos; - store->querySubstitutablePathInfos({path}, infos); - logger->stopWork(); - SubstitutablePathInfos::iterator i = infos.find(path); - if (i == infos.end()) - to << 0; - else { - to << 1 << i->second.deriver << i->second.references << i->second.downloadSize << i->second.narSize; - } - break; - } - - case wopQuerySubstitutablePathInfos: { - PathSet paths = readStorePaths(*store, from); - logger->startWork(); - SubstitutablePathInfos infos; - store->querySubstitutablePathInfos(paths, infos); - logger->stopWork(); - to << infos.size(); - for (auto & i : infos) { - to << i.first << i.second.deriver << i.second.references - << i.second.downloadSize << i.second.narSize; - } - break; - } - - case wopQueryAllValidPaths: { - logger->startWork(); - PathSet paths = store->queryAllValidPaths(); - logger->stopWork(); - to << paths; - break; - } - - case wopQueryPathInfo: { - Path path = readStorePath(*store, from); - std::shared_ptr info; - logger->startWork(); - try { - info = store->queryPathInfo(path); - } catch (InvalidPath &) { - if (GET_PROTOCOL_MINOR(clientVersion) < 17) throw; - } - logger->stopWork(); - if (info) { - if (GET_PROTOCOL_MINOR(clientVersion) >= 17) - to << 1; - to << info->deriver << info->narHash.to_string(Base16, false) << info->references - << info->registrationTime << info->narSize; - if (GET_PROTOCOL_MINOR(clientVersion) >= 16) { - to << info->ultimate - << info->sigs - << info->ca; - } - } else { - assert(GET_PROTOCOL_MINOR(clientVersion) >= 17); - to << 0; - } - break; - } - - case wopOptimiseStore: - logger->startWork(); - store->optimiseStore(); - logger->stopWork(); - to << 1; - break; - - case wopVerifyStore: { - bool checkContents, repair; - from >> checkContents >> repair; - logger->startWork(); - if (repair && !trusted) - throw Error("you are not privileged to repair paths"); - bool errors = 
store->verifyStore(checkContents, (RepairFlag) repair); - logger->stopWork(); - to << errors; - break; - } - - case wopAddSignatures: { - Path path = readStorePath(*store, from); - StringSet sigs = readStrings(from); - logger->startWork(); - if (!trusted) - throw Error("you are not privileged to add signatures"); - store->addSignatures(path, sigs); - logger->stopWork(); - to << 1; - break; - } - - case wopNarFromPath: { - auto path = readStorePath(*store, from); - logger->startWork(); - logger->stopWork(); - dumpPath(path, to); - break; - } - - case wopAddToStoreNar: { - bool repair, dontCheckSigs; - ValidPathInfo info; - info.path = readStorePath(*store, from); - from >> info.deriver; - if (!info.deriver.empty()) - store->assertStorePath(info.deriver); - info.narHash = Hash(readString(from), htSHA256); - info.references = readStorePaths(*store, from); - from >> info.registrationTime >> info.narSize >> info.ultimate; - info.sigs = readStrings(from); - from >> info.ca >> repair >> dontCheckSigs; - if (!trusted && dontCheckSigs) - dontCheckSigs = false; - if (!trusted) - info.ultimate = false; - - std::string saved; - std::unique_ptr source; - if (GET_PROTOCOL_MINOR(clientVersion) >= 21) - source = std::make_unique(from); - else { - TeeSink tee(from); - parseDump(tee, tee.source); - saved = std::move(*tee.source.data); - source = std::make_unique(saved); - } - - logger->startWork(); - - // FIXME: race if addToStore doesn't read source? - store->addToStore(info, *source, (RepairFlag) repair, - dontCheckSigs ? NoCheckSigs : CheckSigs, nullptr); - - logger->stopWork(); - break; - } - - case wopQueryMissing: { - PathSet targets = readStorePaths(*store, from); - logger->startWork(); - PathSet willBuild, willSubstitute, unknown; - unsigned long long downloadSize, narSize; - store->queryMissing(targets, willBuild, willSubstitute, unknown, downloadSize, narSize); - logger->stopWork(); - to << willBuild << willSubstitute << unknown << downloadSize << narSize; - break; - } - - default: - throw Error(format("invalid operation %1%") % op); - } -} - - -static void processConnection(bool trusted) -{ - MonitorFdHup monitor(from.fd); - - /* Exchange the greeting. */ - unsigned int magic = readInt(from); - if (magic != WORKER_MAGIC_1) throw Error("protocol mismatch"); - to << WORKER_MAGIC_2 << PROTOCOL_VERSION; - to.flush(); - unsigned int clientVersion = readInt(from); - - if (clientVersion < 0x10a) - throw Error("the Nix client version is too old"); - - auto tunnelLogger = new TunnelLogger(clientVersion); - auto prevLogger = nix::logger; - logger = tunnelLogger; - - unsigned int opCount = 0; - - Finally finally([&]() { - _isInterrupted = false; - prevLogger->log(lvlDebug, fmt("%d operations", opCount)); - }); - - if (GET_PROTOCOL_MINOR(clientVersion) >= 14 && readInt(from)) - setAffinityTo(readInt(from)); - - readInt(from); // obsolete reserveSpace - - /* Send startup error messages to the client. */ - tunnelLogger->startWork(); - - try { - - /* If we can't accept clientVersion, then throw an error - *here* (not above). */ - -#if 0 - /* Prevent users from doing something very dangerous. */ - if (geteuid() == 0 && - querySetting("build-users-group", "") == "") - throw Error("if you run 'nix-daemon' as root, then you MUST set 'build-users-group'!"); -#endif - - /* Open the store. */ - Store::Params params; // FIXME: get params from somewhere - // Disable caching since the client already does that. 
- params["path-info-cache-size"] = "0"; - auto store = openStore(settings.storeUri, params); - - tunnelLogger->stopWork(); - to.flush(); - - /* Process client requests. */ - while (true) { - WorkerOp op; - try { - op = (WorkerOp) readInt(from); - } catch (Interrupted & e) { - break; - } catch (EndOfFile & e) { - break; - } - - opCount++; - - try { - performOp(tunnelLogger, store, trusted, clientVersion, from, to, op); - } catch (Error & e) { - /* If we're not in a state where we can send replies, then - something went wrong processing the input of the - client. This can happen especially if I/O errors occur - during addTextToStore() / importPath(). If that - happens, just send the error message and exit. */ - bool errorAllowed = tunnelLogger->state_.lock()->canSendStderr; - tunnelLogger->stopWork(false, e.msg(), e.status); - if (!errorAllowed) throw; - } catch (std::bad_alloc & e) { - tunnelLogger->stopWork(false, "Nix daemon out of memory", 1); - throw; - } - - to.flush(); - - assert(!tunnelLogger->state_.lock()->canSendStderr); - }; - - } catch (std::exception & e) { - tunnelLogger->stopWork(false, e.what(), 1); - to.flush(); - return; - } -} - static void sigChldHandler(int sigNo) { @@ -925,6 +140,15 @@ static PeerInfo getPeerInfo(int remote) #define SD_LISTEN_FDS_START 3 +static ref openUncachedStore() +{ + Store::Params params; // FIXME: get params from somewhere + // Disable caching since the client already does that. + params["path-info-cache-size"] = "0"; + return openStore(settings.storeUri, params); +} + + static void daemonLoop(char * * argv) { if (chdir("/") == -1) @@ -937,57 +161,20 @@ static void daemonLoop(char * * argv) AutoCloseFD fdSocket; /* Handle socket-based activation by systemd. */ - if (getEnv("LISTEN_FDS") != "") { - if (getEnv("LISTEN_PID") != std::to_string(getpid()) || getEnv("LISTEN_FDS") != "1") + auto listenFds = getEnv("LISTEN_FDS"); + if (listenFds) { + if (getEnv("LISTEN_PID") != std::to_string(getpid()) || listenFds != "1") throw Error("unexpected systemd environment variables"); fdSocket = SD_LISTEN_FDS_START; + closeOnExec(fdSocket.get()); } /* Otherwise, create and bind to a Unix domain socket. */ else { - - /* Create and bind to a Unix domain socket. */ - fdSocket = socket(PF_UNIX, SOCK_STREAM, 0); - if (!fdSocket) - throw SysError("cannot create Unix domain socket"); - - string socketPath = settings.nixDaemonSocketFile; - - createDirs(dirOf(socketPath)); - - /* Urgh, sockaddr_un allows path names of only 108 characters. - So chdir to the socket directory so that we can pass a - relative path name. */ - if (chdir(dirOf(socketPath).c_str()) == -1) - throw SysError("cannot change current directory"); - Path socketPathRel = "./" + baseNameOf(socketPath); - - struct sockaddr_un addr; - addr.sun_family = AF_UNIX; - if (socketPathRel.size() >= sizeof(addr.sun_path)) - throw Error(format("socket path '%1%' is too long") % socketPathRel); - strcpy(addr.sun_path, socketPathRel.c_str()); - - unlink(socketPath.c_str()); - - /* Make sure that the socket is created with 0666 permission - (everybody can connect --- provided they have access to the - directory containing the socket). 
*/ - mode_t oldMode = umask(0111); - int res = bind(fdSocket.get(), (struct sockaddr *) &addr, sizeof(addr)); - umask(oldMode); - if (res == -1) - throw SysError(format("cannot bind to socket '%1%'") % socketPath); - - if (chdir("/") == -1) /* back to the root */ - throw SysError("cannot change current directory"); - - if (listen(fdSocket.get(), 5) == -1) - throw SysError(format("cannot listen on socket '%1%'") % socketPath); + createDirs(dirOf(settings.nixDaemonSocketFile)); + fdSocket = createUnixDomainSocket(settings.nixDaemonSocketFile, 0666); } - closeOnExec(fdSocket.get()); - /* Loop accepting connections. */ while (1) { @@ -1006,7 +193,7 @@ static void daemonLoop(char * * argv) closeOnExec(remote.get()); - bool trusted = false; + TrustedFlag trusted = NotTrusted; PeerInfo peer = getPeerInfo(remote.get()); struct passwd * pw = peer.uidKnown ? getpwuid(peer.uid) : 0; @@ -1019,7 +206,7 @@ static void daemonLoop(char * * argv) Strings allowedUsers = settings.allowedUsers; if (matchUser(user, group, trustedUsers)) - trusted = true; + trusted = Trusted; if ((!trusted && !matchUser(user, group, allowedUsers)) || group == settings.buildUsersGroup) throw Error(format("user '%1%' is not allowed to connect to the Nix daemon") % user); @@ -1051,9 +238,9 @@ static void daemonLoop(char * * argv) } /* Handle the connection. */ - from.fd = remote.get(); - to.fd = remote.get(); - processConnection(trusted); + FdSource from(remote.get()); + FdSink to(remote.get()); + processConnection(openUncachedStore(), from, to, trusted, NotRecursive, user, peer.uid); exit(0); }, options); @@ -1099,7 +286,7 @@ static int _main(int argc, char * * argv) if (chdir(socketDir.c_str()) == -1) throw SysError(format("changing to socket directory '%1%'") % socketDir); - auto socketName = baseNameOf(socketPath); + auto socketName = std::string(baseNameOf(socketPath)); auto addr = sockaddr_un{}; addr.sun_family = AF_UNIX; if (socketName.size() + 1 >= sizeof(addr.sun_path)) @@ -1133,7 +320,9 @@ static int _main(int argc, char * * argv) } } } else { - processConnection(true); + FdSource from(STDIN_FILENO); + FdSink to(STDOUT_FILENO); + processConnection(openUncachedStore(), from, to, Trusted, NotRecursive, "root", 0); } } else { daemonLoop(argv); diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index 6def4a7ff..496dd7977 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -124,11 +124,10 @@ static void getAllExprs(EvalState & state, string attrName = i; if (hasSuffix(attrName, ".nix")) attrName = string(attrName, 0, attrName.size() - 4); - if (attrs.find(attrName) != attrs.end()) { + if (!attrs.insert(attrName).second) { printError(format("warning: name collision in input Nix expressions, skipping '%1%'") % path2); continue; } - attrs.insert(attrName); /* Load the expression on demand. */ Value & vFun = state.getBuiltin("import"); auto vArg = state.allocValue(); @@ -193,12 +192,6 @@ static void loadDerivations(EvalState & state, Path nixExprPath, } -static Path getDefNixExprPath() -{ - return getHome() + "/.nix-defexpr"; -} - - static long getPriority(EvalState & state, DrvInfo & drv) { return drv.queryMetaInt("priority", 0); @@ -215,10 +208,11 @@ static long comparePriorities(EvalState & state, DrvInfo & drv1, DrvInfo & drv2) // at a time. 
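(The daemonLoop hunk above now delegates socket setup to the createUnixDomainSocket() helper introduced in util.cc/util.hh earlier in this diff, instead of inlining the socket()/bind()/listen() sequence. A minimal sketch of the intended usage, assuming the `settings` global from globals.hh; the wrapper name is illustrative only:)

    // Sketch only: open the daemon's listening socket via the new helper.
    // createUnixDomainSocket() binds, chmods and listens; the caller only
    // has to make sure the parent directory exists beforehand.
    AutoCloseFD openDaemonSocket()
    {
        createDirs(dirOf(settings.nixDaemonSocketFile));
        return createUnixDomainSocket(settings.nixDaemonSocketFile, 0666);
    }
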
static bool isPrebuilt(EvalState & state, DrvInfo & elem) { - Path path = elem.queryOutPath(); + auto path = state.store->parseStorePath(elem.queryOutPath()); if (state.store->isValidPath(path)) return true; - PathSet ps = state.store->querySubstitutablePaths({path}); - return ps.find(path) != ps.end(); + StorePathSet paths; + paths.insert(path.clone()); // FIXME: why doesn't StorePathSet{path.clone()} work? + return state.store->querySubstitutablePaths(paths).count(path); } @@ -307,10 +301,8 @@ static DrvInfos filterBySelector(EvalState & state, const DrvInfos & allElems, /* Insert only those elements in the final list that we haven't inserted before. */ for (auto & j : matches) - if (done.find(j.second) == done.end()) { - done.insert(j.second); + if (done.insert(j.second).second) elems.push_back(j.first); - } } checkSelectorUse(selectors); @@ -381,24 +373,21 @@ static void queryInstSources(EvalState & state, case srcStorePaths: { for (auto & i : args) { - Path path = state.store->followLinksToStorePath(i); + auto path = state.store->followLinksToStorePath(i); - string name = baseNameOf(path); - string::size_type dash = name.find('-'); - if (dash != string::npos) - name = string(name, dash + 1); + std::string name(path.name()); DrvInfo elem(state, "", nullptr); elem.setName(name); - if (isDerivation(path)) { - elem.setDrvPath(path); - elem.setOutPath(state.store->derivationFromPath(path).findOutput("out")); + if (path.isDerivation()) { + elem.setDrvPath(state.store->printStorePath(path)); + elem.setOutPath(state.store->printStorePath(state.store->derivationFromPath(path).findOutput("out"))); if (name.size() >= drvExtension.size() && string(name, name.size() - drvExtension.size()) == drvExtension) name = string(name, 0, name.size() - drvExtension.size()); } - else elem.setOutPath(path); + else elem.setOutPath(state.store->printStorePath(path)); elems.push_back(elem); } @@ -431,13 +420,13 @@ static void queryInstSources(EvalState & state, static void printMissing(EvalState & state, DrvInfos & elems) { - PathSet targets; + std::vector targets; for (auto & i : elems) { Path drvPath = i.queryDrvPath(); if (drvPath != "") - targets.insert(drvPath); + targets.emplace_back(state.store->parseStorePath(drvPath)); else - targets.insert(i.queryOutPath()); + targets.emplace_back(state.store->parseStorePath(i.queryOutPath())); } printMissing(state.store, targets); @@ -707,15 +696,15 @@ static void opSet(Globals & globals, Strings opFlags, Strings opArgs) drv.setName(globals.forceName); if (drv.queryDrvPath() != "") { - PathSet paths = {drv.queryDrvPath()}; + std::vector paths{globals.state->store->parseStorePath(drv.queryDrvPath())}; printMissing(globals.state->store, paths); if (globals.dryRun) return; globals.state->store->buildPaths(paths, globals.state->repair ? bmRepair : bmNormal); - } - else { - printMissing(globals.state->store, {drv.queryOutPath()}); + } else { + printMissing(globals.state->store, + {globals.state->store->parseStorePath(drv.queryOutPath())}); if (globals.dryRun) return; - globals.state->store->ensurePath(drv.queryOutPath()); + globals.state->store->ensurePath(globals.state->store->parseStorePath(drv.queryOutPath())); } debug(format("switching to new user environment")); @@ -739,7 +728,7 @@ static void uninstallDerivations(Globals & globals, Strings & selectors, for (auto & j : selectors) /* !!! the repeated calls to followLinksToStorePath() are expensive, should pre-compute them. 
*/ - if ((isPath(j) && i.queryOutPath() == globals.state->store->followLinksToStorePath(j)) + if ((isPath(j) && globals.state->store->parseStorePath(i.queryOutPath()) == globals.state->store->followLinksToStorePath(j)) || DrvName(j).matches(drvName)) { printInfo("uninstalling '%s'", i.queryName()); @@ -861,7 +850,10 @@ static void queryJSON(Globals & globals, vector & elems) for (auto & i : elems) { JSONObject pkgObj = topObj.object(i.attrPath); - pkgObj.attr("name", i.queryName()); + auto drvName = DrvName(i.queryName()); + pkgObj.attr("name", drvName.fullName); + pkgObj.attr("pname", drvName.name); + pkgObj.attr("version", drvName.version); pkgObj.attr("system", i.querySystem()); JSONObject metaObj = pkgObj.object("meta"); @@ -960,12 +952,13 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) /* Query which paths have substitutes. */ - PathSet validPaths, substitutablePaths; + StorePathSet validPaths; + StorePathSet substitutablePaths; if (printStatus || globals.prebuiltOnly) { - PathSet paths; + StorePathSet paths; for (auto & i : elems) try { - paths.insert(i.queryOutPath()); + paths.insert(globals.state->store->parseStorePath(i.queryOutPath())); } catch (AssertionError & e) { printMsg(lvlTalkative, "skipping derivation named '%s' which gives an assertion failure", i.queryName()); i.setFailed(); @@ -996,8 +989,8 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) //Activity act(*logger, lvlDebug, format("outputting query result '%1%'") % i.attrPath); if (globals.prebuiltOnly && - validPaths.find(i.queryOutPath()) == validPaths.end() && - substitutablePaths.find(i.queryOutPath()) == substitutablePaths.end()) + !validPaths.count(globals.state->store->parseStorePath(i.queryOutPath())) && + !substitutablePaths.count(globals.state->store->parseStorePath(i.queryOutPath()))) continue; /* For table output. */ @@ -1008,9 +1001,9 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) if (printStatus) { Path outPath = i.queryOutPath(); - bool hasSubs = substitutablePaths.find(outPath) != substitutablePaths.end(); + bool hasSubs = substitutablePaths.count(globals.state->store->parseStorePath(outPath)); bool isInstalled = installed.find(outPath) != installed.end(); - bool isValid = validPaths.find(outPath) != validPaths.end(); + bool isValid = validPaths.count(globals.state->store->parseStorePath(outPath)); if (xmlOutput) { attrs["installed"] = isInstalled ? "1" : "0"; attrs["valid"] = isValid ? 
"1" : "0"; @@ -1027,10 +1020,14 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) else if (printAttrPath) columns.push_back(i.attrPath); - if (xmlOutput) - attrs["name"] = i.queryName(); - else if (printName) + if (xmlOutput) { + auto drvName = DrvName(i.queryName()); + attrs["name"] = drvName.fullName; + attrs["pname"] = drvName.name; + attrs["version"] = drvName.version; + } else if (printName) { columns.push_back(i.queryName()); + } if (compareVersions) { /* Compare this element against the versions of the @@ -1324,9 +1321,22 @@ static int _main(int argc, char * * argv) Globals globals; globals.instSource.type = srcUnknown; - globals.instSource.nixExprPath = getDefNixExprPath(); + globals.instSource.nixExprPath = getHome() + "/.nix-defexpr"; globals.instSource.systemFilter = "*"; + if (!pathExists(globals.instSource.nixExprPath)) { + try { + createDirs(globals.instSource.nixExprPath); + replaceSymlink( + fmt("%s/profiles/per-user/%s/channels", settings.nixStateDir, getUserName()), + globals.instSource.nixExprPath + "/channels"); + if (getuid() != 0) + replaceSymlink( + fmt("%s/profiles/per-user/root/channels", settings.nixStateDir), + globals.instSource.nixExprPath + "/channels_root"); + } catch (Error &) { } + } + globals.dryRun = false; globals.preserveInstalled = false; globals.removeAll = false; @@ -1337,7 +1347,7 @@ static int _main(int argc, char * * argv) using LegacyArgs::LegacyArgs; }; - MyArgs myArgs(baseNameOf(argv[0]), [&](Strings::iterator & arg, const Strings::iterator & end) { + MyArgs myArgs(std::string(baseNameOf(argv[0])), [&](Strings::iterator & arg, const Strings::iterator & end) { Operation oldOp = op; if (*arg == "--help") @@ -1415,13 +1425,22 @@ static int _main(int argc, char * * argv) globals.instSource.autoArgs = myArgs.getAutoArgs(*globals.state); if (globals.profile == "") - globals.profile = getEnv("NIX_PROFILE", ""); + globals.profile = getEnv("NIX_PROFILE").value_or(""); if (globals.profile == "") { Path profileLink = getHome() + "/.nix-profile"; - globals.profile = pathExists(profileLink) - ? absPath(readLink(profileLink), dirOf(profileLink)) - : canonPath(settings.nixStateDir + "/profiles/default"); + try { + if (!pathExists(profileLink)) { + replaceSymlink( + getuid() == 0 + ? settings.nixStateDir + "/profiles/default" + : fmt("%s/profiles/per-user/%s/profile", settings.nixStateDir, getUserName()), + profileLink); + } + globals.profile = absPath(readLink(profileLink), dirOf(profileLink)); + } catch (Error &) { + globals.profile = profileLink; + } } op(globals, opFlags, opArgs); diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index 4222e48c6..e80d455ba 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -32,16 +32,16 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, { /* Build the components in the user environment, if they don't exist already. */ - PathSet drvsToBuild; + std::vector drvsToBuild; for (auto & i : elems) if (i.queryDrvPath() != "") - drvsToBuild.insert(i.queryDrvPath()); + drvsToBuild.push_back({state.store->parseStorePath(i.queryDrvPath())}); debug(format("building user environment dependencies")); state.store->buildPaths(drvsToBuild, state.repair ? bmRepair : bmNormal); /* Construct the whole top level derivation. 
*/ - PathSet references; + StorePathSet references; auto manifest = state.allocValue(); state.mkList(manifest, elems.size()); unsigned int n = 0; @@ -77,10 +77,10 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, /* This is only necessary when installing store paths, e.g., `nix-env -i /nix/store/abcd...-foo'. */ - state.store->addTempRoot(j.second); - state.store->ensurePath(j.second); + state.store->addTempRoot(state.store->parseStorePath(j.second)); + state.store->ensurePath(state.store->parseStorePath(j.second)); - references.insert(j.second); + references.insert(state.store->parseStorePath(j.second)); } // Copy the meta attributes. @@ -95,13 +95,13 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, vMeta.attrs->sort(); v->attrs->sort(); - if (drvPath != "") references.insert(drvPath); + if (drvPath != "") references.insert(state.store->parseStorePath(drvPath)); } /* Also write a copy of the list of user environment elements to the store; we need it for future modifications of the environment. */ - Path manifestFile = state.store->addTextToStore("env-manifest.nix", + auto manifestFile = state.store->addTextToStore("env-manifest.nix", fmt("%1%", (Value &) manifest), references); /* Get the environment builder expression. */ @@ -114,7 +114,7 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, Root topLevel; state.mkAttrs(args, 3); mkString(*state.allocAttr(args, state.symbols.create("manifest")), - manifestFile, {manifestFile}); + state.store->printStorePath(manifestFile), {state.store->printStorePath(manifestFile)}); args->attrs->push_back(Attr(state.symbols.create("derivations"), (Value *) manifest)); args->attrs->sort(); mkApp(topLevel, envBuilder, args); @@ -124,13 +124,15 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, state.forceValue(topLevel); PathSet context; Attr & aDrvPath(*topLevel->attrs->find(state.sDrvPath)); - Path topLevelDrv = state.coerceToPath(aDrvPath.pos ? *(aDrvPath.pos) : noPos, *(aDrvPath.value), context); + auto topLevelDrv = state.store->parseStorePath(state.coerceToPath(aDrvPath.pos ? *(aDrvPath.pos) : noPos, *(aDrvPath.value), context)); Attr & aOutPath(*topLevel->attrs->find(state.sOutPath)); Path topLevelOut = state.coerceToPath(aOutPath.pos ? *(aOutPath.pos) : noPos, *(aOutPath.value), context); /* Realise the resulting store expression. */ debug("building user environment"); - state.store->buildPaths({topLevelDrv}, state.repair ? bmRepair : bmNormal); + std::vector topLevelDrvs; + topLevelDrvs.push_back(StorePathWithOutputs{topLevelDrv.clone()}); + state.store->buildPaths(topLevelDrvs, state.repair ? bmRepair : bmNormal); /* Switch the current user environment to the output path. */ auto store2 = state.store.dynamic_pointer_cast(); diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index e7038e831..186ed119e 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -75,9 +75,9 @@ void processExpr(EvalState & state, const Strings & attrPaths, if (++rootNr > 1) rootName += "-" + std::to_string(rootNr); auto store2 = state.store.dynamic_pointer_cast(); if (store2) - drvPath = store2->addPermRoot(drvPath, rootName, indirectRoot); + drvPath = store2->addPermRoot(store2->parseStorePath(drvPath), rootName, indirectRoot); } - std::cout << format("%1%%2%\n") % drvPath % (outputName != "out" ? "!" + outputName : ""); + std::cout << fmt("%s%s\n", drvPath, (outputName != "out" ? "!" 
+ outputName : "")); } } } @@ -105,7 +105,7 @@ static int _main(int argc, char * * argv) using LegacyArgs::LegacyArgs; }; - MyArgs myArgs(baseNameOf(argv[0]), [&](Strings::iterator & arg, const Strings::iterator & end) { + MyArgs myArgs(std::string(baseNameOf(argv[0])), [&](Strings::iterator & arg, const Strings::iterator & end) { if (*arg == "--help") showManPage("nix-instantiate"); else if (*arg == "--version") diff --git a/src/nix-prefetch-url/nix-prefetch-url.cc b/src/nix-prefetch-url/nix-prefetch-url.cc index b77f2dee6..b998894a7 100644 --- a/src/nix-prefetch-url/nix-prefetch-url.cc +++ b/src/nix-prefetch-url/nix-prefetch-url.cc @@ -9,6 +9,7 @@ #include "legacy.hh" #include "finally.hh" #include "progress-bar.hh" +#include "tarfile.hh" #include @@ -52,7 +53,7 @@ static int _main(int argc, char * * argv) { HashType ht = htSHA256; std::vector args; - bool printPath = getEnv("PRINT_PATH") != ""; + bool printPath = getEnv("PRINT_PATH") == "1"; bool fromExpr = false; string attrPath; bool unpack = false; @@ -63,7 +64,7 @@ static int _main(int argc, char * * argv) using LegacyArgs::LegacyArgs; }; - MyArgs myArgs(baseNameOf(argv[0]), [&](Strings::iterator & arg, const Strings::iterator & end) { + MyArgs myArgs(std::string(baseNameOf(argv[0])), [&](Strings::iterator & arg, const Strings::iterator & end) { if (*arg == "--help") showManPage("nix-prefetch-url"); else if (*arg == "--version") @@ -155,17 +156,17 @@ static int _main(int argc, char * * argv) /* If an expected hash is given, the file may already exist in the store. */ Hash hash, expectedHash(ht); - Path storePath; + std::optional storePath; if (args.size() == 2) { expectedHash = Hash(args[1], ht); storePath = store->makeFixedOutputPath(unpack, expectedHash, name); - if (store->isValidPath(storePath)) + if (store->isValidPath(*storePath)) hash = expectedHash; else - storePath.clear(); + storePath.reset(); } - if (storePath.empty()) { + if (!storePath) { auto actualUri = resolveMirrorUri(*state, uri); @@ -189,11 +190,7 @@ static int _main(int argc, char * * argv) printInfo("unpacking..."); Path unpacked = (Path) tmpDir + "/unpacked"; createDirs(unpacked); - if (hasSuffix(baseNameOf(uri), ".zip")) - runProgram("unzip", true, {"-qq", tmpFile, "-d", unpacked}); - else - // FIXME: this requires GNU tar for decompression. - runProgram("tar", true, {"xf", tmpFile, "-C", unpacked}); + unpackTarfile(tmpFile, unpacked); /* If the archive unpacks to a single file/directory, then use that as the top-level. */ @@ -217,17 +214,17 @@ static int _main(int argc, char * * argv) into the Nix store. 
*/ storePath = store->addToStore(name, tmpFile, unpack, ht); - assert(storePath == store->makeFixedOutputPath(unpack, hash, name)); + assert(*storePath == store->makeFixedOutputPath(unpack, hash, name)); } stopProgressBar(); if (!printPath) - printInfo(format("path is '%1%'") % storePath); + printInfo("path is '%s'", store->printStorePath(*storePath)); std::cout << printHash16or32(hash) << std::endl; if (printPath) - std::cout << storePath << std::endl; + std::cout << store->printStorePath(*storePath) << std::endl; return 0; } diff --git a/src/nix-store/dotgraph.cc b/src/nix-store/dotgraph.cc index abdfa5e58..a9d0dbbed 100644 --- a/src/nix-store/dotgraph.cc +++ b/src/nix-store/dotgraph.cc @@ -10,9 +10,9 @@ using std::cout; namespace nix { -static string dotQuote(const string & s) +static string dotQuote(std::string_view s) { - return "\"" + s + "\""; + return "\"" + std::string(s) + "\""; } @@ -34,7 +34,7 @@ static string makeEdge(const string & src, const string & dst) } -static string makeNode(const string & id, const string & label, +static string makeNode(const string & id, std::string_view label, const string & colour) { format f = format("%1% [label = %2%, shape = box, " @@ -44,13 +44,6 @@ static string makeNode(const string & id, const string & label, } -static string symbolicName(const string & path) -{ - string p = baseNameOf(path); - return string(p, p.find('-') + 1); -} - - #if 0 string pathLabel(const Path & nePath, const string & elemPath) { @@ -71,9 +64,7 @@ void printClosure(const Path & nePath, const StoreExpr & fs) Path path = *(workList.begin()); workList.erase(path); - if (doneSet.find(path) == doneSet.end()) { - doneSet.insert(path); - + if (doneSet.insert(path).second) { ClosureElems::const_iterator elem = fs.closure.elems.find(path); if (elem == fs.closure.elems.end()) throw Error(format("bad closure, missing path '%1%'") % path); @@ -93,26 +84,24 @@ void printClosure(const Path & nePath, const StoreExpr & fs) #endif -void printDotGraph(ref store, const PathSet & roots) +void printDotGraph(ref store, StorePathSet && roots) { - PathSet workList(roots); - PathSet doneSet; + StorePathSet workList(std::move(roots)); + StorePathSet doneSet; cout << "digraph G {\n"; while (!workList.empty()) { - Path path = *(workList.begin()); - workList.erase(path); + auto path = std::move(workList.extract(workList.begin()).value()); - if (doneSet.find(path) != doneSet.end()) continue; - doneSet.insert(path); + if (!doneSet.insert(path.clone()).second) continue; - cout << makeNode(path, symbolicName(path), "#ff0000"); + cout << makeNode(std::string(path.to_string()), path.name(), "#ff0000"); for (auto & p : store->queryPathInfo(path)->references) { if (p != path) { - workList.insert(p); - cout << makeEdge(p, path); + workList.insert(p.clone()); + cout << makeEdge(std::string(p.to_string()), std::string(p.to_string())); } } diff --git a/src/nix-store/dotgraph.hh b/src/nix-store/dotgraph.hh index e2b5fc72f..73b8d06b9 100644 --- a/src/nix-store/dotgraph.hh +++ b/src/nix-store/dotgraph.hh @@ -1,11 +1,9 @@ #pragma once -#include "types.hh" +#include "store-api.hh" namespace nix { -class Store; - -void printDotGraph(ref store, const PathSet & roots); +void printDotGraph(ref store, StorePathSet && roots); } diff --git a/src/nix-store/graphml.cc b/src/nix-store/graphml.cc index 670fbe227..347708851 100644 --- a/src/nix-store/graphml.cc +++ b/src/nix-store/graphml.cc @@ -11,7 +11,7 @@ using std::cout; namespace nix { -static inline const string & xmlQuote(const string & s) +static inline 
std::string_view xmlQuote(std::string_view s) { // Luckily, store paths shouldn't contain any character that needs to be // quoted. @@ -19,14 +19,13 @@ static inline const string & xmlQuote(const string & s) } -static string symbolicName(const string & path) +static string symbolicName(const std::string & p) { - string p = baseNameOf(path); return string(p, p.find('-') + 1); } -static string makeEdge(const string & src, const string & dst) +static string makeEdge(std::string_view src, std::string_view dst) { return fmt(" \n", xmlQuote(src), xmlQuote(dst)); @@ -41,18 +40,18 @@ static string makeNode(const ValidPathInfo & info) " %3%\n" " %4%\n" " \n", - info.path, + info.path.to_string(), info.narSize, - symbolicName(info.path), - (isDerivation(info.path) ? "derivation" : "output-path")); + symbolicName(std::string(info.path.name())), + (info.path.isDerivation() ? "derivation" : "output-path")); } -void printGraphML(ref store, const PathSet & roots) +void printGraphML(ref store, StorePathSet && roots) { - PathSet workList(roots); - PathSet doneSet; - std::pair ret; + StorePathSet workList(std::move(roots)); + StorePathSet doneSet; + std::pair ret; cout << "\n" << " store, const PathSet & roots) << "\n"; while (!workList.empty()) { - Path path = *(workList.begin()); - workList.erase(path); + auto path = std::move(workList.extract(workList.begin()).value()); - ret = doneSet.insert(path); + ret = doneSet.insert(path.clone()); if (ret.second == false) continue; - ValidPathInfo info = *(store->queryPathInfo(path)); - cout << makeNode(info); + auto info = store->queryPathInfo(path); + cout << makeNode(*info); - for (auto & p : store->queryPathInfo(path)->references) { + for (auto & p : info->references) { if (p != path) { - workList.insert(p); - cout << makeEdge(path, p); + workList.insert(p.clone()); + cout << makeEdge(path.to_string(), p.to_string()); } } diff --git a/src/nix-store/graphml.hh b/src/nix-store/graphml.hh index b78df1e49..78be8a367 100644 --- a/src/nix-store/graphml.hh +++ b/src/nix-store/graphml.hh @@ -1,11 +1,9 @@ #pragma once -#include "types.hh" +#include "store-api.hh" namespace nix { -class Store; - -void printGraphML(ref store, const PathSet & roots); +void printGraphML(ref store, StorePathSet && roots); } diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index f324056bb..45e152c47 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -47,38 +47,37 @@ ref ensureLocalStore() } -static Path useDeriver(Path path) +static StorePath useDeriver(const StorePath & path) { - if (isDerivation(path)) return path; - Path drvPath = store->queryPathInfo(path)->deriver; - if (drvPath == "") - throw Error(format("deriver of path '%1%' is not known") % path); - return drvPath; + if (path.isDerivation()) return path.clone(); + auto info = store->queryPathInfo(path); + if (!info->deriver) + throw Error("deriver of path '%s' is not known", store->printStorePath(path)); + return info->deriver->clone(); } /* Realise the given path. For a derivation that means build it; for other paths it means ensure their validity. 
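The dotgraph and graphml rewrites above walk the reference closure with a work list of move-only StorePaths, pulling elements out via std::set::extract and recording visited nodes with insert(...).second. Below is a self-contained model of that traversal over a toy string-keyed reference map (strings are copyable, so no clone() is needed):

#include <iostream>
#include <map>
#include <set>
#include <string>
#include <vector>

int main()
{
    // Toy reference graph: node -> direct references.
    std::map<std::string, std::vector<std::string>> refs{
        {"app", {"libfoo", "libbar"}},
        {"libfoo", {"libc"}},
        {"libbar", {"libc"}},
        {"libc", {}},
    };

    std::set<std::string> workList{"app"};
    std::set<std::string> doneSet;

    while (!workList.empty()) {
        // extract() removes the element and hands us ownership, which is
        // what allows move-only element types in the real code.
        auto path = std::move(workList.extract(workList.begin()).value());

        if (!doneSet.insert(path).second) continue;  // already visited

        for (auto & p : refs[path])
            if (p != path) {
                workList.insert(p);
                std::cout << p << " -> " << path << "\n";  // emit an edge
            }
    }
    return 0;
}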
*/ -static PathSet realisePath(Path path, bool build = true) +static PathSet realisePath(StorePathWithOutputs path, bool build = true) { - DrvPathWithOutputs p = parseDrvPathWithOutputs(path); - auto store2 = std::dynamic_pointer_cast(store); - if (isDerivation(p.first)) { + if (path.path.isDerivation()) { if (build) store->buildPaths({path}); - Derivation drv = store->derivationFromPath(p.first); + Derivation drv = store->derivationFromPath(path.path); rootNr++; - if (p.second.empty()) - for (auto & i : drv.outputs) p.second.insert(i.first); + if (path.outputs.empty()) + for (auto & i : drv.outputs) path.outputs.insert(i.first); PathSet outputs; - for (auto & j : p.second) { + for (auto & j : path.outputs) { DerivationOutputs::iterator i = drv.outputs.find(j); if (i == drv.outputs.end()) - throw Error(format("derivation '%1%' does not have an output named '%2%'") % p.first % j); - Path outPath = i->second.path; + throw Error("derivation '%s' does not have an output named '%s'", + store2->printStorePath(path.path), j); + auto outPath = store2->printStorePath(i->second.path); if (store2) { if (gcRoot == "") printGCWarning(); @@ -86,7 +85,7 @@ static PathSet realisePath(Path path, bool build = true) Path rootName = gcRoot; if (rootNr > 1) rootName += "-" + std::to_string(rootNr); if (i->first != "out") rootName += "-" + i->first; - outPath = store2->addPermRoot(outPath, rootName, indirectRoot); + outPath = store2->addPermRoot(store->parseStorePath(outPath), rootName, indirectRoot); } } outputs.insert(outPath); @@ -95,8 +94,9 @@ static PathSet realisePath(Path path, bool build = true) } else { - if (build) store->ensurePath(path); - else if (!store->isValidPath(path)) throw Error(format("path '%1%' does not exist and cannot be created") % path); + if (build) store->ensurePath(path.path); + else if (!store->isValidPath(path.path)) + throw Error("path '%s' does not exist and cannot be created", store->printStorePath(path.path)); if (store2) { if (gcRoot == "") printGCWarning(); @@ -104,10 +104,10 @@ static PathSet realisePath(Path path, bool build = true) Path rootName = gcRoot; rootNr++; if (rootNr > 1) rootName += "-" + std::to_string(rootNr); - path = store2->addPermRoot(path, rootName, indirectRoot); + return {store2->addPermRoot(path.path, rootName, indirectRoot)}; } } - return {path}; + return {store->printStorePath(path.path)}; } } @@ -126,23 +126,20 @@ static void opRealise(Strings opFlags, Strings opArgs) else if (i == "--ignore-unknown") ignoreUnknown = true; else throw UsageError(format("unknown flag '%1%'") % i); - Paths paths; - for (auto & i : opArgs) { - DrvPathWithOutputs p = parseDrvPathWithOutputs(i); - paths.push_back(makeDrvPathWithOutputs(store->followLinksToStorePath(p.first), p.second)); - } + std::vector paths; + for (auto & i : opArgs) + paths.push_back(store->followLinksToStorePathWithOutputs(i)); unsigned long long downloadSize, narSize; - PathSet willBuild, willSubstitute, unknown; - store->queryMissing(PathSet(paths.begin(), paths.end()), - willBuild, willSubstitute, unknown, downloadSize, narSize); + StorePathSet willBuild, willSubstitute, unknown; + store->queryMissing(paths, willBuild, willSubstitute, unknown, downloadSize, narSize); if (ignoreUnknown) { - Paths paths2; + std::vector paths2; for (auto & i : paths) - if (unknown.find(i) == unknown.end()) paths2.push_back(i); - paths = paths2; - unknown = PathSet(); + if (!unknown.count(i.path)) paths2.push_back(i); + paths = std::move(paths2); + unknown = StorePathSet(); } if (settings.printMissing) @@ -151,14 
+148,14 @@ static void opRealise(Strings opFlags, Strings opArgs) if (dryRun) return; /* Build all paths at the same time to exploit parallelism. */ - store->buildPaths(PathSet(paths.begin(), paths.end()), buildMode); + store->buildPaths(paths, buildMode); if (!ignoreUnknown) for (auto & i : paths) { - PathSet paths = realisePath(i, false); + auto paths2 = realisePath(i, false); if (!noOutput) - for (auto & j : paths) - cout << format("%1%\n") % j; + for (auto & j : paths2) + cout << fmt("%1%\n", j); } } @@ -169,7 +166,7 @@ static void opAdd(Strings opFlags, Strings opArgs) if (!opFlags.empty()) throw UsageError("unknown flag"); for (auto & i : opArgs) - cout << format("%1%\n") % store->addToStore(baseNameOf(i), i); + cout << fmt("%s\n", store->printStorePath(store->addToStore(std::string(baseNameOf(i)), i))); } @@ -190,7 +187,7 @@ static void opAddFixed(Strings opFlags, Strings opArgs) opArgs.pop_front(); for (auto & i : opArgs) - cout << format("%1%\n") % store->addToStore(baseNameOf(i), i, recursive, hashAlgo); + cout << fmt("%s\n", store->printStorePath(store->addToStore(std::string(baseNameOf(i)), i, recursive, hashAlgo))); } @@ -211,22 +208,21 @@ static void opPrintFixedPath(Strings opFlags, Strings opArgs) string hash = *i++; string name = *i++; - cout << format("%1%\n") % - store->makeFixedOutputPath(recursive, Hash(hash, hashAlgo), name); + cout << fmt("%s\n", store->printStorePath(store->makeFixedOutputPath(recursive, Hash(hash, hashAlgo), name))); } -static PathSet maybeUseOutputs(const Path & storePath, bool useOutput, bool forceRealise) +static StorePathSet maybeUseOutputs(const StorePath & storePath, bool useOutput, bool forceRealise) { if (forceRealise) realisePath(storePath); - if (useOutput && isDerivation(storePath)) { - Derivation drv = store->derivationFromPath(storePath); - PathSet outputs; + if (useOutput && storePath.isDerivation()) { + auto drv = store->derivationFromPath(storePath); + StorePathSet outputs; for (auto & i : drv.outputs) - outputs.insert(i.second.path); + outputs.insert(i.second.path.clone()); return outputs; } - else return {storePath}; + else return singleton(storePath.clone()); } @@ -239,24 +235,23 @@ const string treeLine = "| "; const string treeNull = " "; -static void printTree(const Path & path, - const string & firstPad, const string & tailPad, PathSet & done) +static void printTree(const StorePath & path, + const string & firstPad, const string & tailPad, StorePathSet & done) { - if (done.find(path) != done.end()) { - cout << format("%1%%2% [...]\n") % firstPad % path; + if (!done.insert(path.clone()).second) { + cout << fmt("%s%s [...]\n", firstPad, store->printStorePath(path)); return; } - done.insert(path); - cout << format("%1%%2%\n") % firstPad % path; + cout << fmt("%s%s\n", firstPad, store->printStorePath(path)); - auto references = store->queryPathInfo(path)->references; + auto info = store->queryPathInfo(path); /* Topologically sort under the relation A < B iff A \in closure(B). That is, if derivation A is an (possibly indirect) input of B, then A is printed first. This has the effect of flattening the tree, preventing deeply nested structures. 
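The printTree comment just above asks for an order in which anything in a path's closure is printed before the path itself; the real work happens in topoSortPaths. The sketch below shows one conventional way to produce such an order (post-order depth-first search over a toy reference map); it leaves out the cycle check the real implementation performs and does not claim to match its exact return convention.

#include <functional>
#include <iostream>
#include <map>
#include <set>
#include <string>
#include <vector>

// Post-order DFS: every node is emitted after all of its references,
// i.e. A comes before B whenever A is in the closure of B.
static std::vector<std::string> topoSort(
    const std::map<std::string, std::vector<std::string>> & refs,
    const std::set<std::string> & roots)
{
    std::vector<std::string> sorted;
    std::set<std::string> visited;

    std::function<void(const std::string &)> visit = [&](const std::string & p) {
        if (!visited.insert(p).second) return;
        auto i = refs.find(p);
        if (i != refs.end())
            for (auto & dep : i->second) visit(dep);
        sorted.push_back(p);
    };

    for (auto & r : roots) visit(r);
    return sorted;
}

int main()
{
    std::map<std::string, std::vector<std::string>> refs{
        {"app", {"libfoo", "libc"}}, {"libfoo", {"libc"}}, {"libc", {}}};
    for (auto & p : topoSort(refs, {"app"}))
        std::cout << p << "\n";    // libc, libfoo, app
    return 0;
}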
*/ - Paths sorted = store->topoSortPaths(references); + auto sorted = store->topoSortPaths(info->references); reverse(sorted.begin(), sorted.end()); for (auto i = sorted.begin(); i != sorted.end(); ++i) { @@ -319,11 +314,11 @@ static void opQuery(Strings opFlags, Strings opArgs) case qOutputs: { for (auto & i : opArgs) { - i = store->followLinksToStorePath(i); - if (forceRealise) realisePath(i); - Derivation drv = store->derivationFromPath(i); + auto i2 = store->followLinksToStorePath(i); + if (forceRealise) realisePath(i2); + Derivation drv = store->derivationFromPath(i2); for (auto & j : drv.outputs) - cout << format("%1%\n") % j.second.path; + cout << fmt("%1%\n", store->printStorePath(j.second.path)); } break; } @@ -332,51 +327,54 @@ static void opQuery(Strings opFlags, Strings opArgs) case qReferences: case qReferrers: case qReferrersClosure: { - PathSet paths; + StorePathSet paths; for (auto & i : opArgs) { - PathSet ps = maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise); + auto ps = maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise); for (auto & j : ps) { if (query == qRequisites) store->computeFSClosure(j, paths, false, includeOutputs); else if (query == qReferences) { for (auto & p : store->queryPathInfo(j)->references) - paths.insert(p); + paths.insert(p.clone()); + } + else if (query == qReferrers) { + StorePathSet tmp; + store->queryReferrers(j, tmp); + for (auto & i : tmp) + paths.insert(i.clone()); } - else if (query == qReferrers) store->queryReferrers(j, paths); else if (query == qReferrersClosure) store->computeFSClosure(j, paths, true); } } - Paths sorted = store->topoSortPaths(paths); - for (Paths::reverse_iterator i = sorted.rbegin(); + auto sorted = store->topoSortPaths(paths); + for (StorePaths::reverse_iterator i = sorted.rbegin(); i != sorted.rend(); ++i) - cout << format("%s\n") % *i; + cout << fmt("%s\n", store->printStorePath(*i)); break; } case qDeriver: for (auto & i : opArgs) { - Path deriver = store->queryPathInfo(store->followLinksToStorePath(i))->deriver; - cout << format("%1%\n") % - (deriver == "" ? "unknown-deriver" : deriver); + auto info = store->queryPathInfo(store->followLinksToStorePath(i)); + cout << fmt("%s\n", info->deriver ? 
store->printStorePath(*info->deriver) : "unknown-deriver"); } break; case qBinding: for (auto & i : opArgs) { - Path path = useDeriver(store->followLinksToStorePath(i)); + auto path = useDeriver(store->followLinksToStorePath(i)); Derivation drv = store->derivationFromPath(path); StringPairs::iterator j = drv.env.find(bindingName); if (j == drv.env.end()) - throw Error(format("derivation '%1%' has no environment binding named '%2%'") - % path % bindingName); - cout << format("%1%\n") % j->second; + throw Error("derivation '%s' has no environment binding named '%s'", + store->printStorePath(path), bindingName); + cout << fmt("%s\n", j->second); } break; case qHash: case qSize: for (auto & i : opArgs) { - PathSet paths = maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise); - for (auto & j : paths) { + for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) { auto info = store->queryPathInfo(j); if (query == qHash) { assert(info->narHash.type == htSHA256); @@ -388,50 +386,51 @@ static void opQuery(Strings opFlags, Strings opArgs) break; case qTree: { - PathSet done; + StorePathSet done; for (auto & i : opArgs) printTree(store->followLinksToStorePath(i), "", "", done); break; } case qGraph: { - PathSet roots; - for (auto & i : opArgs) { - PathSet paths = maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise); - roots.insert(paths.begin(), paths.end()); - } - printDotGraph(ref(store), roots); + StorePathSet roots; + for (auto & i : opArgs) + for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) + roots.insert(j.clone()); + printDotGraph(ref(store), std::move(roots)); break; } case qGraphML: { - PathSet roots; - for (auto & i : opArgs) { - PathSet paths = maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise); - roots.insert(paths.begin(), paths.end()); - } - printGraphML(ref(store), roots); + StorePathSet roots; + for (auto & i : opArgs) + for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) + roots.insert(j.clone()); + printGraphML(ref(store), std::move(roots)); break; } case qResolve: { for (auto & i : opArgs) - cout << format("%1%\n") % store->followLinksToStorePath(i); + cout << fmt("%s\n", store->printStorePath(store->followLinksToStorePath(i))); break; } case qRoots: { - PathSet referrers; - for (auto & i : opArgs) { - store->computeFSClosure( - maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise), - referrers, true, settings.gcKeepOutputs, settings.gcKeepDerivations); - } + StorePathSet args; + for (auto & i : opArgs) + for (auto & p : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) + args.insert(p.clone()); + + StorePathSet referrers; + store->computeFSClosure( + args, referrers, true, settings.gcKeepOutputs, settings.gcKeepDerivations); + Roots roots = store->findRoots(false); for (auto & [target, links] : roots) if (referrers.find(target) != referrers.end()) for (auto & link : links) - cout << format("%1% -> %2%\n") % link % target; + cout << fmt("%1% -> %2%\n", link, store->printStorePath(target)); break; } @@ -447,7 +446,7 @@ static void opPrintEnv(Strings opFlags, Strings opArgs) if (opArgs.size() != 1) throw UsageError("'--print-env' requires one derivation store path"); Path drvPath = opArgs.front(); - Derivation drv = store->derivationFromPath(drvPath); + Derivation drv = store->derivationFromPath(store->parseStorePath(drvPath)); /* Print each environment 
variable in the derivation in a format that can be sourced by the shell. */ @@ -477,7 +476,7 @@ static void opReadLog(Strings opFlags, Strings opArgs) auto path = store->followLinksToStorePath(i); auto log = store->getBuildLog(path); if (!log) - throw Error("build log of derivation '%s' is not available", path); + throw Error("build log of derivation '%s' is not available", store->printStorePath(path)); std::cout << *log; } } @@ -488,13 +487,10 @@ static void opDumpDB(Strings opFlags, Strings opArgs) if (!opFlags.empty()) throw UsageError("unknown flag"); if (!opArgs.empty()) { for (auto & i : opArgs) - i = store->followLinksToStorePath(i); - for (auto & i : opArgs) - cout << store->makeValidityRegistration({i}, true, true); + cout << store->makeValidityRegistration(singleton(store->followLinksToStorePath(i)), true, true); } else { - PathSet validPaths = store->queryAllValidPaths(); - for (auto & i : validPaths) - cout << store->makeValidityRegistration({i}, true, true); + for (auto & i : store->queryAllValidPaths()) + cout << store->makeValidityRegistration(singleton(i), true, true); } } @@ -504,18 +500,18 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise) ValidPathInfos infos; while (1) { - ValidPathInfo info = decodeValidPathInfo(cin, hashGiven); - if (info.path == "") break; - if (!store->isValidPath(info.path) || reregister) { + auto info = decodeValidPathInfo(*store, cin, hashGiven); + if (!info) break; + if (!store->isValidPath(info->path) || reregister) { /* !!! races */ if (canonicalise) - canonicalisePathMetaData(info.path, -1); + canonicalisePathMetaData(store->printStorePath(info->path), -1); if (!hashGiven) { - HashResult hash = hashPath(htSHA256, info.path); - info.narHash = hash.first; - info.narSize = hash.second; + HashResult hash = hashPath(htSHA256, store->printStorePath(info->path)); + info->narHash = hash.first; + info->narSize = hash.second; } - infos.push_back(info); + infos.push_back(std::move(*info)); } } @@ -557,12 +553,12 @@ static void opCheckValidity(Strings opFlags, Strings opArgs) else throw UsageError(format("unknown flag '%1%'") % i); for (auto & i : opArgs) { - Path path = store->followLinksToStorePath(i); + auto path = store->followLinksToStorePath(i); if (!store->isValidPath(path)) { if (printInvalid) - cout << format("%1%\n") % path; + cout << fmt("%s\n", store->printStorePath(path)); else - throw Error(format("path '%1%' is not valid") % path); + throw Error("path '%s' is not valid", store->printStorePath(path)); } } } @@ -592,13 +588,13 @@ static void opGC(Strings opFlags, Strings opArgs) if (printRoots) { Roots roots = store->findRoots(false); - std::set> roots2; + std::set> roots2; // Transpose and sort the roots. for (auto & [target, links] : roots) for (auto & link : links) - roots2.emplace(link, target); + roots2.emplace(link, target.clone()); for (auto & [link, target] : roots2) - std::cout << link << " -> " << target << "\n"; + std::cout << link << " -> " << store->printStorePath(target) << "\n"; } else { @@ -633,8 +629,7 @@ static void opDelete(Strings opFlags, Strings opArgs) } -/* Dump a path as a Nix archive. The archive is written to standard - output. */ +/* Dump a path as a Nix archive. The archive is written to stdout */ static void opDump(Strings opFlags, Strings opArgs) { if (!opFlags.empty()) throw UsageError("unknown flag"); @@ -647,8 +642,7 @@ static void opDump(Strings opFlags, Strings opArgs) } -/* Restore a value from a Nix archive. The archive is read from - standard input. 
*/ +/* Restore a value from a Nix archive. The archive is read from stdin. */ static void opRestore(Strings opFlags, Strings opArgs) { if (!opFlags.empty()) throw UsageError("unknown flag"); @@ -664,11 +658,13 @@ static void opExport(Strings opFlags, Strings opArgs) for (auto & i : opFlags) throw UsageError(format("unknown flag '%1%'") % i); + StorePathSet paths; + for (auto & i : opArgs) - i = store->followLinksToStorePath(i); + paths.insert(store->followLinksToStorePath(i)); FdSink sink(STDOUT_FILENO); - store->exportPaths(opArgs, sink); + store->exportPaths(paths, sink); sink.flush(); } @@ -681,10 +677,10 @@ static void opImport(Strings opFlags, Strings opArgs) if (!opArgs.empty()) throw UsageError("no arguments expected"); FdSource source(STDIN_FILENO); - Paths paths = store->importPaths(source, nullptr, NoCheckSigs); + auto paths = store->importPaths(source, nullptr, NoCheckSigs); for (auto & i : paths) - cout << format("%1%\n") % i << std::flush; + cout << fmt("%s\n", store->printStorePath(i)) << std::flush; } @@ -729,16 +725,16 @@ static void opVerifyPath(Strings opFlags, Strings opArgs) int status = 0; for (auto & i : opArgs) { - Path path = store->followLinksToStorePath(i); - printMsg(lvlTalkative, format("checking path '%1%'...") % path); + auto path = store->followLinksToStorePath(i); + printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path)); auto info = store->queryPathInfo(path); HashSink sink(info->narHash.type); store->narFromPath(path, sink); auto current = sink.finish(); if (current.first != info->narHash) { printError( - format("path '%1%' was modified! expected hash '%2%', got '%3%'") - % path % info->narHash.to_string() % current.first.to_string()); + "path '%s' was modified! expected hash '%s', got '%s'", + store->printStorePath(path), info->narHash.to_string(), current.first.to_string()); status = 1; } } @@ -754,10 +750,8 @@ static void opRepairPath(Strings opFlags, Strings opArgs) if (!opFlags.empty()) throw UsageError("no flags expected"); - for (auto & i : opArgs) { - Path path = store->followLinksToStorePath(i); - ensureLocalStore()->repairPath(path); - } + for (auto & i : opArgs) + ensureLocalStore()->repairPath(store->followLinksToStorePath(i)); } /* Optimise the disk space usage of the Nix store by hard-linking @@ -821,7 +815,7 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdQueryValidPaths: { bool lock = readInt(in); bool substitute = readInt(in); - PathSet paths = readStorePaths(*store, in); + auto paths = readStorePaths(*store, in); if (lock && writeAllowed) for (auto & path : paths) store->addTempRoot(path); @@ -831,34 +825,39 @@ static void opServe(Strings opFlags, Strings opArgs) flag. */ if (substitute && writeAllowed) { /* Filter out .drv files (we don't want to build anything). */ - PathSet paths2; + std::vector paths2; for (auto & path : paths) - if (!isDerivation(path)) paths2.insert(path); + if (!path.isDerivation()) + paths2.emplace_back(path.clone()); unsigned long long downloadSize, narSize; - PathSet willBuild, willSubstitute, unknown; - store->queryMissing(PathSet(paths2.begin(), paths2.end()), + StorePathSet willBuild, willSubstitute, unknown; + store->queryMissing(paths2, willBuild, willSubstitute, unknown, downloadSize, narSize); /* FIXME: should use ensurePath(), but it only does one path at a time. 
*/ if (!willSubstitute.empty()) try { - store->buildPaths(willSubstitute); + std::vector subs; + for (auto & p : willSubstitute) subs.emplace_back(p.clone()); + store->buildPaths(subs); } catch (Error & e) { printError(format("warning: %1%") % e.msg()); } } - out << store->queryValidPaths(paths); + writeStorePaths(*store, out, store->queryValidPaths(paths)); break; } case cmdQueryPathInfos: { - PathSet paths = readStorePaths(*store, in); + auto paths = readStorePaths(*store, in); // !!! Maybe we want a queryPathInfos? for (auto & i : paths) { try { auto info = store->queryPathInfo(i); - out << info->path << info->deriver << info->references; + out << store->printStorePath(info->path) + << (info->deriver ? store->printStorePath(*info->deriver) : ""); + writeStorePaths(*store, out, info->references); // !!! Maybe we want compression? out << info->narSize // downloadSize << info->narSize; @@ -872,7 +871,7 @@ static void opServe(Strings opFlags, Strings opArgs) } case cmdDumpStorePath: - store->narFromPath(readStorePath(*store, in), out); + store->narFromPath(store->parseStorePath(readString(in)), out); break; case cmdImportPaths: { @@ -884,14 +883,17 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdExportPaths: { readInt(in); // obsolete - store->exportPaths(readStorePaths(*store, in), out); + store->exportPaths(readStorePaths(*store, in), out); break; } case cmdBuildPaths: { if (!writeAllowed) throw Error("building paths is not allowed"); - PathSet paths = readStorePaths(*store, in); + + std::vector paths; + for (auto & s : readStrings(in)) + paths.emplace_back(store->parsePathWithOutputs(s)); getBuildSettings(); @@ -910,7 +912,7 @@ static void opServe(Strings opFlags, Strings opArgs) if (!writeAllowed) throw Error("building paths is not allowed"); - Path drvPath = readStorePath(*store, in); // informational only + auto drvPath = store->parseStorePath(readString(in)); // informational only BasicDerivation drv; readDerivation(in, *store, drv); @@ -929,29 +931,36 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdQueryClosure: { bool includeOutputs = readInt(in); - PathSet closure; - store->computeFSClosure(readStorePaths(*store, in), + StorePathSet closure; + store->computeFSClosure(readStorePaths(*store, in), closure, false, includeOutputs); - out << closure; + writeStorePaths(*store, out, closure); break; } case cmdAddToStoreNar: { if (!writeAllowed) throw Error("importing paths is not allowed"); - ValidPathInfo info; - info.path = readStorePath(*store, in); - in >> info.deriver; - if (!info.deriver.empty()) - store->assertStorePath(info.deriver); + auto path = readString(in); + ValidPathInfo info(store->parseStorePath(path)); + auto deriver = readString(in); + if (deriver != "") + info.deriver = store->parseStorePath(deriver); info.narHash = Hash(readString(in), htSHA256); - info.references = readStorePaths(*store, in); + info.references = readStorePaths(*store, in); in >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(in); in >> info.ca; - // FIXME: race if addToStore doesn't read source? - store->addToStore(info, in, NoRepair, NoCheckSigs); + if (info.narSize == 0) + throw Error("narInfo is too old and missing the narSize field"); + + SizedSource sizedSource(in, info.narSize); + + store->addToStore(info, sizedSource, NoRepair, NoCheckSigs); + + // consume all the data that has been sent before continuing. 
+ sizedSource.drainAll(); out << 1; // indicate success diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc index e86b96e3f..139db3657 100644 --- a/src/nix/add-to-store.cc +++ b/src/nix/add-to-store.cc @@ -22,11 +22,6 @@ struct CmdAddToStore : MixDryRun, StoreCommand .dest(&namePart); } - std::string name() override - { - return "add-to-store"; - } - std::string description() override { return "add a path to the Nix store"; @@ -45,17 +40,18 @@ struct CmdAddToStore : MixDryRun, StoreCommand StringSink sink; dumpPath(path, sink); - ValidPathInfo info; - info.narHash = hashString(htSHA256, *sink.s); + auto narHash = hashString(htSHA256, *sink.s); + + ValidPathInfo info(store->makeFixedOutputPath(true, narHash, *namePart)); + info.narHash = narHash; info.narSize = sink.s->size(); - info.path = store->makeFixedOutputPath(true, info.narHash, *namePart); info.ca = makeFixedOutputCA(true, info.narHash); if (!dryRun) store->addToStore(info, sink.s); - std::cout << fmt("%s\n", info.path); + std::cout << fmt("%s\n", store->printStorePath(info.path)); } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("add-to-store"); diff --git a/src/nix/build.cc b/src/nix/build.cc index b329ac38a..3c9d2df39 100644 --- a/src/nix/build.cc +++ b/src/nix/build.cc @@ -24,11 +24,6 @@ struct CmdBuild : MixDryRun, InstallablesCommand .set(&outLink, Path("")); } - std::string name() override - { - return "build"; - } - std::string description() override { return "build a derivation or fetch a store path"; @@ -69,4 +64,4 @@ struct CmdBuild : MixDryRun, InstallablesCommand } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("build"); diff --git a/src/nix/cat.cc b/src/nix/cat.cc index a35f640d8..851f90abd 100644 --- a/src/nix/cat.cc +++ b/src/nix/cat.cc @@ -28,11 +28,6 @@ struct CmdCatStore : StoreCommand, MixCat expectArg("path", &path); } - std::string name() override - { - return "cat-store"; - } - std::string description() override { return "print the contents of a store file on stdout"; @@ -54,11 +49,6 @@ struct CmdCatNar : StoreCommand, MixCat expectArg("path", &path); } - std::string name() override - { - return "cat-nar"; - } - std::string description() override { return "print the contents of a file inside a NAR file"; @@ -70,5 +60,5 @@ struct CmdCatNar : StoreCommand, MixCat } }; -static RegisterCommand r1(make_ref()); -static RegisterCommand r2(make_ref()); +static auto r1 = registerCommand("cat-store"); +static auto r2 = registerCommand("cat-nar"); diff --git a/src/nix/command.cc b/src/nix/command.cc index 3d7d582d6..442bc6c53 100644 --- a/src/nix/command.cc +++ b/src/nix/command.cc @@ -1,82 +1,11 @@ #include "command.hh" #include "store-api.hh" #include "derivations.hh" +#include "nixexpr.hh" namespace nix { -Commands * RegisterCommand::commands = 0; - -void Command::printHelp(const string & programName, std::ostream & out) -{ - Args::printHelp(programName, out); - - auto exs = examples(); - if (!exs.empty()) { - out << "\n"; - out << "Examples:\n"; - for (auto & ex : exs) - out << "\n" - << " " << ex.description << "\n" // FIXME: wrap - << " $ " << ex.command << "\n"; - } -} - -MultiCommand::MultiCommand(const Commands & _commands) - : commands(_commands) -{ - expectedArgs.push_back(ExpectedArg{"command", 1, true, [=](std::vector ss) { - assert(!command); - auto i = commands.find(ss[0]); - if (i == commands.end()) - throw UsageError("'%s' is not a recognised command", ss[0]); - command = i->second; - }}); -} - -void MultiCommand::printHelp(const 
string & programName, std::ostream & out) -{ - if (command) { - command->printHelp(programName + " " + command->name(), out); - return; - } - - out << "Usage: " << programName << " ... ...\n"; - - out << "\n"; - out << "Common flags:\n"; - printFlags(out); - - out << "\n"; - out << "Available commands:\n"; - - Table2 table; - for (auto & command : commands) { - auto descr = command.second->description(); - if (!descr.empty()) - table.push_back(std::make_pair(command.second->name(), descr)); - } - printTable(out, table); - -#if 0 - out << "\n"; - out << "For full documentation, run 'man " << programName << "' or 'man " << programName << "-'.\n"; -#endif -} - -bool MultiCommand::processFlag(Strings::iterator & pos, Strings::iterator end) -{ - if (Args::processFlag(pos, end)) return true; - if (command && command->processFlag(pos, end)) return true; - return false; -} - -bool MultiCommand::processArgs(const Strings & args, bool finish) -{ - if (command) - return command->processArgs(args, finish); - else - return Args::processArgs(args, finish); -} +Commands * RegisterCommand::commands = nullptr; StoreCommand::StoreCommand() { @@ -119,28 +48,29 @@ StorePathsCommand::StorePathsCommand(bool recursive) void StorePathsCommand::run(ref store) { - Paths storePaths; + StorePaths storePaths; if (all) { if (installables.size()) throw UsageError("'--all' does not expect arguments"); for (auto & p : store->queryAllValidPaths()) - storePaths.push_back(p); + storePaths.push_back(p.clone()); } else { - for (auto & p : toStorePaths(store, NoBuild, installables)) - storePaths.push_back(p); + for (auto & p : toStorePaths(store, realiseMode, installables)) + storePaths.push_back(p.clone()); if (recursive) { - PathSet closure; - store->computeFSClosure(PathSet(storePaths.begin(), storePaths.end()), - closure, false, false); - storePaths = Paths(closure.begin(), closure.end()); + StorePathSet closure; + store->computeFSClosure(storePathsToSet(storePaths), closure, false, false); + storePaths.clear(); + for (auto & p : closure) + storePaths.push_back(p.clone()); } } - run(store, storePaths); + run(store, std::move(storePaths)); } void StorePathCommand::run(ref store) @@ -153,4 +83,17 @@ void StorePathCommand::run(ref store) run(store, *storePaths.begin()); } +Strings editorFor(const Pos & pos) +{ + auto editor = getEnv("EDITOR").value_or("cat"); + auto args = tokenizeString(editor); + if (pos.line > 0 && ( + editor.find("emacs") != std::string::npos || + editor.find("nano") != std::string::npos || + editor.find("vim") != std::string::npos)) + args.push_back(fmt("+%d", pos.line)); + args.push_back(pos.file); + return args; +} + } diff --git a/src/nix/command.hh b/src/nix/command.hh index 5e4b0f399..8f75e4289 100644 --- a/src/nix/command.hh +++ b/src/nix/command.hh @@ -3,6 +3,7 @@ #include "args.hh" #include "gc.hh" #include "common-eval-args.hh" +#include "path.hh" namespace nix { @@ -11,31 +12,10 @@ extern std::string programPath; struct Value; class Bindings; class EvalState; - -/* A command is an argument parser that can be executed by calling its - run() method. */ -struct Command : virtual Args -{ - virtual std::string name() = 0; - virtual void prepare() { }; - virtual void run() = 0; - - struct Example - { - std::string description; - std::string command; - }; - - typedef std::list Examples; - - virtual Examples examples() { return Examples(); } - - void printHelp(const string & programName, std::ostream & out) override; -}; - +struct Pos; class Store; -/* A command that require a Nix store. 
*/ +/* A command that requires a Nix store. */ struct StoreCommand : virtual Command { StoreCommand(); @@ -50,14 +30,16 @@ private: struct Buildable { - Path drvPath; // may be empty - std::map outputs; + std::optional drvPath; + std::map outputs; }; typedef std::vector Buildables; struct Installable { + virtual ~Installable() { } + virtual std::string what() = 0; virtual Buildables toBuildables() @@ -118,6 +100,7 @@ private: std::vector _installables; }; +/* A command that operates on exactly one "installable" */ struct InstallableCommand : virtual Args, SourceExprCommand { std::shared_ptr installable; @@ -142,13 +125,17 @@ private: bool recursive = false; bool all = false; +protected: + + RealiseMode realiseMode = NoBuild; + public: StorePathsCommand(bool recursive = false); using StoreCommand::run; - virtual void run(ref store, Paths storePaths) = 0; + virtual void run(ref store, std::vector storePaths) = 0; void run(ref store) override; @@ -160,43 +147,30 @@ struct StorePathCommand : public InstallablesCommand { using StoreCommand::run; - virtual void run(ref store, const Path & storePath) = 0; + virtual void run(ref store, const StorePath & storePath) = 0; void run(ref store) override; }; -typedef std::map> Commands; - -/* An argument parser that supports multiple subcommands, - i.e. ‘ ’. */ -class MultiCommand : virtual Args -{ -public: - Commands commands; - - std::shared_ptr command; - - MultiCommand(const Commands & commands); - - void printHelp(const string & programName, std::ostream & out) override; - - bool processFlag(Strings::iterator & pos, Strings::iterator end) override; - - bool processArgs(const Strings & args, bool finish) override; -}; - /* A helper class for registering commands globally. */ struct RegisterCommand { static Commands * commands; - RegisterCommand(ref command) + RegisterCommand(const std::string & name, + std::function()> command) { if (!commands) commands = new Commands; - commands->emplace(command->name(), command); + commands->emplace(name, command); } }; +template +static RegisterCommand registerCommand(const std::string & name) +{ + return RegisterCommand(name, [](){ return make_ref(); }); +} + std::shared_ptr parseInstallable( SourceExprCommand & cmd, ref store, const std::string & installable, bool useDefaultInstallables); @@ -204,14 +178,18 @@ std::shared_ptr parseInstallable( Buildables build(ref store, RealiseMode mode, std::vector> installables); -PathSet toStorePaths(ref store, RealiseMode mode, +std::set toStorePaths(ref store, RealiseMode mode, std::vector> installables); -Path toStorePath(ref store, RealiseMode mode, +StorePath toStorePath(ref store, RealiseMode mode, std::shared_ptr installable); -PathSet toDerivations(ref store, +std::set toDerivations(ref store, std::vector> installables, bool useDeriver = false); +/* Helper function to generate args that invoke $EDITOR on + filename:lineno. 
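With the per-command name() methods gone, each nix subcommand above now registers itself by name together with a factory lambda, as in static auto r1 = registerCommand<CmdBuild>("build"). The following is a compact, self-contained model of that self-registration pattern; it uses std::unique_ptr and a plain map rather than Nix's ref and Commands types.

#include <functional>
#include <iostream>
#include <map>
#include <memory>
#include <string>

struct Command {
    virtual ~Command() = default;
    virtual std::string description() = 0;
};

using Commands = std::map<std::string, std::function<std::unique_ptr<Command>()>>;

struct RegisterCommand {
    static Commands * commands;   // created lazily to avoid init-order issues
    RegisterCommand(const std::string & name,
        std::function<std::unique_ptr<Command>()> factory)
    {
        if (!commands) commands = new Commands;
        commands->emplace(name, std::move(factory));
    }
};

Commands * RegisterCommand::commands = nullptr;

template<class T>
static RegisterCommand registerCommand(const std::string & name)
{
    return RegisterCommand(name, [] { return std::make_unique<T>(); });
}

struct CmdHello : Command {
    std::string description() override { return "print a greeting"; }
};

static auto r1 = registerCommand<CmdHello>("hello");

int main()
{
    for (auto & [name, factory] : *RegisterCommand::commands)
        std::cout << name << ": " << factory()->description() << "\n";
    return 0;
}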
*/ +Strings editorFor(const Pos & pos); + } diff --git a/src/nix/copy.cc b/src/nix/copy.cc index 96bd453d8..85c777d38 100644 --- a/src/nix/copy.cc +++ b/src/nix/copy.cc @@ -36,17 +36,12 @@ struct CmdCopy : StorePathsCommand .set(&checkSigs, NoCheckSigs); mkFlag() - .longName("substitute") + .longName("substitute-on-destination") .shortName('s') .description("whether to try substitutes on the destination store (only supported by SSH)") .set(&substitute, Substitute); } - std::string name() override - { - return "copy"; - } - std::string description() override { return "copy paths between Nix stores"; @@ -85,16 +80,16 @@ struct CmdCopy : StorePathsCommand return srcUri.empty() ? StoreCommand::createStore() : openStore(srcUri); } - void run(ref srcStore, Paths storePaths) override + void run(ref srcStore, StorePaths storePaths) override { if (srcUri.empty() && dstUri.empty()) throw UsageError("you must pass '--from' and/or '--to'"); ref dstStore = dstUri.empty() ? openStore() : openStore(dstUri); - copyPaths(srcStore, dstStore, PathSet(storePaths.begin(), storePaths.end()), + copyPaths(srcStore, dstStore, storePathsToSet(storePaths), NoRepair, checkSigs, substitute); } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("copy"); diff --git a/src/nix/doctor.cc b/src/nix/doctor.cc index 7b5444619..0aa634d6e 100644 --- a/src/nix/doctor.cc +++ b/src/nix/doctor.cc @@ -1,11 +1,17 @@ +#include + #include "command.hh" +#include "logging.hh" #include "serve-protocol.hh" #include "shared.hh" #include "store-api.hh" +#include "util.hh" #include "worker-protocol.hh" using namespace nix; +namespace { + std::string formatProtocol(unsigned int proto) { if (proto) { @@ -16,24 +22,30 @@ std::string formatProtocol(unsigned int proto) return "unknown"; } +bool checkPass(const std::string & msg) { + logger->log(ANSI_GREEN "[PASS] " ANSI_NORMAL + msg); + return true; +} + +bool checkFail(const std::string & msg) { + logger->log(ANSI_RED "[FAIL] " ANSI_NORMAL + msg); + return false; +} + +} + struct CmdDoctor : StoreCommand { bool success = true; - std::string name() override - { - return "doctor"; - } - std::string description() override { - return "check your system for potential problems"; + return "check your system for potential problems and print a PASS or FAIL for each check."; } void run(ref store) override { - std::cout << "Store uri: " << store->getUri() << std::endl; - std::cout << std::endl; + logger->log("Running checks against store uri: " + store->getUri()); auto type = getStoreType(); @@ -51,27 +63,26 @@ struct CmdDoctor : StoreCommand { PathSet dirs; - for (auto & dir : tokenizeString(getEnv("PATH"), ":")) + for (auto & dir : tokenizeString(getEnv("PATH").value_or(""), ":")) if (pathExists(dir + "/nix-env")) dirs.insert(dirOf(canonPath(dir + "/nix-env", true))); if (dirs.size() != 1) { - std::cout << "Warning: multiple versions of nix found in PATH." 
<< std::endl; - std::cout << std::endl; + std::stringstream ss; + ss << "Multiple versions of nix found in PATH:\n"; for (auto & dir : dirs) - std::cout << " " << dir << std::endl; - std::cout << std::endl; - return false; + ss << " " << dir << "\n"; + return checkFail(ss.str()); } - return true; + return checkPass("PATH contains only one nix version."); } bool checkProfileRoots(ref store) { PathSet dirs; - for (auto & dir : tokenizeString(getEnv("PATH"), ":")) { + for (auto & dir : tokenizeString(getEnv("PATH").value_or(""), ":")) { Path profileDir = dirOf(dir); try { Path userEnv = canonPath(profileDir, true); @@ -87,17 +98,17 @@ struct CmdDoctor : StoreCommand } if (!dirs.empty()) { - std::cout << "Warning: found profiles outside of " << settings.nixStateDir << "/profiles." << std::endl; - std::cout << "The generation this profile points to might not have a gcroot and could be" << std::endl; - std::cout << "garbage collected, resulting in broken symlinks." << std::endl; - std::cout << std::endl; + std::stringstream ss; + ss << "Found profiles outside of " << settings.nixStateDir << "/profiles.\n" + << "The generation this profile points to might not have a gcroot and could be\n" + << "garbage collected, resulting in broken symlinks.\n\n"; for (auto & dir : dirs) - std::cout << " " << dir << std::endl; - std::cout << std::endl; - return false; + ss << " " << dir << "\n"; + ss << "\n"; + return checkFail(ss.str()); } - return true; + return checkPass("All profiles are gcroots."); } bool checkStoreProtocol(unsigned int storeProto) @@ -107,18 +118,17 @@ struct CmdDoctor : StoreCommand : PROTOCOL_VERSION; if (clientProto != storeProto) { - std::cout << "Warning: protocol version of this client does not match the store." << std::endl; - std::cout << "While this is not necessarily a problem it's recommended to keep the client in" << std::endl; - std::cout << "sync with the daemon." 
<< std::endl; - std::cout << std::endl; - std::cout << "Client protocol: " << formatProtocol(clientProto) << std::endl; - std::cout << "Store protocol: " << formatProtocol(storeProto) << std::endl; - std::cout << std::endl; - return false; + std::stringstream ss; + ss << "Warning: protocol version of this client does not match the store.\n" + << "While this is not necessarily a problem it's recommended to keep the client in\n" + << "sync with the daemon.\n\n" + << "Client protocol: " << formatProtocol(clientProto) << "\n" + << "Store protocol: " << formatProtocol(storeProto) << "\n\n"; + return checkFail(ss.str()); } - return true; + return checkPass("Client protocol matches store protocol."); } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("doctor"); diff --git a/src/nix/dump-path.cc b/src/nix/dump-path.cc index f411c0cb7..bb741b572 100644 --- a/src/nix/dump-path.cc +++ b/src/nix/dump-path.cc @@ -5,11 +5,6 @@ using namespace nix; struct CmdDumpPath : StorePathCommand { - std::string name() override - { - return "dump-path"; - } - std::string description() override { return "dump a store path to stdout (in NAR format)"; @@ -25,7 +20,7 @@ struct CmdDumpPath : StorePathCommand }; } - void run(ref store, const Path & storePath) override + void run(ref store, const StorePath & storePath) override { FdSink sink(STDOUT_FILENO); store->narFromPath(storePath, sink); @@ -33,4 +28,4 @@ struct CmdDumpPath : StorePathCommand } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("dump-path"); diff --git a/src/nix/edit.cc b/src/nix/edit.cc index a347a5597..ca410cd1f 100644 --- a/src/nix/edit.cc +++ b/src/nix/edit.cc @@ -10,11 +10,6 @@ using namespace nix; struct CmdEdit : InstallableCommand { - std::string name() override - { - return "edit"; - } - std::string description() override { return "open the Nix expression of a Nix package in $EDITOR"; @@ -36,46 +31,18 @@ struct CmdEdit : InstallableCommand auto v = installable->toValue(*state); - Ptr v2; - try { - auto dummyArgs = Bindings::allocBindings(0); - v2 = findAlongAttrPath(*state, "meta.position", dummyArgs, *v); - } catch (Error &) { - throw Error("package '%s' has no source location information", installable->what()); - } - - auto pos = state->forceString(*v2); - debug("position is %s", pos); - - auto colon = pos.rfind(':'); - if (colon == std::string::npos) - throw Error("cannot parse meta.position attribute '%s'", pos); - - std::string filename(pos, 0, colon); - int lineno; - try { - lineno = std::stoi(std::string(pos, colon + 1)); - } catch (std::invalid_argument e) { - throw Error("cannot parse line number '%s'", pos); - } - - auto editor = getEnv("EDITOR", "cat"); - - auto args = tokenizeString(editor); - - if (editor.find("emacs") != std::string::npos || - editor.find("nano") != std::string::npos || - editor.find("vim") != std::string::npos) - args.push_back(fmt("+%d", lineno)); - - args.push_back(filename); + Pos pos = findDerivationFilename(*state, *v, installable->what()); stopProgressBar(); + auto args = editorFor(pos); + execvp(args.front().c_str(), stringsToCharPtrs(args).data()); - throw SysError("cannot run editor '%s'", editor); + std::string command; + for (const auto &arg : args) command += " '" + arg + "'"; + throw SysError("cannot run command%s", command); } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("edit"); diff --git a/src/nix/eval.cc b/src/nix/eval.cc index b7058361c..276fdf003 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ 
-18,11 +18,6 @@ struct CmdEval : MixJSON, InstallableCommand mkFlag(0, "raw", "print strings unquoted", &raw); } - std::string name() override - { - return "eval"; - } - std::string description() override { return "evaluate a Nix expression"; @@ -41,7 +36,7 @@ struct CmdEval : MixJSON, InstallableCommand }, Example{ "To get the current version of Nixpkgs:", - "nix eval --raw nixpkgs.lib.nixpkgsVersion" + "nix eval --raw nixpkgs.lib.version" }, Example{ "To print the store path of the Hello package:", @@ -74,4 +69,4 @@ struct CmdEval : MixJSON, InstallableCommand } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("eval"); diff --git a/src/nix/hash.cc b/src/nix/hash.cc index af4105e28..0cc523f50 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -2,6 +2,8 @@ #include "hash.hh" #include "legacy.hh" #include "shared.hh" +#include "references.hh" +#include "archive.hh" using namespace nix; @@ -13,6 +15,7 @@ struct CmdHash : Command bool truncate = false; HashType ht = htSHA256; std::vector paths; + std::optional modulus; CmdHash(Mode mode) : mode(mode) { @@ -23,14 +26,16 @@ struct CmdHash : Command mkFlag() .longName("type") .mkHashTypeFlag(&ht); + #if 0 + mkFlag() + .longName("modulo") + .description("compute hash modulo specified string") + .labels({"modulus"}) + .dest(&modulus); + #endif expectArgs("paths", &paths); } - std::string name() override - { - return mode == mFile ? "hash-file" : "hash-path"; - } - std::string description() override { return mode == mFile @@ -41,7 +46,19 @@ struct CmdHash : Command void run() override { for (auto path : paths) { - Hash h = mode == mFile ? hashFile(ht, path) : hashPath(ht, path).first; + + std::unique_ptr hashSink; + if (modulus) + hashSink = std::make_unique(ht, *modulus); + else + hashSink = std::make_unique(ht); + + if (mode == mFile) + readFile(path, *hashSink); + else + dumpPath(path, *hashSink); + + Hash h = hashSink->finish().first; if (truncate && h.hashSize > 20) h = compressHash(h, 20); std::cout << format("%1%\n") % h.to_string(base, base == SRI); @@ -49,8 +66,8 @@ struct CmdHash : Command } }; -static RegisterCommand r1(make_ref(CmdHash::mFile)); -static RegisterCommand r2(make_ref(CmdHash::mPath)); +static RegisterCommand r1("hash-file", [](){ return make_ref(CmdHash::mFile); }); +static RegisterCommand r2("hash-path", [](){ return make_ref(CmdHash::mPath); }); struct CmdToBase : Command { @@ -66,15 +83,6 @@ struct CmdToBase : Command expectArgs("strings", &args); } - std::string name() override - { - return - base == Base16 ? "to-base16" : - base == Base32 ? "to-base32" : - base == Base64 ? "to-base64" : - "to-sri"; - } - std::string description() override { return fmt("convert a hash to %s representation", @@ -91,10 +99,10 @@ struct CmdToBase : Command } }; -static RegisterCommand r3(make_ref(Base16)); -static RegisterCommand r4(make_ref(Base32)); -static RegisterCommand r5(make_ref(Base64)); -static RegisterCommand r6(make_ref(SRI)); +static RegisterCommand r3("to-base16", [](){ return make_ref(Base16); }); +static RegisterCommand r4("to-base32", [](){ return make_ref(Base32); }); +static RegisterCommand r5("to-base64", [](){ return make_ref(Base64); }); +static RegisterCommand r6("to-sri", [](){ return make_ref(SRI); }); /* Legacy nix-hash command. 
*/ static int compatNixHash(int argc, char * * argv) diff --git a/src/nix/installables.cc b/src/nix/installables.cc index 424aacd41..e715e82ab 100644 --- a/src/nix/installables.cc +++ b/src/nix/installables.cc @@ -39,31 +39,32 @@ Ptr SourceExprCommand::getSourceExpr(EvalState & state) auto searchPath = state.getSearchPath(); - state.mkAttrs(*vSourceExpr, searchPath.size() + 1); + state.mkAttrs(*vSourceExpr, 1024); mkBool(*state.allocAttr(*vSourceExpr, sToplevel), true); std::unordered_set seen; - for (auto & i : searchPath) { - if (i.first == "") continue; - if (seen.count(i.first)) continue; - seen.insert(i.first); -#if 0 - auto res = state.resolveSearchPathElem(i); - if (!res.first) continue; - if (!pathExists(res.second)) continue; - mkApp(*state.allocAttr(*vSourceExpr, state.symbols.create(i.first)), - state.getBuiltin("import"), - mkString(*state.allocValue(), res.second)); -#endif + auto addEntry = [&](const std::string & name) { + if (name == "") return; + if (!seen.insert(name).second) return; auto v1 = state.allocValue(); mkPrimOpApp(*v1, state.getBuiltin("findFile"), state.getBuiltin("nixPath")); auto v2 = state.allocValue(); - mkApp(*v2, *v1, mkString(*state.allocValue(), i.first)); - mkApp(*state.allocAttr(*vSourceExpr, state.symbols.create(i.first)), + mkApp(*v2, *v1, mkString(state.allocValue(), name)); + mkApp(*state.allocAttr(*vSourceExpr, state.symbols.create(name)), state.getBuiltin("import"), *v2); - } + }; + + for (auto & i : searchPath) + /* Hack to handle channels. */ + if (i.first.empty() && pathExists(i.second + "/manifest.nix")) { + for (auto & j : readDirectory(i.second)) + if (j.name != "manifest.nix" + && pathExists(fmt("%s/%s/default.nix", i.second, j.name))) + addEntry(j.name); + } else + addEntry(i.first); vSourceExpr->attrs->sort(); } @@ -88,15 +89,25 @@ Buildable Installable::toBuildable() struct InstallableStorePath : Installable { - Path storePath; + ref store; + StorePath storePath; - InstallableStorePath(const Path & storePath) : storePath(storePath) { } + InstallableStorePath(ref store, const Path & storePath) + : store(store), storePath(store->parseStorePath(storePath)) { } - std::string what() override { return storePath; } + std::string what() override { return store->printStorePath(storePath); } Buildables toBuildables() override { - return {{isDerivation(storePath) ? storePath : "", {{"out", storePath}}}}; + std::map outputs; + outputs.insert_or_assign("out", storePath.clone()); + Buildable b{ + .drvPath = storePath.isDerivation() ? storePath.clone() : std::optional(), + .outputs = std::move(outputs) + }; + Buildables bs; + bs.push_back(std::move(b)); + return bs; } }; @@ -119,17 +130,17 @@ struct InstallableValue : Installable Buildables res; - PathSet drvPaths; + StorePathSet drvPaths; for (auto & drv : drvs) { - Buildable b{drv.queryDrvPath()}; - drvPaths.insert(b.drvPath); + Buildable b{.drvPath = state->store->parseStorePath(drv.queryDrvPath())}; + drvPaths.insert(b.drvPath->clone()); auto outputName = drv.queryOutputName(); if (outputName == "") - throw Error("derivation '%s' lacks an 'outputName' attribute", b.drvPath); + throw Error("derivation '%s' lacks an 'outputName' attribute", state->store->printStorePath(*b.drvPath)); - b.outputs.emplace(outputName, drv.queryOutPath()); + b.outputs.emplace(outputName, state->store->parseStorePath(drv.queryOutPath())); res.push_back(std::move(b)); } @@ -137,10 +148,13 @@ struct InstallableValue : Installable // Hack to recognize .all: if all drvs have the same drvPath, // merge the buildables. 
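A note on the pervasive .clone() calls introduced in the hunks above: the new store-path type is move-only, so every copy has to be spelled out. The following is a minimal, self-contained sketch of that pattern using a hypothetical Handle type (not the real StorePath class), just to show why the code writes drvPaths.insert(b.drvPath->clone()) instead of a plain insert:

#include <set>
#include <string>
#include <utility>

// Minimal sketch of a move-only value type with an explicit clone(),
// loosely modelled on the StorePath usage above (names are illustrative).
struct Handle
{
    std::string base;

    explicit Handle(std::string b) : base(std::move(b)) { }

    Handle(Handle &&) = default;              // moves are cheap and allowed
    Handle & operator=(Handle &&) = default;
    Handle(const Handle &) = delete;          // accidental copies are compile errors
    Handle & operator=(const Handle &) = delete;

    Handle clone() const { return Handle(base); }  // copies must be spelled out

    bool operator<(const Handle & other) const { return base < other.base; }
};

int main()
{
    Handle h("g5j0...-hello-2.10.drv");
    std::set<Handle> seen;
    seen.insert(h.clone());     // keep 'h' usable; insert an explicit copy
    seen.insert(std::move(h));  // or give up ownership entirely
}

The point of the pattern is that an accidental copy becomes a compile error rather than a silent allocation.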
if (drvPaths.size() == 1) { - Buildable b{*drvPaths.begin()}; + Buildable b{.drvPath = drvPaths.begin()->clone()}; for (auto & b2 : res) - b.outputs.insert(b2.outputs.begin(), b2.outputs.end()); - return {b}; + for (auto & output : b2.outputs) + b.outputs.insert_or_assign(output.first, output.second.clone()); + Buildables bs; + bs.push_back(std::move(b)); + return bs; } else return res; } @@ -211,7 +225,7 @@ static std::vector> parseInstallables( auto path = store->toStorePath(store->followLinksToStore(s)); if (store->isStorePath(path)) - result.push_back(std::make_shared(path)); + result.push_back(std::make_shared(store, path)); } else if (s == "" || std::regex_match(s, attrPathRegex)) @@ -241,19 +255,18 @@ Buildables build(ref store, RealiseMode mode, Buildables buildables; - PathSet pathsToBuild; + std::vector pathsToBuild; for (auto & i : installables) { for (auto & b : i->toBuildables()) { - if (b.drvPath != "") { + if (b.drvPath) { StringSet outputNames; for (auto & output : b.outputs) outputNames.insert(output.first); - pathsToBuild.insert( - b.drvPath + "!" + concatStringsSep(",", outputNames)); + pathsToBuild.push_back({*b.drvPath, outputNames}); } else for (auto & output : b.outputs) - pathsToBuild.insert(output.second); + pathsToBuild.push_back({output.second.clone()}); buildables.push_back(std::move(b)); } } @@ -266,19 +279,19 @@ Buildables build(ref store, RealiseMode mode, return buildables; } -PathSet toStorePaths(ref store, RealiseMode mode, +StorePathSet toStorePaths(ref store, RealiseMode mode, std::vector> installables) { - PathSet outPaths; + StorePathSet outPaths; for (auto & b : build(store, mode, installables)) for (auto & output : b.outputs) - outPaths.insert(output.second); + outPaths.insert(output.second.clone()); return outPaths; } -Path toStorePath(ref store, RealiseMode mode, +StorePath toStorePath(ref store, RealiseMode mode, std::shared_ptr installable) { auto paths = toStorePaths(store, mode, {installable}); @@ -286,17 +299,17 @@ Path toStorePath(ref store, RealiseMode mode, if (paths.size() != 1) throw Error("argument '%s' should evaluate to one store path", installable->what()); - return *paths.begin(); + return paths.begin()->clone(); } -PathSet toDerivations(ref store, +StorePathSet toDerivations(ref store, std::vector> installables, bool useDeriver) { - PathSet drvPaths; + StorePathSet drvPaths; for (auto & i : installables) for (auto & b : i->toBuildables()) { - if (b.drvPath.empty()) { + if (!b.drvPath) { if (!useDeriver) throw Error("argument '%s' did not evaluate to a derivation", i->what()); for (auto & output : b.outputs) { @@ -304,10 +317,10 @@ PathSet toDerivations(ref store, if (derivers.empty()) throw Error("'%s' does not have a known deriver", i->what()); // FIXME: use all derivers? 
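The Buildable refactor above also replaces the empty-string sentinel for a missing derivation path (b.drvPath != "") with std::optional. A tiny sketch of the before/after idea, using a hypothetical Buildable2 struct rather than the real type:

#include <iostream>
#include <map>
#include <optional>
#include <string>

// Hypothetical stand-in for the Buildable refactor above: an optional
// derivation path instead of "" meaning "no known derivation".
struct Buildable2
{
    std::optional<std::string> drvPath;          // nullopt = nothing to build
    std::map<std::string, std::string> outputs;  // output name -> store path
};

int main()
{
    Buildable2 a{std::nullopt, {{"out", "/nix/store/...-hello"}}};
    Buildable2 b{"/nix/store/...-hello.drv", {{"out", "/nix/store/...-hello"}}};

    for (auto & x : {a, b}) {
        if (x.drvPath)
            std::cout << "build " << *x.drvPath << "\n";
        else
            for (auto & [name, path] : x.outputs)
                std::cout << "substitute " << path << " (" << name << ")\n";
    }
}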
- drvPaths.insert(*derivers.begin()); + drvPaths.insert(derivers.begin()->clone()); } } else - drvPaths.insert(b.drvPath); + drvPaths.insert(b.drvPath->clone()); } return drvPaths; diff --git a/src/nix/list-tarballs.cc b/src/nix/list-tarballs.cc index 05eedc59d..ebf77433f 100644 --- a/src/nix/list-tarballs.cc +++ b/src/nix/list-tarballs.cc @@ -1,3 +1,4 @@ +#if 0 #include "command.hh" #include "eval.hh" #include "eval-inline.hh" @@ -138,3 +139,4 @@ struct CmdListTarballs : MixJSON, InstallablesCommand }; static RegisterCommand r1(make_ref()); +#endif diff --git a/src/nix/local.mk b/src/nix/local.mk index ca4604d56..a34cca9fd 100644 --- a/src/nix/local.mk +++ b/src/nix/local.mk @@ -15,9 +15,9 @@ nix_SOURCES := \ $(wildcard src/nix-prefetch-url/*.cc) \ $(wildcard src/nix-store/*.cc) \ -nix_LIBS = libexpr libmain libstore libutil +nix_LIBS = libexpr libmain libstore libutil libnixrust -nix_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) +nix_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) -lboost_context -lboost_thread -lboost_system $(foreach name, \ nix-build nix-channel nix-collect-garbage nix-copy-closure nix-daemon nix-env nix-hash nix-instantiate nix-prefetch-url nix-shell nix-store, \ diff --git a/src/nix/log.cc b/src/nix/log.cc index f07ec4e93..795991cb7 100644 --- a/src/nix/log.cc +++ b/src/nix/log.cc @@ -8,15 +8,6 @@ using namespace nix; struct CmdLog : InstallableCommand { - CmdLog() - { - } - - std::string name() override - { - return "log"; - } - std::string description() override { return "show the build log of the specified packages or paths, if available"; @@ -52,7 +43,7 @@ struct CmdLog : InstallableCommand RunPager pager; for (auto & sub : subs) { - auto log = b.drvPath != "" ? sub->getBuildLog(b.drvPath) : nullptr; + auto log = b.drvPath ? sub->getBuildLog(*b.drvPath) : nullptr; for (auto & output : b.outputs) { if (log) break; log = sub->getBuildLog(output.second); @@ -68,4 +59,4 @@ struct CmdLog : InstallableCommand } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("log"); diff --git a/src/nix/ls.cc b/src/nix/ls.cc index d089be42f..3ef1f2750 100644 --- a/src/nix/ls.cc +++ b/src/nix/ls.cc @@ -67,7 +67,7 @@ struct MixLs : virtual Args, MixJSON if (st.type == FSAccessor::Type::tMissing) throw Error(format("path '%1%' does not exist") % path); doPath(st, path, - st.type == FSAccessor::Type::tDirectory ? "." : baseNameOf(path), + st.type == FSAccessor::Type::tDirectory ? "." 
: std::string(baseNameOf(path)), showDirectory); } @@ -100,11 +100,6 @@ struct CmdLsStore : StoreCommand, MixLs }; } - std::string name() override - { - return "ls-store"; - } - std::string description() override { return "show information about a store path"; @@ -136,11 +131,6 @@ struct CmdLsNar : Command, MixLs }; } - std::string name() override - { - return "ls-nar"; - } - std::string description() override { return "show information about the contents of a NAR file"; @@ -152,5 +142,5 @@ struct CmdLsNar : Command, MixLs } }; -static RegisterCommand r1(make_ref()); -static RegisterCommand r2(make_ref()); +static auto r1 = registerCommand("ls-store"); +static auto r2 = registerCommand("ls-nar"); diff --git a/src/nix/main.cc b/src/nix/main.cc index 803f1ef80..a6313dd81 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -8,18 +8,54 @@ #include "shared.hh" #include "store-api.hh" #include "progress-bar.hh" +#include "download.hh" #include "finally.hh" +#include +#include +#include +#include +#include + extern std::string chrootHelperName; void chrootHelper(int argc, char * * argv); namespace nix { +/* Check if we have a non-loopback/link-local network interface. */ +static bool haveInternet() +{ + struct ifaddrs * addrs; + + if (getifaddrs(&addrs)) + return true; + + Finally free([&]() { freeifaddrs(addrs); }); + + for (auto i = addrs; i; i = i->ifa_next) { + if (!i->ifa_addr) continue; + if (i->ifa_addr->sa_family == AF_INET) { + if (ntohl(((sockaddr_in *) i->ifa_addr)->sin_addr.s_addr) != INADDR_LOOPBACK) { + return true; + } + } else if (i->ifa_addr->sa_family == AF_INET6) { + if (!IN6_IS_ADDR_LOOPBACK(&((sockaddr_in6 *) i->ifa_addr)->sin6_addr) && + !IN6_IS_ADDR_LINKLOCAL(&((sockaddr_in6 *) i->ifa_addr)->sin6_addr)) + return true; + } + } + + return false; +} + std::string programPath; struct NixArgs : virtual MultiCommand, virtual MixCommonArgs { + bool printBuildLogs = false; + bool useNet = true; + NixArgs() : MultiCommand(*RegisterCommand::commands), MixCommonArgs("nix") { mkFlag() @@ -41,10 +77,21 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs throw Exit(); }); + mkFlag() + .longName("print-build-logs") + .shortName('L') + .description("print full build logs on stderr") + .set(&printBuildLogs, true); + mkFlag() .longName("version") .description("show version information") .handler([&]() { printVersion(programName); }); + + mkFlag() + .longName("no-net") + .description("disable substituters and consider all previously downloaded files up-to-date") + .handler([&]() { useNet = false; }); } void printFlags(std::ostream & out) override @@ -57,10 +104,20 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs "--help-config' for a list of configuration settings.\n"; } + void printHelp(const string & programName, std::ostream & out) override + { + MultiCommand::printHelp(programName, out); + +#if 0 + out << "\nFor full documentation, run 'man " << programName << "' or 'man " << programName << "-'.\n"; +#endif + + std::cout << "\nNote: this program is EXPERIMENTAL and subject to change.\n"; + } + void showHelpAndExit() { printHelp(programName, std::cout); - std::cout << "\nNote: this program is EXPERIMENTAL and subject to change.\n"; throw Exit(); } }; @@ -77,28 +134,46 @@ void mainWrapped(int argc, char * * argv) initNix(); programPath = argv[0]; - string programName = baseNameOf(programPath); + auto programName = std::string(baseNameOf(programPath)); { auto legacy = (*RegisterLegacyCommand::commands)[programName]; if (legacy) return legacy(argc, argv); } - 
verbosity = lvlError; + verbosity = lvlWarn; settings.verboseBuild = false; NixArgs args; args.parseCmdline(argvToStrings(argc, argv)); + settings.requireExperimentalFeature("nix-command"); + initPlugins(); if (!args.command) args.showHelpAndExit(); Finally f([]() { stopProgressBar(); }); - if (isatty(STDERR_FILENO)) - startProgressBar(); + startProgressBar(args.printBuildLogs); + + if (args.useNet && !haveInternet()) { + warn("you don't have Internet access; disabling some network-dependent features"); + args.useNet = false; + } + + if (!args.useNet) { + // FIXME: should check for command line overrides only. + if (!settings.useSubstitutes.overriden) + settings.useSubstitutes = false; + if (!settings.tarballTtl.overriden) + settings.tarballTtl = std::numeric_limits::max(); + if (!downloadSettings.tries.overriden) + downloadSettings.tries = 0; + if (!downloadSettings.connectTimeout.overriden) + downloadSettings.connectTimeout = 1; + } args.command->prepare(); args.command->run(); diff --git a/src/nix/make-content-addressable.cc b/src/nix/make-content-addressable.cc new file mode 100644 index 000000000..f9c7fef3f --- /dev/null +++ b/src/nix/make-content-addressable.cc @@ -0,0 +1,103 @@ +#include "command.hh" +#include "store-api.hh" +#include "references.hh" +#include "common-args.hh" +#include "json.hh" + +using namespace nix; + +struct CmdMakeContentAddressable : StorePathsCommand, MixJSON +{ + CmdMakeContentAddressable() + { + realiseMode = Build; + } + + std::string description() override + { + return "rewrite a path or closure to content-addressable form"; + } + + Examples examples() override + { + return { + Example{ + "To create a content-addressable representation of GNU Hello (but not its dependencies):", + "nix make-content-addressable nixpkgs.hello" + }, + Example{ + "To compute a content-addressable representation of the current NixOS system closure:", + "nix make-content-addressable -r /run/current-system" + }, + }; + } + void run(ref store, StorePaths storePaths) override + { + auto paths = store->topoSortPaths(storePathsToSet(storePaths)); + + std::reverse(paths.begin(), paths.end()); + + std::map remappings; + + auto jsonRoot = json ? std::make_unique(std::cout) : nullptr; + auto jsonRewrites = json ? std::make_unique(jsonRoot->object("rewrites")) : nullptr; + + for (auto & path : paths) { + auto pathS = store->printStorePath(path); + auto oldInfo = store->queryPathInfo(path); + auto oldHashPart = storePathToHash(pathS); + + StringSink sink; + store->narFromPath(path, sink); + + StringMap rewrites; + + StorePathSet references; + bool hasSelfReference = false; + for (auto & ref : oldInfo->references) { + if (ref == path) + hasSelfReference = true; + else { + auto i = remappings.find(ref); + auto replacement = i != remappings.end() ? i->second.clone() : ref.clone(); + // FIXME: warn about unremapped paths? 
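The rewrites map built above is then applied to the NAR serialisation of the path. As a rough illustration of that step, here is a simplified stand-in for the rewriting (the real rewriteStrings/RewritingSink operate on hash parts and on streams; rewriteAll and the toy paths below are made up):

#include <iostream>
#include <map>
#include <string>

// Simplified stand-in for the rewriting step used above: replace every
// occurrence of each old store path with its remapped counterpart.
static std::string rewriteAll(std::string s, const std::map<std::string, std::string> & rewrites)
{
    for (auto & [from, to] : rewrites) {
        for (std::string::size_type pos = 0; (pos = s.find(from, pos)) != std::string::npos; pos += to.size())
            s.replace(pos, from.size(), to);
    }
    return s;
}

int main()
{
    std::map<std::string, std::string> rewrites{
        {"/nix/store/aaaa-dep", "/nix/store/bbbb-dep"}
    };
    std::string nar = "ref to /nix/store/aaaa-dep inside the NAR";
    std::cout << rewriteAll(std::move(nar), rewrites) << "\n";
}

In the command above the old and new paths keep the same name and have fixed-size hash parts, so the rewritten strings have equal length and offsets inside the NAR are preserved.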
+ if (replacement != ref) + rewrites.insert_or_assign(store->printStorePath(ref), store->printStorePath(replacement)); + references.insert(std::move(replacement)); + } + } + + *sink.s = rewriteStrings(*sink.s, rewrites); + + HashModuloSink hashModuloSink(htSHA256, oldHashPart); + hashModuloSink((unsigned char *) sink.s->data(), sink.s->size()); + + auto narHash = hashModuloSink.finish().first; + + ValidPathInfo info(store->makeFixedOutputPath(true, narHash, path.name(), references, hasSelfReference)); + info.references = std::move(references); + if (hasSelfReference) info.references.insert(info.path.clone()); + info.narHash = narHash; + info.narSize = sink.s->size(); + info.ca = makeFixedOutputCA(true, info.narHash); + + if (!json) + printError("rewrote '%s' to '%s'", pathS, store->printStorePath(info.path)); + + auto source = sinkToSource([&](Sink & nextSink) { + RewritingSink rsink2(oldHashPart, storePathToHash(store->printStorePath(info.path)), nextSink); + rsink2((unsigned char *) sink.s->data(), sink.s->size()); + rsink2.flush(); + }); + + store->addToStore(info, *source); + + if (json) + jsonRewrites->attr(store->printStorePath(path), store->printStorePath(info.path)); + + remappings.insert_or_assign(std::move(path), std::move(info.path)); + } + } +}; + +static auto r1 = registerCommand("make-content-addressable"); diff --git a/src/nix/optimise-store.cc b/src/nix/optimise-store.cc index 725fb75a1..fed012b04 100644 --- a/src/nix/optimise-store.cc +++ b/src/nix/optimise-store.cc @@ -8,15 +8,6 @@ using namespace nix; struct CmdOptimiseStore : StoreCommand { - CmdOptimiseStore() - { - } - - std::string name() override - { - return "optimise-store"; - } - std::string description() override { return "replace identical files in the store by hard links"; @@ -38,4 +29,4 @@ struct CmdOptimiseStore : StoreCommand } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("optimise-store"); diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index dea5f0557..bffa7b356 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -24,11 +24,6 @@ struct CmdPathInfo : StorePathsCommand, MixJSON mkFlag(0, "sigs", "show signatures", &showSigs); } - std::string name() override - { - return "path-info"; - } - std::string description() override { return "query information about store paths"; @@ -83,36 +78,36 @@ struct CmdPathInfo : StorePathsCommand, MixJSON std::cout << fmt("\t%6.1f%c", res, idents.at(power)); } - void run(ref store, Paths storePaths) override + void run(ref store, StorePaths storePaths) override { size_t pathLen = 0; for (auto & storePath : storePaths) - pathLen = std::max(pathLen, storePath.size()); + pathLen = std::max(pathLen, store->printStorePath(storePath).size()); if (json) { JSONPlaceholder jsonRoot(std::cout); store->pathInfoToJSON(jsonRoot, // FIXME: preserve order? 
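The loop above depends on processing the closure in reverse topological order, so that each path's references have already been remapped by the time the path itself is rewritten. A toy version of just that ordering logic follows (the path names and the trailing apostrophe marking a rewritten path are made up; the real code rewrites NARs and computes hashes modulo self-references):

#include <algorithm>
#include <iostream>
#include <map>
#include <string>
#include <vector>

// Sketch of the remapping loop above on a toy dependency graph: paths are
// processed dependencies-first, so every reference can already be looked up
// in 'remappings' when its referrer is rewritten.
int main()
{
    // topoSortPaths() returns referrers before their references; reversing it
    // gives a dependencies-first order, as in the command above.
    std::vector<std::string> topo{"app", "lib", "base"};
    std::reverse(topo.begin(), topo.end());   // base, lib, app

    std::map<std::string, std::vector<std::string>> refs{
        {"base", {}}, {"lib", {"base"}}, {"app", {"lib", "base"}}};

    std::map<std::string, std::string> remappings;

    for (auto & path : topo) {
        std::string newPath = path + "'";     // stand-in for the content-addressed path
        for (auto & ref : refs[path]) {
            auto i = remappings.find(ref);
            std::cout << path << " -> " << newPath << " rewrites " << ref
                      << " to " << (i != remappings.end() ? i->second : ref) << "\n";
        }
        remappings[path] = newPath;
    }
}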
- PathSet(storePaths.begin(), storePaths.end()), + storePathsToSet(storePaths), true, showClosureSize, AllowInvalid); } else { - for (auto storePath : storePaths) { + for (auto & storePath : storePaths) { auto info = store->queryPathInfo(storePath); - storePath = info->path; // FIXME: screws up padding + auto storePathS = store->printStorePath(storePath); - std::cout << storePath; + std::cout << storePathS; if (showSize || showClosureSize || showSigs) - std::cout << std::string(std::max(0, (int) pathLen - (int) storePath.size()), ' '); + std::cout << std::string(std::max(0, (int) pathLen - (int) storePathS.size()), ' '); if (showSize) printSize(info->narSize); if (showClosureSize) - printSize(store->getClosureSize(storePath).first); + printSize(store->getClosureSize(info->path).first); if (showSigs) { std::cout << '\t'; @@ -130,4 +125,4 @@ struct CmdPathInfo : StorePathsCommand, MixJSON } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("path-info"); diff --git a/src/nix/ping-store.cc b/src/nix/ping-store.cc index 310942574..3a2e542a3 100644 --- a/src/nix/ping-store.cc +++ b/src/nix/ping-store.cc @@ -6,11 +6,6 @@ using namespace nix; struct CmdPingStore : StoreCommand { - std::string name() override - { - return "ping-store"; - } - std::string description() override { return "test whether a store can be opened"; @@ -32,4 +27,4 @@ struct CmdPingStore : StoreCommand } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("ping-store"); diff --git a/src/nix/progress-bar.cc b/src/nix/progress-bar.cc index 40b905ba3..c445f31cc 100644 --- a/src/nix/progress-bar.cc +++ b/src/nix/progress-bar.cc @@ -2,6 +2,7 @@ #include "util.hh" #include "sync.hh" #include "store-api.hh" +#include "names.hh" #include #include @@ -23,6 +24,13 @@ static uint64_t getI(const std::vector & fields, size_t n) return fields[n].i; } +static std::string_view storePathToName(std::string_view path) +{ + auto base = baseNameOf(path); + auto i = base.find('-'); + return i == std::string::npos ? 
base.substr(0, 0) : base.substr(i + 1); +} + class ProgressBar : public Logger { private: @@ -38,6 +46,7 @@ private: std::map expectedByType; bool visible = true; ActivityId parent; + std::optional name; }; struct ActivitiesByType @@ -60,6 +69,7 @@ private: uint64_t corruptedPaths = 0, untrustedPaths = 0; bool active = true; + bool haveUpdate = true; }; Sync state_; @@ -68,14 +78,21 @@ private: std::condition_variable quitCV, updateCV; + bool printBuildLogs; + bool isTTY; + public: - ProgressBar() + ProgressBar(bool printBuildLogs, bool isTTY) + : printBuildLogs(printBuildLogs) + , isTTY(isTTY) { + state_.lock()->active = isTTY; updateThread = std::thread([&]() { auto state(state_.lock()); while (state->active) { - state.wait(updateCV); + if (!state->haveUpdate) + state.wait(updateCV); draw(*state); state.wait_for(quitCV, std::chrono::milliseconds(50)); } @@ -109,8 +126,14 @@ public: void log(State & state, Verbosity lvl, const std::string & s) { - writeToStderr("\r\e[K" + s + ANSI_NORMAL "\n"); - draw(state); + if (state.active) { + writeToStderr("\r\e[K" + filterANSIEscapes(s, !isTTY) + ANSI_NORMAL "\n"); + draw(state); + } else { + auto s2 = s + ANSI_NORMAL "\n"; + if (!isTTY) s2 = filterANSIEscapes(s2, true); + writeToStderr(s2); + } } void startActivity(ActivityId act, Verbosity lvl, ActivityType type, @@ -132,7 +155,7 @@ public: if (type == actBuild) { auto name = storePathToName(getS(fields, 0)); if (hasSuffix(name, ".drv")) - name.resize(name.size() - 4); + name = name.substr(0, name.size() - 4); i->s = fmt("building " ANSI_BOLD "%s" ANSI_NORMAL, name); auto machineName = getS(fields, 1); if (machineName != "") @@ -141,6 +164,7 @@ public: auto nrRounds = getI(fields, 3); if (nrRounds != 1) i->s += fmt(" (round %d/%d)", curRound, nrRounds); + i->name = DrvName(name).name; } if (type == actSubstitute) { @@ -153,6 +177,14 @@ public: name, sub); } + if (type == actPostBuildHook) { + auto name = storePathToName(getS(fields, 0)); + if (hasSuffix(name, ".drv")) + name = name.substr(0, name.size() - 4); + i->s = fmt("post-build " ANSI_BOLD "%s" ANSI_NORMAL, name); + i->name = DrvName(name).name; + } + if (type == actQueryPathInfo) { auto name = storePathToName(getS(fields, 0)); i->s = fmt("querying " ANSI_BOLD "%s" ANSI_NORMAL " on %s", name, getS(fields, 1)); @@ -163,7 +195,7 @@ public: || (type == actCopyPath && hasAncestor(*state, actSubstitute, parent))) i->visible = false; - update(); + update(*state); } /* Check whether an activity has an ancestore with the specified @@ -198,7 +230,7 @@ public: state->its.erase(i); } - update(); + update(*state); } void result(ActivityId act, ResultType type, const std::vector & fields) override @@ -208,38 +240,46 @@ public: if (type == resFileLinked) { state->filesLinked++; state->bytesLinked += getI(fields, 0); - update(); + update(*state); } - else if (type == resBuildLogLine) { + else if (type == resBuildLogLine || type == resPostBuildLogLine) { auto lastLine = trim(getS(fields, 0)); if (!lastLine.empty()) { auto i = state->its.find(act); assert(i != state->its.end()); ActInfo info = *i->second; - state->activities.erase(i->second); - info.lastLine = lastLine; - state->activities.emplace_back(info); - i->second = std::prev(state->activities.end()); - update(); + if (printBuildLogs) { + auto suffix = "> "; + if (type == resPostBuildLogLine) { + suffix = " (post)> "; + } + log(*state, lvlInfo, ANSI_FAINT + info.name.value_or("unnamed") + suffix + ANSI_NORMAL + lastLine); + } else { + state->activities.erase(i->second); + info.lastLine = lastLine; + 
state->activities.emplace_back(info); + i->second = std::prev(state->activities.end()); + update(*state); + } } } else if (type == resUntrustedPath) { state->untrustedPaths++; - update(); + update(*state); } else if (type == resCorruptedPath) { state->corruptedPaths++; - update(); + update(*state); } else if (type == resSetPhase) { auto i = state->its.find(act); assert(i != state->its.end()); i->second->phase = getS(fields, 0); - update(); + update(*state); } else if (type == resProgress) { @@ -250,7 +290,7 @@ public: actInfo.expected = getI(fields, 1); actInfo.running = getI(fields, 2); actInfo.failed = getI(fields, 3); - update(); + update(*state); } else if (type == resSetExpected) { @@ -262,17 +302,19 @@ public: state->activitiesByType[type].expected -= j; j = getI(fields, 1); state->activitiesByType[type].expected += j; - update(); + update(*state); } } - void update() + void update(State & state) { + state.haveUpdate = true; updateCV.notify_one(); } void draw(State & state) { + state.haveUpdate = false; if (!state.active) return; std::string line; @@ -306,7 +348,7 @@ public: } auto width = getWindowSize().second; - if (width <= 0) std::numeric_limits::max(); + if (width <= 0) width = std::numeric_limits::max(); writeToStderr("\r" + filterANSIEscapes(line, false, width) + "\e[K"); } @@ -333,11 +375,18 @@ public: if (running || done || expected || failed) { if (running) - s = fmt(ANSI_BLUE + numberFmt + ANSI_NORMAL "/" ANSI_GREEN + numberFmt + ANSI_NORMAL "/" + numberFmt, - running / unit, done / unit, expected / unit); + if (expected != 0) + s = fmt(ANSI_BLUE + numberFmt + ANSI_NORMAL "/" ANSI_GREEN + numberFmt + ANSI_NORMAL "/" + numberFmt, + running / unit, done / unit, expected / unit); + else + s = fmt(ANSI_BLUE + numberFmt + ANSI_NORMAL "/" ANSI_GREEN + numberFmt + ANSI_NORMAL, + running / unit, done / unit); else if (expected != done) - s = fmt(ANSI_GREEN + numberFmt + ANSI_NORMAL "/" + numberFmt, - done / unit, expected / unit); + if (expected != 0) + s = fmt(ANSI_GREEN + numberFmt + ANSI_NORMAL "/" + numberFmt, + done / unit, expected / unit); + else + s = fmt(ANSI_GREEN + numberFmt + ANSI_NORMAL, done / unit); else s = fmt(done ? ANSI_GREEN + numberFmt + ANSI_NORMAL : numberFmt, done / unit); s = fmt(itemFmt, s); @@ -395,9 +444,9 @@ public: } }; -void startProgressBar() +void startProgressBar(bool printBuildLogs) { - logger = new ProgressBar(); + logger = new ProgressBar(printBuildLogs, isatty(STDERR_FILENO)); } void stopProgressBar() diff --git a/src/nix/progress-bar.hh b/src/nix/progress-bar.hh index af8eda5a8..4d61175c2 100644 --- a/src/nix/progress-bar.hh +++ b/src/nix/progress-bar.hh @@ -4,7 +4,7 @@ namespace nix { -void startProgressBar(); +void startProgressBar(bool printBuildLogs = false); void stopProgressBar(); diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 34b853cca..b6e1afb72 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -9,12 +9,20 @@ #include #include #else +// editline < 1.15.2 don't wrap their API for C++ usage +// (added in https://github.com/troglobit/editline/commit/91398ceb3427b730995357e9d120539fb9bb7461). +// This results in linker errors due to to name-mangling of editline C symbols. +// For compatibility with these versions, we wrap the API here +// (wrapping multiple times on newer versions is no problem). 
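The haveUpdate flag added to the progress bar above coalesces bursts of updates into a single redraw. Below is a stand-alone sketch of the same pattern using plain std::mutex/std::condition_variable instead of Nix's Sync wrapper; the Redrawer type and its members are illustrative, not the real ProgressBar API:

#include <chrono>
#include <condition_variable>
#include <mutex>
#include <thread>

// Producers set a dirty flag and notify; the drawing thread redraws at most
// once per wake-up and rate-limits itself, so bursts of updates collapse
// into a single draw.
struct Redrawer
{
    std::mutex m;
    std::condition_variable cv;
    bool haveUpdate = false;
    bool active = true;

    void update()                      // called from logging code
    {
        { std::lock_guard<std::mutex> l(m); haveUpdate = true; }
        cv.notify_one();
    }

    void loop()                        // runs in the draw thread
    {
        std::unique_lock<std::mutex> l(m);
        while (active) {
            cv.wait(l, [&] { return haveUpdate || !active; });
            haveUpdate = false;
            draw();                    // render while holding the lock
            // Rate-limit: don't redraw more often than every 50 ms.
            cv.wait_for(l, std::chrono::milliseconds(50), [&] { return !active; });
        }
    }

    void stop()
    {
        { std::lock_guard<std::mutex> l(m); active = false; }
        cv.notify_all();
    }

    void draw() { /* write "\r..." + status line to stderr */ }
};

int main()
{
    Redrawer r;
    std::thread t([&] { r.loop(); });
    for (int i = 0; i < 100; ++i) r.update();   // bursts collapse into a few draws
    r.stop();
    t.join();
}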
+extern "C" { #include +} #endif #include "shared.hh" #include "eval.hh" #include "eval-inline.hh" +#include "attr-path.hh" #include "store-api.hh" #include "common-eval-args.hh" #include "get-drvs.hh" @@ -192,6 +200,14 @@ static int listPossibleCallback(char *s, char ***avp) { return ac; } +namespace { + // Used to communicate to NixRepl::getLine whether a signal occurred in ::readline. + volatile sig_atomic_t g_signal_received = 0; + + void sigintHandler(int signo) { + g_signal_received = signo; + } +} void NixRepl::mainLoop(const std::vector & files) { @@ -251,8 +267,40 @@ void NixRepl::mainLoop(const std::vector & files) bool NixRepl::getLine(string & input, const std::string &prompt) { + struct sigaction act, old; + sigset_t savedSignalMask, set; + + auto setupSignals = [&]() { + act.sa_handler = sigintHandler; + sigfillset(&act.sa_mask); + act.sa_flags = 0; + if (sigaction(SIGINT, &act, &old)) + throw SysError("installing handler for SIGINT"); + + sigemptyset(&set); + sigaddset(&set, SIGINT); + if (sigprocmask(SIG_UNBLOCK, &set, &savedSignalMask)) + throw SysError("unblocking SIGINT"); + }; + auto restoreSignals = [&]() { + if (sigprocmask(SIG_SETMASK, &savedSignalMask, nullptr)) + throw SysError("restoring signals"); + + if (sigaction(SIGINT, &old, 0)) + throw SysError("restoring handler for SIGINT"); + }; + + setupSignals(); char * s = readline(prompt.c_str()); Finally doFree([&]() { free(s); }); + restoreSignals(); + + if (g_signal_received) { + g_signal_received = 0; + input.clear(); + return true; + } + if (!s) return false; input += s; @@ -365,7 +413,7 @@ Path NixRepl::getDerivationPath(Value & v) { if (!drvInfo) throw Error("expression does not evaluate to a derivation, so I can't build it"); Path drvPath = drvInfo->queryDrvPath(); - if (drvPath == "" || !state.store->isValidPath(drvPath)) + if (drvPath == "" || !state.store->isValidPath(state.store->parseStorePath(drvPath))) throw Error("expression did not evaluate to a valid derivation"); return drvPath; } @@ -393,6 +441,7 @@ bool NixRepl::processLine(string line) << " = Bind expression to variable\n" << " :a Add attributes from resulting set to scope\n" << " :b Build derivation\n" + << " :e Open the derivation in $EDITOR\n" << " :i Build derivation, then install result into current profile\n" << " :l Load Nix expression and add it to scope\n" << " :p Evaluate and print expression recursively\n" @@ -419,6 +468,34 @@ bool NixRepl::processLine(string line) reloadFiles(); } + else if (command == ":e" || command == ":edit") { + Root v; + evalString(arg, v); + + Pos pos; + + if (v->type == tPath || v->isString()) { + PathSet context; + auto filename = state.coerceToString(noPos, v, context); + pos.file = state.symbols.create(filename); + } else if (v->type == tLambda) { + pos = v->lambda.fun->pos; + } else { + // assume it's a derivation + pos = findDerivationFilename(state, v, arg); + } + + // Open in EDITOR + auto args = editorFor(pos); + auto editor = args.front(); + args.pop_front(); + runProgram(editor, args); + + // Reload right after exiting the editor + state.resetFileCache(); + reloadFiles(); + } + else if (command == ":t") { Root v; evalString(arg, v); @@ -444,10 +521,10 @@ bool NixRepl::processLine(string line) but doing it in a child makes it easier to recover from problems / SIGINT. 
*/ if (runProgram(settings.nixBinDir + "/nix", Strings{"build", "--no-link", drvPath}) == 0) { - Derivation drv = readDerivation(drvPath); + auto drv = readDerivation(*state.store, drvPath); std::cout << std::endl << "this derivation produced the following outputs:" << std::endl; for (auto & i : drv.outputs) - std::cout << format(" %1% -> %2%") % i.first % i.second.path << std::endl; + std::cout << fmt(" %s -> %s\n", i.first, state.store->printStorePath(i.second.path)); } } else if (command == ":i") { runProgram(settings.nixBinDir + "/nix-env", Strings{"-i", drvPath}); @@ -727,8 +804,6 @@ struct CmdRepl : StoreCommand, MixEvalArgs expectArgs("files", &files); } - std::string name() override { return "repl"; } - std::string description() override { return "start an interactive environment for evaluating Nix expressions"; @@ -742,6 +817,6 @@ struct CmdRepl : StoreCommand, MixEvalArgs } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("repl"); } diff --git a/src/nix/run.cc b/src/nix/run.cc index 35b763345..f885c5e49 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -61,11 +61,6 @@ struct CmdRun : InstallablesCommand .handler([&](std::vector ss) { unset.insert(ss.front()); }); } - std::string name() override - { - return "run"; - } - std::string description() override { return "run a shell in which the specified packages are available"; @@ -124,24 +119,24 @@ struct CmdRun : InstallablesCommand unsetenv(var.c_str()); } - std::unordered_set done; - std::queue todo; - for (auto & path : outPaths) todo.push(path); + std::unordered_set done; + std::queue todo; + for (auto & path : outPaths) todo.push(path.clone()); - auto unixPath = tokenizeString(getEnv("PATH"), ":"); + auto unixPath = tokenizeString(getEnv("PATH").value_or(""), ":"); while (!todo.empty()) { - Path path = todo.front(); + auto path = todo.front().clone(); todo.pop(); - if (!done.insert(path).second) continue; + if (!done.insert(path.clone()).second) continue; if (true) - unixPath.push_front(path + "/bin"); + unixPath.push_front(store->printStorePath(path) + "/bin"); - auto propPath = path + "/nix-support/propagated-user-env-packages"; + auto propPath = store->printStorePath(path) + "/nix-support/propagated-user-env-packages"; if (accessor->stat(propPath).type == FSAccessor::tRegular) { for (auto & p : tokenizeString(readFile(propPath))) - todo.push(p); + todo.push(store->parseStorePath(p)); } } @@ -182,7 +177,7 @@ struct CmdRun : InstallablesCommand } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("run"); void chrootHelper(int argc, char * * argv) { @@ -199,7 +194,10 @@ void chrootHelper(int argc, char * * argv) uid_t gid = getgid(); if (unshare(CLONE_NEWUSER | CLONE_NEWNS) == -1) - throw SysError("setting up a private mount namespace"); + /* Try with just CLONE_NEWNS in case user namespaces are + specifically disabled. */ + if (unshare(CLONE_NEWNS) == -1) + throw SysError("setting up a private mount namespace"); /* Bind-mount realStoreDir on /nix/store. 
If the latter mount point doesn't already exists, we have to create a chroot diff --git a/src/nix/search.cc b/src/nix/search.cc index 0d07103bc..b1fe6c8e6 100644 --- a/src/nix/search.cc +++ b/src/nix/search.cc @@ -52,11 +52,6 @@ struct CmdSearch : SourceExprCommand, MixJSON .handler([&]() { writeCache = false; useCache = false; }); } - std::string name() override - { - return "search"; - } - std::string description() override { return "query available packages"; @@ -80,10 +75,6 @@ struct CmdSearch : SourceExprCommand, MixJSON Example{ "To search for git and frontend or gui:", "nix search git 'frontend|gui'" - }, - Example{ - "To display the description of the found packages:", - "nix search git --verbose" } }; } @@ -265,6 +256,7 @@ struct CmdSearch : SourceExprCommand, MixJSON https://gcc.gnu.org/bugzilla/show_bug.cgi?id=66145 */ if (!jsonCacheFile) throw Error("error writing to %s", tmpFile); + throw; } if (writeCache && rename(tmpFile.c_str(), jsonCacheFileName.c_str()) == -1) @@ -280,4 +272,4 @@ struct CmdSearch : SourceExprCommand, MixJSON } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("search"); diff --git a/src/nix/show-config.cc b/src/nix/show-config.cc index 86638b50d..87544f937 100644 --- a/src/nix/show-config.cc +++ b/src/nix/show-config.cc @@ -8,15 +8,6 @@ using namespace nix; struct CmdShowConfig : Command, MixJSON { - CmdShowConfig() - { - } - - std::string name() override - { - return "show-config"; - } - std::string description() override { return "show the Nix configuration"; @@ -37,4 +28,4 @@ struct CmdShowConfig : Command, MixJSON } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("show-config"); diff --git a/src/nix/show-derivation.cc b/src/nix/show-derivation.cc index ee94fded3..0ede7b468 100644 --- a/src/nix/show-derivation.cc +++ b/src/nix/show-derivation.cc @@ -22,11 +22,6 @@ struct CmdShowDerivation : InstallablesCommand .set(&recursive, true); } - std::string name() override - { - return "show-derivation"; - } - std::string description() override { return "show the contents of a store derivation"; @@ -51,9 +46,9 @@ struct CmdShowDerivation : InstallablesCommand auto drvPaths = toDerivations(store, installables, true); if (recursive) { - PathSet closure; + StorePathSet closure; store->computeFSClosure(drvPaths, closure); - drvPaths = closure; + drvPaths = std::move(closure); } { @@ -61,17 +56,19 @@ struct CmdShowDerivation : InstallablesCommand JSONObject jsonRoot(std::cout, true); for (auto & drvPath : drvPaths) { - if (!isDerivation(drvPath)) continue; + if (!drvPath.isDerivation()) continue; - auto drvObj(jsonRoot.object(drvPath)); + auto drvPathS = store->printStorePath(drvPath); - auto drv = readDerivation(drvPath); + auto drvObj(jsonRoot.object(drvPathS)); + + auto drv = readDerivation(*store, drvPathS); { auto outputsObj(drvObj.object("outputs")); for (auto & output : drv.outputs) { auto outputObj(outputsObj.object(output.first)); - outputObj.attr("path", output.second.path); + outputObj.attr("path", store->printStorePath(output.second.path)); if (output.second.hash != "") { outputObj.attr("hashAlgo", output.second.hashAlgo); outputObj.attr("hash", output.second.hash); @@ -82,13 +79,13 @@ struct CmdShowDerivation : InstallablesCommand { auto inputsList(drvObj.list("inputSrcs")); for (auto & input : drv.inputSrcs) - inputsList.elem(input); + inputsList.elem(store->printStorePath(input)); } { auto inputDrvsObj(drvObj.object("inputDrvs")); for (auto & input : drv.inputDrvs) { - auto 
inputList(inputDrvsObj.list(input.first)); + auto inputList(inputDrvsObj.list(store->printStorePath(input.first))); for (auto & outputId : input.second) inputList.elem(outputId); } @@ -116,4 +113,4 @@ struct CmdShowDerivation : InstallablesCommand } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("show-derivation"); diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc index b1825c412..5f07448e0 100644 --- a/src/nix/sigs.cc +++ b/src/nix/sigs.cc @@ -22,17 +22,12 @@ struct CmdCopySigs : StorePathsCommand .handler([&](std::vector ss) { substituterUris.push_back(ss[0]); }); } - std::string name() override - { - return "copy-sigs"; - } - std::string description() override { return "copy path signatures from substituters (like binary caches)"; } - void run(ref store, Paths storePaths) override + void run(ref store, StorePaths storePaths) override { if (substituterUris.empty()) throw UsageError("you must specify at least one substituter using '-s'"); @@ -49,18 +44,20 @@ struct CmdCopySigs : StorePathsCommand //logger->setExpected(doneLabel, storePaths.size()); - auto doPath = [&](const Path & storePath) { + auto doPath = [&](const Path & storePathS) { //Activity act(*logger, lvlInfo, format("getting signatures for '%s'") % storePath); checkInterrupt(); + auto storePath = store->parseStorePath(storePathS); + auto info = store->queryPathInfo(storePath); StringSet newSigs; for (auto & store2 : substituters) { try { - auto info2 = store2->queryPathInfo(storePath); + auto info2 = store2->queryPathInfo(info->path); /* Don't import signatures that don't match this binary. */ @@ -85,15 +82,15 @@ struct CmdCopySigs : StorePathsCommand }; for (auto & storePath : storePaths) - pool.enqueue(std::bind(doPath, storePath)); + pool.enqueue(std::bind(doPath, store->printStorePath(storePath))); pool.process(); - printInfo(format("imported %d signatures") % added); + printInfo("imported %d signatures", added); } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("copy-sigs"); struct CmdSignPaths : StorePathsCommand { @@ -109,17 +106,12 @@ struct CmdSignPaths : StorePathsCommand .dest(&secretKeyFile); } - std::string name() override - { - return "sign-paths"; - } - std::string description() override { return "sign the specified paths"; } - void run(ref store, Paths storePaths) override + void run(ref store, StorePaths storePaths) override { if (secretKeyFile.empty()) throw UsageError("you must specify a secret key file using '-k'"); @@ -133,7 +125,7 @@ struct CmdSignPaths : StorePathsCommand auto info2(*info); info2.sigs.clear(); - info2.sign(secretKey); + info2.sign(*store, secretKey); assert(!info2.sigs.empty()); if (!info->sigs.count(*info2.sigs.begin())) { @@ -142,8 +134,8 @@ struct CmdSignPaths : StorePathsCommand } } - printInfo(format("added %d signatures") % added); + printInfo("added %d signatures", added); } }; -static RegisterCommand r3(make_ref()); +static auto r2 = registerCommand("sign-paths"); diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc index 50e6d5eb7..2a1bf9674 100644 --- a/src/nix/upgrade-nix.cc +++ b/src/nix/upgrade-nix.cc @@ -30,11 +30,6 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand .dest(&storePathsUrl); } - std::string name() override - { - return "upgrade-nix"; - } - std::string description() override { return "upgrade Nix to the latest stable version"; @@ -63,13 +58,9 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand printInfo("upgrading Nix in profile '%s'", profileDir); - Path storePath; - { - Activity act(*logger, 
lvlInfo, actUnknown, "querying latest Nix version"); - storePath = getLatestNix(store); - } + auto storePath = getLatestNix(store); - auto version = DrvName(storePathToName(storePath)).version; + auto version = DrvName(storePath.name()).version; if (dryRun) { stopProgressBar(); @@ -78,13 +69,13 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand } { - Activity act(*logger, lvlInfo, actUnknown, fmt("downloading '%s'...", storePath)); + Activity act(*logger, lvlInfo, actUnknown, fmt("downloading '%s'...", store->printStorePath(storePath))); store->ensurePath(storePath); } { - Activity act(*logger, lvlInfo, actUnknown, fmt("verifying that '%s' works...", storePath)); - auto program = storePath + "/bin/nix-env"; + Activity act(*logger, lvlInfo, actUnknown, fmt("verifying that '%s' works...", store->printStorePath(storePath))); + auto program = store->printStorePath(storePath) + "/bin/nix-env"; auto s = runProgram(program, false, {"--version"}); if (s.find("Nix") == std::string::npos) throw Error("could not verify that '%s' works", program); @@ -93,9 +84,10 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand stopProgressBar(); { - Activity act(*logger, lvlInfo, actUnknown, fmt("installing '%s' into profile '%s'...", storePath, profileDir)); + Activity act(*logger, lvlInfo, actUnknown, + fmt("installing '%s' into profile '%s'...", store->printStorePath(storePath), profileDir)); runProgram(settings.nixBinDir + "/nix-env", false, - {"--profile", profileDir, "-i", storePath, "--no-sandbox"}); + {"--profile", profileDir, "-i", store->printStorePath(storePath), "--no-sandbox"}); } printError(ANSI_GREEN "upgrade to version %s done" ANSI_NORMAL, version); @@ -106,7 +98,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand { Path where; - for (auto & dir : tokenizeString(getEnv("PATH"), ":")) + for (auto & dir : tokenizeString(getEnv("PATH").value_or(""), ":")) if (pathExists(dir + "/nix-env")) { where = dir; break; @@ -134,15 +126,17 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand !hasSuffix(userEnv, "user-environment")) throw Error("directory '%s' does not appear to be part of a Nix profile", where); - if (!store->isValidPath(userEnv)) + if (!store->isValidPath(store->parseStorePath(userEnv))) throw Error("directory '%s' is not in the Nix store", userEnv); return profileDir; } /* Return the store path of the latest stable Nix. */ - Path getLatestNix(ref store) + StorePath getLatestNix(ref store) { + Activity act(*logger, lvlInfo, actUnknown, "querying latest Nix version"); + // FIXME: use nixos.org? 
auto req = DownloadRequest(storePathsUrl); auto res = getDownloader()->download(req); @@ -153,8 +147,8 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand Bindings & bindings(*Bindings::allocBindings(0)); auto v2 = findAlongAttrPath(*state, settings.thisSystem, bindings, *v); - return state->forceString(*v2); + return store->parseStorePath(state->forceString(*v2)); } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("upgrade-nix"); diff --git a/src/nix/verify.cc b/src/nix/verify.cc index 7ef571561..9b0658803 100644 --- a/src/nix/verify.cc +++ b/src/nix/verify.cc @@ -3,6 +3,7 @@ #include "store-api.hh" #include "sync.hh" #include "thread-pool.hh" +#include "references.hh" #include @@ -13,7 +14,7 @@ struct CmdVerify : StorePathsCommand bool noContents = false; bool noTrust = false; Strings substituterUris; - size_t sigsNeeded; + size_t sigsNeeded = 0; CmdVerify() { @@ -29,11 +30,6 @@ struct CmdVerify : StorePathsCommand mkIntFlag('n', "sigs-needed", "require that each path has at least N valid signatures", &sigsNeeded); } - std::string name() override - { - return "verify"; - } - std::string description() override { return "verify the integrity of store paths"; @@ -53,7 +49,7 @@ struct CmdVerify : StorePathsCommand }; } - void run(ref store, Paths storePaths) override + void run(ref store, StorePaths storePaths) override { std::vector> substituters; for (auto & s : substituterUris) @@ -84,21 +80,26 @@ struct CmdVerify : StorePathsCommand MaintainCount> mcActive(active); update(); - auto info = store->queryPathInfo(storePath); + auto info = store->queryPathInfo(store->parseStorePath(storePath)); if (!noContents) { - HashSink sink(info->narHash.type); - store->narFromPath(info->path, sink); + std::unique_ptr hashSink; + if (info->ca == "") + hashSink = std::make_unique(info->narHash.type); + else + hashSink = std::make_unique(info->narHash.type, storePathToHash(store->printStorePath(info->path))); - auto hash = sink.finish(); + store->narFromPath(info->path, *hashSink); + + auto hash = hashSink->finish(); if (hash.first != info->narHash) { corrupted++; - act2.result(resCorruptedPath, info->path); + act2.result(resCorruptedPath, store->printStorePath(info->path)); printError( - format("path '%s' was modified! expected hash '%s', got '%s'") - % info->path % info->narHash.to_string() % hash.first.to_string()); + "path '%s' was modified! expected hash '%s', got '%s'", + store->printStorePath(info->path), info->narHash.to_string(), hash.first.to_string()); } } @@ -113,14 +114,13 @@ struct CmdVerify : StorePathsCommand else { StringSet sigsSeen; - size_t actualSigsNeeded = sigsNeeded ? 
sigsNeeded : 1; + size_t actualSigsNeeded = std::max(sigsNeeded, (size_t) 1); size_t validSigs = 0; auto doSigs = [&](StringSet sigs) { for (auto sig : sigs) { - if (sigsSeen.count(sig)) continue; - sigsSeen.insert(sig); - if (validSigs < ValidPathInfo::maxSigs && info->checkSignature(publicKeys, sig)) + if (!sigsSeen.insert(sig).second) continue; + if (validSigs < ValidPathInfo::maxSigs && info->checkSignature(*store, publicKeys, sig)) validSigs++; } }; @@ -147,8 +147,8 @@ struct CmdVerify : StorePathsCommand if (!good) { untrusted++; - act2.result(resUntrustedPath, info->path); - printError(format("path '%s' is untrusted") % info->path); + act2.result(resUntrustedPath, store->printStorePath(info->path)); + printError("path '%s' is untrusted", store->printStorePath(info->path)); } } @@ -164,7 +164,7 @@ struct CmdVerify : StorePathsCommand }; for (auto & storePath : storePaths) - pool.enqueue(std::bind(doPath, storePath)); + pool.enqueue(std::bind(doPath, store->printStorePath(storePath))); pool.process(); @@ -175,4 +175,4 @@ struct CmdVerify : StorePathsCommand } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("verify"); diff --git a/src/nix/why-depends.cc b/src/nix/why-depends.cc index 325a2be0a..8566e5851 100644 --- a/src/nix/why-depends.cc +++ b/src/nix/why-depends.cc @@ -44,11 +44,6 @@ struct CmdWhyDepends : SourceExprCommand .set(&all, true); } - std::string name() override - { - return "why-depends"; - } - std::string description() override { return "show why a package has another package in its closure"; @@ -78,9 +73,9 @@ struct CmdWhyDepends : SourceExprCommand auto packagePath = toStorePath(store, Build, package); auto dependency = parseInstallable(*this, store, _dependency, false); auto dependencyPath = toStorePath(store, NoBuild, dependency); - auto dependencyPathHash = storePathToHash(dependencyPath); + auto dependencyPathHash = storePathToHash(store->printStorePath(dependencyPath)); - PathSet closure; + StorePathSet closure; store->computeFSClosure({packagePath}, closure, false, false); if (!closure.count(dependencyPath)) { @@ -96,28 +91,28 @@ struct CmdWhyDepends : SourceExprCommand struct Node { - Path path; - PathSet refs; - PathSet rrefs; + StorePath path; + StorePathSet refs; + StorePathSet rrefs; size_t dist = inf; Node * prev = nullptr; bool queued = false; bool visited = false; }; - std::map graph; + std::map graph; for (auto & path : closure) - graph.emplace(path, Node{path, store->queryPathInfo(path)->references}); + graph.emplace(path.clone(), Node{path.clone(), cloneStorePathSet(store->queryPathInfo(path)->references)}); // Transpose the graph. for (auto & node : graph) for (auto & ref : node.second.refs) - graph[ref].rrefs.insert(node.first); + graph.find(ref)->second.rrefs.insert(node.first.clone()); /* Run Dijkstra's shortest path algorithm to get the distance of every path in the closure to 'dependency'. */ - graph[dependencyPath].dist = 0; + graph[dependencyPath.clone()].dist = 0; std::priority_queue queue; @@ -156,12 +151,14 @@ struct CmdWhyDepends : SourceExprCommand struct BailOut { }; printNode = [&](Node & node, const string & firstPad, const string & tailPad) { + auto pathS = store->printStorePath(node.path); + assert(node.dist != inf); std::cout << fmt("%s%s%s%s" ANSI_NORMAL "\n", firstPad, node.visited ? "\e[38;5;244m" : "", firstPad != "" ? 
"=> " : "", - node.path); + pathS); if (node.path == dependencyPath && !all && packagePath != dependencyPath) @@ -180,7 +177,7 @@ struct CmdWhyDepends : SourceExprCommand auto & node2 = graph.at(ref); if (node2.dist == inf) continue; refs.emplace(node2.dist, &node2); - hashes.insert(storePathToHash(node2.path)); + hashes.insert(storePathToHash(store->printStorePath(node2.path))); } /* For each reference, find the files and symlinks that @@ -192,7 +189,7 @@ struct CmdWhyDepends : SourceExprCommand visitPath = [&](const Path & p) { auto st = accessor->stat(p); - auto p2 = p == node.path ? "/" : std::string(p, node.path.size() + 1); + auto p2 = p == pathS ? "/" : std::string(p, pathS.size() + 1); auto getColour = [&](const std::string & hash) { return hash == dependencyPathHash ? ANSI_GREEN : ANSI_BLUE; @@ -236,11 +233,11 @@ struct CmdWhyDepends : SourceExprCommand // FIXME: should use scanForReferences(). - visitPath(node.path); + visitPath(pathS); RunPager pager; for (auto & ref : refs) { - auto hash = storePathToHash(ref.second->path); + auto hash = storePathToHash(store->printStorePath(ref.second->path)); bool last = all ? ref == *refs.rbegin() : true; @@ -264,4 +261,4 @@ struct CmdWhyDepends : SourceExprCommand } }; -static RegisterCommand r1(make_ref()); +static auto r1 = registerCommand("why-depends"); diff --git a/src/nlohmann/json.hpp b/src/nlohmann/json.hpp deleted file mode 100644 index c9af0bed3..000000000 --- a/src/nlohmann/json.hpp +++ /dev/null @@ -1,20406 +0,0 @@ -/* - __ _____ _____ _____ - __| | __| | | | JSON for Modern C++ -| | |__ | | | | | | version 3.5.0 -|_____|_____|_____|_|___| https://github.com/nlohmann/json - -Licensed under the MIT License . -SPDX-License-Identifier: MIT -Copyright (c) 2013-2018 Niels Lohmann . - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ - -#ifndef NLOHMANN_JSON_HPP -#define NLOHMANN_JSON_HPP - -#define NLOHMANN_JSON_VERSION_MAJOR 3 -#define NLOHMANN_JSON_VERSION_MINOR 5 -#define NLOHMANN_JSON_VERSION_PATCH 0 - -#include // all_of, find, for_each -#include // assert -#include // and, not, or -#include // nullptr_t, ptrdiff_t, size_t -#include // hash, less -#include // initializer_list -#include // istream, ostream -#include // random_access_iterator_tag -#include // accumulate -#include // string, stoi, to_string -#include // declval, forward, move, pair, swap - -// #include -#ifndef NLOHMANN_JSON_FWD_HPP -#define NLOHMANN_JSON_FWD_HPP - -#include // int64_t, uint64_t -#include // map -#include // allocator -#include // string -#include // vector - -/*! 
-@brief namespace for Niels Lohmann -@see https://github.com/nlohmann -@since version 1.0.0 -*/ -namespace nlohmann -{ -/*! -@brief default JSONSerializer template argument - -This serializer ignores the template arguments and uses ADL -([argument-dependent lookup](https://en.cppreference.com/w/cpp/language/adl)) -for serialization. -*/ -template -struct adl_serializer; - -template class ObjectType = - std::map, - template class ArrayType = std::vector, - class StringType = std::string, class BooleanType = bool, - class NumberIntegerType = std::int64_t, - class NumberUnsignedType = std::uint64_t, - class NumberFloatType = double, - template class AllocatorType = std::allocator, - template class JSONSerializer = - adl_serializer> -class basic_json; - -/*! -@brief JSON Pointer - -A JSON pointer defines a string syntax for identifying a specific value -within a JSON document. It can be used with functions `at` and -`operator[]`. Furthermore, JSON pointers are the base for JSON patches. - -@sa [RFC 6901](https://tools.ietf.org/html/rfc6901) - -@since version 2.0.0 -*/ -template -class json_pointer; - -/*! -@brief default JSON class - -This type is the default specialization of the @ref basic_json class which -uses the standard template types. - -@since version 1.0.0 -*/ -using json = basic_json<>; -} // namespace nlohmann - -#endif - -// #include - - -// This file contains all internal macro definitions -// You MUST include macro_unscope.hpp at the end of json.hpp to undef all of them - -// exclude unsupported compilers -#if !defined(JSON_SKIP_UNSUPPORTED_COMPILER_CHECK) - #if defined(__clang__) - #if (__clang_major__ * 10000 + __clang_minor__ * 100 + __clang_patchlevel__) < 30400 - #error "unsupported Clang version - see https://github.com/nlohmann/json#supported-compilers" - #endif - #elif defined(__GNUC__) && !(defined(__ICC) || defined(__INTEL_COMPILER)) - #if (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) < 40800 - #error "unsupported GCC version - see https://github.com/nlohmann/json#supported-compilers" - #endif - #endif -#endif - -// disable float-equal warnings on GCC/clang -#if defined(__clang__) || defined(__GNUC__) || defined(__GNUG__) - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wfloat-equal" -#endif - -// disable documentation warnings on clang -#if defined(__clang__) - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wdocumentation" -#endif - -// allow for portable deprecation warnings -#if defined(__clang__) || defined(__GNUC__) || defined(__GNUG__) - #define JSON_DEPRECATED __attribute__((deprecated)) -#elif defined(_MSC_VER) - #define JSON_DEPRECATED __declspec(deprecated) -#else - #define JSON_DEPRECATED -#endif - -// allow to disable exceptions -#if (defined(__cpp_exceptions) || defined(__EXCEPTIONS) || defined(_CPPUNWIND)) && !defined(JSON_NOEXCEPTION) - #define JSON_THROW(exception) throw exception - #define JSON_TRY try - #define JSON_CATCH(exception) catch(exception) - #define JSON_INTERNAL_CATCH(exception) catch(exception) -#else - #define JSON_THROW(exception) std::abort() - #define JSON_TRY if(true) - #define JSON_CATCH(exception) if(false) - #define JSON_INTERNAL_CATCH(exception) if(false) -#endif - -// override exception macros -#if defined(JSON_THROW_USER) - #undef JSON_THROW - #define JSON_THROW JSON_THROW_USER -#endif -#if defined(JSON_TRY_USER) - #undef JSON_TRY - #define JSON_TRY JSON_TRY_USER -#endif -#if defined(JSON_CATCH_USER) - #undef JSON_CATCH - #define JSON_CATCH JSON_CATCH_USER - #undef 
-    #define JSON_INTERNAL_CATCH JSON_CATCH_USER
-#endif
-#if defined(JSON_INTERNAL_CATCH_USER)
-    #undef JSON_INTERNAL_CATCH
-    #define JSON_INTERNAL_CATCH JSON_INTERNAL_CATCH_USER
-#endif
-
-// manual branch prediction
-#if defined(__clang__) || defined(__GNUC__) || defined(__GNUG__)
-    #define JSON_LIKELY(x) __builtin_expect(!!(x), 1)
-    #define JSON_UNLIKELY(x) __builtin_expect(!!(x), 0)
-#else
-    #define JSON_LIKELY(x) x
-    #define JSON_UNLIKELY(x) x
-#endif
-
-// C++ language standard detection
-#if (defined(__cplusplus) && __cplusplus >= 201703L) || (defined(_HAS_CXX17) && _HAS_CXX17 == 1) // fix for issue #464
-    #define JSON_HAS_CPP_17
-    #define JSON_HAS_CPP_14
-#elif (defined(__cplusplus) && __cplusplus >= 201402L) || (defined(_HAS_CXX14) && _HAS_CXX14 == 1)
-    #define JSON_HAS_CPP_14
-#endif
-
-/*!
-@brief macro to briefly define a mapping between an enum and JSON
-@def NLOHMANN_JSON_SERIALIZE_ENUM
-@since version 3.4.0
-*/
-#define NLOHMANN_JSON_SERIALIZE_ENUM(ENUM_TYPE, ...)                                           \
-    template<typename BasicJsonType>                                                           \
-    inline void to_json(BasicJsonType& j, const ENUM_TYPE& e)                                  \
-    {                                                                                          \
-        static_assert(std::is_enum<ENUM_TYPE>::value, #ENUM_TYPE " must be an enum!");         \
-        static const std::pair<ENUM_TYPE, BasicJsonType> m[] = __VA_ARGS__;                    \
-        auto it = std::find_if(std::begin(m), std::end(m),                                     \
-                               [e](const std::pair<ENUM_TYPE, BasicJsonType>& ej_pair) -> bool \
-        {                                                                                      \
-            return ej_pair.first == e;                                                         \
-        });                                                                                    \
-        j = ((it != std::end(m)) ? it : std::begin(m))->second;                                \
-    }                                                                                          \
-    template<typename BasicJsonType>                                                           \
-    inline void from_json(const BasicJsonType& j, ENUM_TYPE& e)                                \
-    {                                                                                          \
-        static_assert(std::is_enum<ENUM_TYPE>::value, #ENUM_TYPE " must be an enum!");         \
-        static const std::pair<ENUM_TYPE, BasicJsonType> m[] = __VA_ARGS__;                    \
-        auto it = std::find_if(std::begin(m), std::end(m),                                     \
-                               [j](const std::pair<ENUM_TYPE, BasicJsonType>& ej_pair) -> bool \
-        {                                                                                      \
-            return ej_pair.second == j;                                                        \
-        });                                                                                    \
-        e = ((it != std::end(m)) ? it : std::begin(m))->first;                                 \
-    }
-
-// Ugly macros to avoid uglier copy-paste when specializing basic_json. They
-// may be removed in the future once the class is split.
-
-#define NLOHMANN_BASIC_JSON_TPL_DECLARATION                                \
-    template<template<typename, typename, typename...> class ObjectType,  \
-             template<typename, typename...> class ArrayType,             \
-             class StringType, class BooleanType, class NumberIntegerType, \
-             class NumberUnsignedType, class NumberFloatType,              \
-             template<typename> class AllocatorType,                       \
-             template<typename, typename = void> class JSONSerializer>
-
-#define NLOHMANN_BASIC_JSON_TPL                                            \
-    basic_json<ObjectType, ArrayType, StringType, BooleanType,             \
-    NumberIntegerType, NumberUnsignedType, NumberFloatType,                \
-    AllocatorType, JSONSerializer>
-
-// #include <nlohmann/detail/meta/cpp_future.hpp>
-
-
-#include <ciso646> // not
-#include <cstddef> // size_t
-#include <type_traits> // conditional, enable_if, false_type, integral_constant, is_constructible, is_integral, is_same, remove_cv, remove_reference, true_type
-
-namespace nlohmann
-{
-namespace detail
-{
-// alias templates to reduce boilerplate
-template<bool B, typename T = void>
-using enable_if_t = typename std::enable_if<B, T>::type;
-
-template<typename T>
-using uncvref_t = typename std::remove_cv<typename std::remove_reference<T>::type>::type;
-
-// implementation of C++14 index_sequence and affiliates
-// source: https://stackoverflow.com/a/32223343
-template<std::size_t... Ints>
-struct index_sequence
-{
-    using type = index_sequence;
-    using value_type = std::size_t;
-    static constexpr std::size_t size() noexcept
-    {
-        return sizeof...(Ints);
-    }
-};
-
-template<class Sequence1, class Sequence2>
-struct merge_and_renumber;
-
-template<std::size_t... I1, std::size_t... I2>
-struct merge_and_renumber<index_sequence<I1...>, index_sequence<I2...>>
-    : index_sequence < I1..., (sizeof...(I1) + I2)... > {};
-
-template<std::size_t N>
-struct make_index_sequence
-    : merge_and_renumber < typename make_index_sequence < N / 2 >::type,
-      typename make_index_sequence < N - N / 2 >::type > {};
-
-template<> struct make_index_sequence<0> : index_sequence<> {};
-template<> struct make_index_sequence<1> : index_sequence<0> {};
-
-template<typename... Ts>
-using index_sequence_for = make_index_sequence<sizeof...(Ts)>;
-
-// dispatch utility (taken from ranges-v3)
-template<unsigned N> struct priority_tag : priority_tag < N - 1 > {};
-template<> struct priority_tag<0> {};
-
-// taken from ranges-v3
-template<typename T>
-struct static_const
-{
-    static constexpr T value{};
-};
-
-template<typename T>
-constexpr T static_const<T>::value;
-}  // namespace detail
-}  // namespace nlohmann
-
-// #include <nlohmann/detail/meta/type_traits.hpp>
-
-
-#include <ciso646> // not
-#include <limits> // numeric_limits
-#include <type_traits> // false_type, is_constructible, is_integral, is_same, true_type
-#include <utility> // declval
-
-// #include <nlohmann/json_fwd.hpp>
-
-// #include
-
-
-#include <iterator> // random_access_iterator_tag
-
-// #include <nlohmann/detail/meta/void_t.hpp>
-
-
-namespace nlohmann
-{
-namespace detail
-{
-template <typename ...Ts> struct make_void
-{
-    using type = void;
-};
-template <typename ...Ts> using void_t = typename make_void<Ts...>::type;
-}  // namespace detail
-}  // namespace nlohmann
-
-// #include
-
-
-namespace nlohmann
-{
-namespace detail
-{
-template <typename It, typename = void>
-struct iterator_types {};
-
-template <typename It>
-struct iterator_types <
-    It,
-    void_t<typename It::difference_type, typename It::value_type, typename It::pointer,
-    typename It::reference, typename It::iterator_category >>
-{
-    using difference_type = typename It::difference_type;
-    using value_type = typename It::value_type;
-    using pointer = typename It::pointer;
-    using reference = typename It::reference;
-    using iterator_category = typename It::iterator_category;
-};
-
-// This is required as some compilers implement std::iterator_traits in a way that
-// doesn't work with SFINAE. See https://github.com/nlohmann/json/issues/1341.
-template <typename T, typename = void>
-struct iterator_traits
-{
-};
-
-template <typename T>
-struct iterator_traits < T, enable_if_t < !std::is_pointer<T>::value >>
-    : iterator_types <T>
-{
-};
-
-template <typename T>
-struct iterator_traits<T*, enable_if_t<std::is_object<T>::value>>
-{
-    using iterator_category = std::random_access_iterator_tag;
-    using value_type = T;
-    using difference_type = ptrdiff_t;
-    using pointer = T*;
-    using reference = T&;
-};
-}
-}
-
-// #include
-
-// #include <nlohmann/detail/meta/detected.hpp>
-
-
-#include <type_traits>
-
-// #include <nlohmann/detail/meta/void_t.hpp>
-
-
-// http://en.cppreference.com/w/cpp/experimental/is_detected
-namespace nlohmann
-{
-namespace detail
-{
-struct nonesuch
-{
-    nonesuch() = delete;
-    ~nonesuch() = delete;
-    nonesuch(nonesuch const&) = delete;
-    void operator=(nonesuch const&) = delete;
-};
-
-template <class Default,
-          template <class...> class Op,
-          class... Args>
-struct detector
-{
-    using value_t = std::false_type;
-    using type = Default;
-};
-
-template <class Default, template <class...> class Op, class... Args>
-struct detector<Default, void_t<Op<Args...>>, Op, Args...>
-{
-    using value_t = std::true_type;
-    using type = Op<Args...>;
-};
-
-template