
Merge branch 'master' into document-derivation-and-deriving-path

John Ericson 2025-02-05 21:35:00 -05:00
commit f003b8ca92
378 changed files with 8891 additions and 6157 deletions


@ -106,3 +106,14 @@ pull_request_rules:
labels:
- automatic backport
- merge-queue
- name: backport patches to 2.26
conditions:
- label=backport 2.26-maintenance
actions:
backport:
branches:
- "2.26-maintenance"
labels:
- automatic backport
- merge-queue


@ -1 +1 @@
2.26.0
2.27.0


@ -1,10 +1,9 @@
(import
(
let lock = builtins.fromJSON (builtins.readFile ./flake.lock); in
(import (
let
lock = builtins.fromJSON (builtins.readFile ./flake.lock);
in
fetchTarball {
url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
sha256 = lock.nodes.flake-compat.locked.narHash;
}
)
{ src = ./.; }
).defaultNix
) { src = ./.; }).defaultNix


@ -5,7 +5,15 @@ in
builtinsInfo:
let
showBuiltin = name: { doc, type ? null, args ? [ ], experimental-feature ? null, impure-only ? false }:
showBuiltin =
name:
{
doc,
type ? null,
args ? [ ],
experimental-feature ? null,
impure-only ? false,
}:
let
type' = optionalString (type != null) " (${type})";


@ -32,7 +32,13 @@ let
commandInfo = fromJSON commandDump;
showCommand = { command, details, filename, toplevel }:
showCommand =
{
command,
details,
filename,
toplevel,
}:
let
result = ''
@ -56,26 +62,27 @@ let
${maybeOptions}
'';
showSynopsis = command: args:
showSynopsis =
command: args:
let
showArgument = arg: "*${arg.label}*" + optionalString (!arg ? arity) "...";
arguments = concatStringsSep " " (map showArgument args);
in ''
in
''
`${command}` [*option*...] ${arguments}
'';
maybeSubcommands = optionalString (details ? commands && details.commands != {})
''
maybeSubcommands = optionalString (details ? commands && details.commands != { }) ''
where *subcommand* is one of the following:
${subcommands}
'';
subcommands = if length categories > 1
then listCategories
else listSubcommands details.commands;
subcommands = if length categories > 1 then listCategories else listSubcommands details.commands;
categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues details.commands)));
categories = sort (x: y: x.id < y.id) (
unique (map (cmd: cmd.category) (attrValues details.commands))
);
listCategories = concatStrings (map showCategory categories);
@ -99,38 +106,39 @@ let
${allStores}
'';
index = replaceStrings
index =
replaceStrings
[ "@store-types@" "./local-store.md" "./local-daemon-store.md" ]
[ storesOverview "#local-store" "#local-daemon-store" ]
details.doc;
storesOverview =
let
showEntry = store:
"- [${store.name}](#${store.slug})";
showEntry = store: "- [${store.name}](#${store.slug})";
in
concatStringsSep "\n" (map showEntry storesList) + "\n";
allStores = concatStringsSep "\n" (attrValues storePages);
storePages = listToAttrs
(map (s: { name = s.filename; value = s.page; }) storesList);
storePages = listToAttrs (
map (s: {
name = s.filename;
value = s.page;
}) storesList
);
storesList = showStoreDocs {
storeInfo = commandInfo.stores;
inherit inlineHTML;
};
hasInfix = infix: content:
hasInfix =
infix: content:
builtins.stringLength content != builtins.stringLength (replaceStrings [ infix ] [ "" ] content);
in
optionalString (details ? doc) (
# An alternate implementation with builtins.match stack overflowed on some systems.
if hasInfix "@store-types@" details.doc
then help-stores
else details.doc
if hasInfix "@store-types@" details.doc then help-stores else details.doc
);
maybeOptions =
let
allVisibleOptions = filterAttrs
(_: o: ! o.hiddenCategory)
(details.flags // toplevel.flags);
allVisibleOptions = filterAttrs (_: o: !o.hiddenCategory) (details.flags // toplevel.flags);
in
optionalString (allVisibleOptions != { }) ''
# Options
@ -142,55 +150,73 @@ let
> See [`man nix.conf`](@docroot@/command-ref/conf-file.md#command-line-flags) for overriding configuration settings with command line flags.
'';
showOptions = inlineHTML: allOptions:
showOptions =
inlineHTML: allOptions:
let
showCategory = cat: opts: ''
${optionalString (cat != "") "## ${cat}"}
${concatStringsSep "\n" (attrValues (mapAttrs showOption opts))}
'';
showOption = name: option:
showOption =
name: option:
let
result = trim ''
- ${item}
${option.description}
'';
item = if inlineHTML
then ''<span id="opt-${name}">[`--${name}`](#opt-${name})</span> ${shortName} ${labels}''
else "`--${name}` ${shortName} ${labels}";
shortName = optionalString
(option ? shortName)
("/ `-${option.shortName}`");
labels = optionalString
(option ? labels)
(concatStringsSep " " (map (s: "*${s}*") option.labels));
in result;
categories = mapAttrs
item =
if inlineHTML then
''<span id="opt-${name}">[`--${name}`](#opt-${name})</span> ${shortName} ${labels}''
else
"`--${name}` ${shortName} ${labels}";
shortName = optionalString (option ? shortName) ("/ `-${option.shortName}`");
labels = optionalString (option ? labels) (concatStringsSep " " (map (s: "*${s}*") option.labels));
in
result;
categories =
mapAttrs
# Convert each group from a list of key-value pairs back to an attrset
(_: listToAttrs)
(groupBy
(cmd: cmd.value.category)
(attrsToList allOptions));
in concatStrings (attrValues (mapAttrs showCategory categories));
in squash result;
(groupBy (cmd: cmd.value.category) (attrsToList allOptions));
in
concatStrings (attrValues (mapAttrs showCategory categories));
in
squash result;
appendName = filename: name: (if filename == "nix" then "nix3" else filename) + "-" + name;
processCommand = { command, details, filename, toplevel }:
processCommand =
{
command,
details,
filename,
toplevel,
}:
let
cmd = {
inherit command;
name = filename + ".md";
value = showCommand { inherit command details filename toplevel; };
value = showCommand {
inherit
command
details
filename
toplevel
;
};
subcommand = subCmd: processCommand {
};
subcommand =
subCmd:
processCommand {
command = command + " " + subCmd;
details = details.commands.${subCmd};
filename = appendName filename subCmd;
inherit toplevel;
};
in [ cmd ] ++ concatMap subcommand (attrNames details.commands or {});
in
[ cmd ] ++ concatMap subcommand (attrNames details.commands or { });
manpages = processCommand {
command = "nix";
@ -199,9 +225,11 @@ let
toplevel = commandInfo.args;
};
tableOfContents = let
showEntry = page:
" - [${page.command}](command-ref/new-cli/${page.name})";
in concatStringsSep "\n" (map showEntry manpages) + "\n";
tableOfContents =
let
showEntry = page: " - [${page.command}](command-ref/new-cli/${page.name})";
in
concatStringsSep "\n" (map showEntry manpages) + "\n";
in (listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; }
in
(listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; }


@ -1,23 +1,49 @@
let
inherit (builtins) attrValues concatStringsSep isAttrs isBool mapAttrs;
inherit (import <nix/utils.nix>) concatStrings indent optionalString squash;
inherit (builtins)
attrValues
concatStringsSep
isAttrs
isBool
mapAttrs
;
inherit (import <nix/utils.nix>)
concatStrings
indent
optionalString
squash
;
in
# `inlineHTML` is a hack to accommodate inconsistent output from `lowdown`
{ prefix, inlineHTML ? true }: settingsInfo:
{
prefix,
inlineHTML ? true,
}:
settingsInfo:
let
showSetting = prefix: setting: { description, documentDefault, defaultValue, aliases, value, experimentalFeature }:
showSetting =
prefix: setting:
{
description,
documentDefault,
defaultValue,
aliases,
value,
experimentalFeature,
}:
let
result = squash ''
- ${item}
${indent " " body}
'';
item = if inlineHTML
then ''<span id="${prefix}-${setting}">[`${setting}`](#${prefix}-${setting})</span>''
else "`${setting}`";
item =
if inlineHTML then
''<span id="${prefix}-${setting}">[`${setting}`](#${prefix}-${setting})</span>''
else
"`${setting}`";
# separate body to cleanly handle indentation
body = ''
${experimentalFeatureNote}
@ -46,22 +72,28 @@ let
> ```
'';
showDefault = documentDefault: defaultValue:
showDefault =
documentDefault: defaultValue:
if documentDefault then
# a StringMap value type is specified as a string, but
# this shows the value type. The empty stringmap is `null` in
# JSON, but that converts to `{ }` here.
if defaultValue == "" || defaultValue == [] || isAttrs defaultValue
then "*empty*"
if defaultValue == "" || defaultValue == [ ] || isAttrs defaultValue then
"*empty*"
else if isBool defaultValue then
if defaultValue then "`true`" else "`false`"
else "`${toString defaultValue}`"
else "*machine-specific*";
else
"`${toString defaultValue}`"
else
"*machine-specific*";
showAliases = aliases:
showAliases =
aliases:
optionalString (aliases != [ ])
"**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}";
in result;
in
result;
in concatStrings (attrValues (mapAttrs (showSetting prefix) settingsInfo))
in
concatStrings (attrValues (mapAttrs (showSetting prefix) settingsInfo))


@ -1,6 +1,20 @@
let
inherit (builtins) attrNames listToAttrs concatStringsSep readFile replaceStrings;
inherit (import <nix/utils.nix>) optionalString filterAttrs trim squash toLower unique indent;
inherit (builtins)
attrNames
listToAttrs
concatStringsSep
readFile
replaceStrings
;
inherit (import <nix/utils.nix>)
optionalString
filterAttrs
trim
squash
toLower
unique
indent
;
showSettings = import <nix/generate-settings.nix>;
in
@ -14,7 +28,13 @@ in
let
showStore = { name, slug }: { settings, doc, experimentalFeature }:
showStore =
{ name, slug }:
{
settings,
doc,
experimentalFeature,
}:
let
result = squash ''
# ${name}
@ -25,7 +45,10 @@ let
## Settings
${showSettings { prefix = "store-${slug}"; inherit inlineHTML; } settings}
${showSettings {
prefix = "store-${slug}";
inherit inlineHTML;
} settings}
'';
experimentalFeatureNote = optionalString (experimentalFeature != null) ''
@ -43,15 +66,15 @@ let
> extra-experimental-features = ${experimentalFeature}
> ```
'';
in result;
in
result;
storesList = map
(name: rec {
storesList = map (name: rec {
inherit name;
slug = replaceStrings [ " " ] [ "-" ] (toLower name);
filename = "${slug}.md";
page = showStore { inherit name slug; } storeInfo.${name};
})
(attrNames storeInfo);
}) (attrNames storeInfo);
in storesList
in
storesList


@ -1,5 +1,11 @@
let
inherit (builtins) attrNames listToAttrs concatStringsSep readFile replaceStrings;
inherit (builtins)
attrNames
listToAttrs
concatStringsSep
readFile
replaceStrings
;
showSettings = import <nix/generate-settings.nix>;
showStoreDocs = import <nix/generate-store-info.nix>;
in
@ -14,26 +20,28 @@ let
index =
let
showEntry = store:
"- [${store.name}](./${store.filename})";
showEntry = store: "- [${store.name}](./${store.filename})";
in
concatStringsSep "\n" (map showEntry storesList);
"index.md" = replaceStrings
[ "@store-types@" ] [ index ]
"index.md" =
replaceStrings [ "@store-types@" ] [ index ]
(readFile ./source/store/types/index.md.in);
tableOfContents =
let
showEntry = store:
" - [${store.name}](store/types/${store.filename})";
showEntry = store: " - [${store.name}](store/types/${store.filename})";
in
concatStringsSep "\n" (map showEntry storesList) + "\n";
"SUMMARY.md" = tableOfContents;
storePages = listToAttrs
(map (s: { name = s.filename; value = s.page; }) storesList);
storePages = listToAttrs (
map (s: {
name = s.filename;
value = s.page;
}) storesList
);
in
storePages // { inherit "index.md" "SUMMARY.md"; }


@ -2,8 +2,8 @@ with builtins;
with import <nix/utils.nix>;
let
showExperimentalFeature = name: doc:
''
showExperimentalFeature = name: doc: ''
- [`${name}`](@docroot@/development/experimental-features.md#xp-feature-${name})
'';
in xps: indent " " (concatStrings (attrValues (mapAttrs showExperimentalFeature xps)))
in
xps: indent " " (concatStrings (attrValues (mapAttrs showExperimentalFeature xps)))


@ -2,7 +2,8 @@ with builtins;
with import <nix/utils.nix>;
let
showExperimentalFeature = name: doc:
showExperimentalFeature =
name: doc:
squash ''
## [`${name}`]{#xp-feature-${name}}


@ -1,19 +1,20 @@
{ lib
, mkMesonDerivation
{
lib,
mkMesonDerivation,
, meson
, ninja
, lowdown-unsandboxed
, mdbook
, mdbook-linkcheck
, jq
, python3
, rsync
, nix-cli
meson,
ninja,
lowdown-unsandboxed,
mdbook,
mdbook-linkcheck,
jq,
python3,
rsync,
nix-cli,
# Configuration Options
, version
version,
}:
let
@ -25,7 +26,8 @@ mkMesonDerivation (finalAttrs: {
inherit version;
workDir = ./.;
fileset = fileset.difference
fileset =
fileset.difference
(fileset.unions [
../../.version
# Too many different types of files to filter for now
@ -36,7 +38,10 @@ mkMesonDerivation (finalAttrs: {
../../doc/manual/package.nix;
# TODO the man pages should probably be separate
outputs = [ "out" "man" ];
outputs = [
"out"
"man"
];
# Hack for sake of the dev shell
passthru.externalNativeBuildInputs = [
@ -54,8 +59,7 @@ mkMesonDerivation (finalAttrs: {
nix-cli
];
preConfigure =
''
preConfigure = ''
chmod u+w ./.version
echo ${finalAttrs.version} > ./.version
'';


@ -1,22 +0,0 @@
---
synopsis: "Flake lock file generation now ignores local registries"
prs: [12019]
---
When resolving indirect flake references like `nixpkgs` in `flake.nix` files, Nix will no longer use the system and user flake registries. It will only use the global flake registry and overrides given on the command line via `--override-flake`.
This avoids accidents where users have local registry overrides that map `nixpkgs` to a `path:` flake in the local file system, which then end up in committed lock files pushed to other users.
In the future, we may remove the use of the registry during lock file generation altogether. It's better to explicitly specify the URL of a flake input. For example, instead of
```nix
{
outputs = { self, nixpkgs }: { ... };
}
```
write
```nix
{
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11";
outputs = { self, nixpkgs }: { ... };
}
```


@ -1,18 +0,0 @@
---
synopsis: "`nix copy` supports `--profile` and `--out-link`"
prs: [11657]
---
The `nix copy` command now has flags `--profile` and `--out-link`, similar to `nix build`. `--profile` makes a profile point to the
top-level store path, while `--out-link` creates symlinks to the top-level store paths.
For example, when updating the local NixOS system profile from a NixOS system closure on a remote machine, instead of
```
# nix copy --from ssh://server $path
# nix build --profile /nix/var/nix/profiles/system $path
```
you can now do
```
# nix copy --from ssh://server --profile /nix/var/nix/profiles/system $path
```
The advantage is that this avoids a time window where *path* is not a garbage collector root, and so could be deleted by a concurrent `nix store gc` process.


@ -1,8 +0,0 @@
---
synopsis: "`nix-instantiate --eval` now supports `--raw`"
prs: [12119]
---
The `nix-instantiate --eval` command now supports a `--raw` flag. When it is used,
the evaluation result must be a string, which is printed verbatim, without
quotation marks or escaping.


@ -1,21 +0,0 @@
---
synopsis: "Improved `NIX_SSHOPTS` parsing for better SSH option handling"
issues: [5181]
prs: [12020]
---
The parsing of the `NIX_SSHOPTS` environment variable has been improved to handle spaces and quotes correctly.
Previously, incorrectly split SSH options could cause failures in CLIs like `nix-copy-closure`,
especially when using complex SSH invocations such as `-o ProxyCommand="ssh -W %h:%p ..."`.
This change introduces a `shellSplitString` function to ensure
that `NIX_SSHOPTS` is parsed in a manner consistent with shell
behavior, addressing common parsing errors.
For example, the following now works as expected:
```bash
export NIX_SSHOPTS='-o ProxyCommand="ssh -W %h:%p ..."'
```
This update improves the reliability of SSH-related operations using `NIX_SSHOPTS` across Nix CLIs.


@ -1,12 +0,0 @@
---
synopsis: "Support for relative path inputs"
prs: [10089]
---
Flakes can now refer to other flakes in the same repository using relative paths, e.g.
```nix
inputs.foo.url = "path:./foo";
```
uses the flake in the `foo` subdirectory of the referring flake. For more information, see the documentation on [the `path` flake input type](@docroot@/command-ref/new-cli/nix3-flake.md#path-fetcher).
This feature required a change to the lock file format. Previous Nix versions will not be able to use lock files that have locks for relative path inputs in them.


@ -132,6 +132,7 @@
- [Contributing](development/contributing.md)
- [Releases](release-notes/index.md)
{{#include ./SUMMARY-rl-next.md}}
- [Release 2.26 (2025-01-22)](release-notes/rl-2.26.md)
- [Release 2.25 (2024-11-07)](release-notes/rl-2.25.md)
- [Release 2.24 (2024-07-31)](release-notes/rl-2.24.md)
- [Release 2.23 (2024-06-03)](release-notes/rl-2.23.md)


@ -79,7 +79,7 @@ This shell also adds `./outputs/bin/nix` to your `$PATH` so you can run `nix` im
To get a shell with one of the other [supported compilation environments](#compilation-environments):
```console
$ nix develop .#native-clangStdenvPackages
$ nix develop .#native-clangStdenv
```
> **Note**
@ -167,11 +167,13 @@ It is useful to perform multiple cross and native builds on the same source tree
for example to ensure that better support for one platform doesn't break the build for another.
Meson thankfully makes this very easy by confining all build products to the build directory: one simply shares the source directory between multiple build directories, each of which contains the build of Nix for a different platform.
Nixpkgs's `configurePhase` always chooses `build` in the current directory as the name and location of the build.
This makes having multiple build directories slightly more inconvenient.
The good news is that Meson/Ninja seem to cope well with relocating the build directory after it is created.
Here's how to do that:
Here's how to do that
1. Instruct Nixpkgs's infra where we want Meson to put its build directory
```bash
mesonBuildDir=build-my-variant-name
```
1. Configure as usual
@ -179,24 +181,12 @@ Here's how to do that
configurePhase
```
2. Rename the build directory
```bash
cd .. # since `configurePhase` cd'd inside
mv build build-linux # or whatever name we want
cd build-linux
```
3. Build as usual
```bash
buildPhase
```
> **N.B.**
> [`nixpkgs#335818`](https://github.com/NixOS/nixpkgs/issues/335818) tracks giving `mesonConfigurePhase` proper support for custom build directories.
> When it is fixed, we can simplify these instructions and then remove this notice.
## System type
Nix uses a string with the following format to identify the *system type* or *platform* it runs on:
@ -261,7 +251,8 @@ See [supported compilation environments](#compilation-environments) and instruct
To use the LSP with your editor, you will want a `compile_commands.json` file telling `clangd` how we are compiling the code.
Meson's configure always produces this inside the build directory.
Configure your editor to use the `clangd` from the `.#native-clangStdenvPackages` shell. You can do that either by running it inside the development shell, or by using [nix-direnv](https://github.com/nix-community/nix-direnv) and [the appropriate editor plugin](https://github.com/direnv/direnv/wiki#editor-integration).
Configure your editor to use the `clangd` from the `.#native-clangStdenv` shell.
You can do that either by running it inside the development shell, or by using [nix-direnv](https://github.com/nix-community/nix-direnv) and [the appropriate editor plugin](https://github.com/direnv/direnv/wiki#editor-integration).
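Some editor setups expect `compile_commands.json` at the repository root; a minimal workaround (assuming your build directory is named `build`) is to symlink the file Meson generated:

```console
$ ln -sf build/compile_commands.json compile_commands.json
```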
> **Note**
>
@ -277,6 +268,8 @@ You may run the formatters as a one-off using:
./maintainers/format.sh
```
### Pre-commit hooks
If you'd like to run the formatters before every commit, install the hooks:
```
@ -291,3 +284,30 @@ If it fails, run `git add --patch` to approve the suggestions _and commit again_
To refresh pre-commit hook's config file, do the following:
1. Exit the development shell and start it again by running `nix develop`.
2. If you also use the pre-commit hook, also run `pre-commit-hooks-install` again.
### VSCode
Insert the following JSON into your `.vscode/settings.json` file to configure `nixfmt`.
This will be picked up by the _Format Document_ command, `"editor.formatOnSave"`, etc.
```json
{
  "nix.formatterPath": "nixfmt",
  "nix.serverSettings": {
    "nixd": {
      "formatting": {
        "command": ["nixfmt"]
      }
    },
    "nil": {
      "formatting": {
        "command": ["nixfmt"]
      }
    }
  }
}
```


@ -2,6 +2,8 @@
This section shows how to build and debug Nix with debug symbols enabled.
Additionally, see [Testing Nix](./testing.md) for further instructions on how to debug Nix in the context of a unit test or functional test.
## Building Nix with Debug Symbols
In the development shell, set the `mesonBuildType` environment variable to `debug` before configuring the build:
@ -13,6 +15,15 @@ In the development shell, set the `mesonBuildType` environment variable to `debu
Then, proceed to build Nix as described in [Building Nix](./building.md).
This will build Nix with debug symbols, which are essential for effective debugging.
It is also possible to build without optimizations, for faster builds:
```console
[nix-shell]$ NIX_HARDENING_ENABLE=$(printLines $NIX_HARDENING_ENABLE | grep -v fortify)
[nix-shell]$ export mesonBuildType=debug
```
(The first line is needed because `fortify` hardening requires at least some optimization.)
## Debugging the Nix Binary
Obtain your preferred debugger within the development shell:


@ -87,7 +87,11 @@ A few environment variables that Google Test accepts are also worth knowing:
This is used to avoid logging passing tests.
Putting the two together, one might run
3. [`GTEST_BREAK_ON_FAILURE`](https://google.github.io/googletest/advanced.html#turning-assertion-failures-into-break-points)
This is used to create a debugger breakpoint when an assertion failure occurs.
Putting the first two together, one might run
```bash
GTEST_BRIEF=1 GTEST_FILTER='ErrorTraceTest.*' meson test nix-expr-tests -v
@ -95,6 +99,22 @@ GTEST_BRIEF=1 GTEST_FILTER='ErrorTraceTest.*' meson test nix-expr-tests -v
for short but comprehensive output.
### Debugging tests
For debugging, it is useful to combine the third option above with Meson's [`--gdb`](https://mesonbuild.com/Unit-tests.html#other-test-options) flag:
```bash
GTEST_BRIEF=1 GTEST_FILTER='Group.my-failing-test' meson test nix-expr-tests --gdb
```
This will:
1. Run the unit test with GDB
2. Run just `Group.my-failing-test`
3. Stop the program when the test fails, allowing the user to then issue arbitrary commands to GDB.
### Characterisation testing { #characaterisation-testing-unit }
See [functional characterisation testing](#characterisation-testing-functional) for a broader discussion of characterisation testing.
@ -213,10 +233,10 @@ edit it like so:
bar
```
Then, running the test with `./mk/debug-test.sh` will drop you into GDB once the script reaches that point:
Then, running the test with [`--interactive`](https://mesonbuild.com/Unit-tests.html#other-test-options) will prevent Meson from hijacking the terminal, so you can drop into GDB once the script reaches that point:
```shell-session
$ ./mk/debug-test.sh tests/functional/${testName}.sh
$ meson test ${testName} --interactive
...
+ gdb blash blub
GNU gdb (GDB) 12.1


@ -0,0 +1,128 @@
# Release 2.26.0 (2025-01-22)
- Support for relative path inputs [#10089](https://github.com/NixOS/nix/pull/10089)
Flakes can now refer to other flakes in the same repository using relative paths, e.g.
```nix
inputs.foo.url = "path:./foo";
```
uses the flake in the `foo` subdirectory of the referring flake. For more information, see the documentation on [the `path` flake input type](@docroot@/command-ref/new-cli/nix3-flake.md#path-fetcher).
This feature required a change to the lock file format. Previous Nix versions will not be able to use lock files that have locks for relative path inputs in them.
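As a slightly fuller sketch (the attribute names are illustrative), a parent flake can consume such a subdirectory flake like any other input:

```nix
{
  inputs.foo.url = "path:./foo";
  outputs = { self, foo }: {
    # Re-export the subdirectory flake's packages
    packages = foo.packages;
  };
}
```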
- Flake lock file generation now ignores local registries [#12019](https://github.com/NixOS/nix/pull/12019)
When resolving indirect flake references like `nixpkgs` in `flake.nix` files, Nix will no longer use the system and user flake registries. It will only use the global flake registry and overrides given on the command line via `--override-flake`.
This avoids accidents where users have local registry overrides that map `nixpkgs` to a `path:` flake in the local file system, which then end up in committed lock files pushed to other users.
In the future, we may remove the use of the registry during lock file generation altogether. It's better to explicitly specify the URL of a flake input. For example, instead of
```nix
{
outputs = { self, nixpkgs }: { ... };
}
```
write
```nix
{
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11";
outputs = { self, nixpkgs }: { ... };
}
```
- `nix copy` supports `--profile` and `--out-link` [#11657](https://github.com/NixOS/nix/pull/11657)
The `nix copy` command now has flags `--profile` and `--out-link`, similar to `nix build`. `--profile` makes a profile point to the
top-level store path, while `--out-link` creates symlinks to the top-level store paths.
For example, when updating the local NixOS system profile from a NixOS system closure on a remote machine, instead of
```
# nix copy --from ssh://server $path
# nix build --profile /nix/var/nix/profiles/system $path
```
you can now do
```
# nix copy --from ssh://server --profile /nix/var/nix/profiles/system $path
```
The advantage is that this avoids a time window where *path* is not a garbage collector root, and so could be deleted by a concurrent `nix store gc` process.
- `nix-instantiate --eval` now supports `--raw` [#12119](https://github.com/NixOS/nix/pull/12119)
The `nix-instantiate --eval` command now supports a `--raw` flag. When it is used,
the evaluation result must be a string, which is printed verbatim, without
quotation marks or escaping.
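For illustration (a hypothetical invocation), compare the output with and without the flag:

```console
$ nix-instantiate --eval --expr '"Hello, " + "world"'
"Hello, world"
$ nix-instantiate --eval --raw --expr '"Hello, " + "world"'
Hello, world
```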
- Improved `NIX_SSHOPTS` parsing for better SSH option handling [#5181](https://github.com/NixOS/nix/issues/5181) [#12020](https://github.com/NixOS/nix/pull/12020)
The parsing of the `NIX_SSHOPTS` environment variable has been improved to handle spaces and quotes correctly.
Previously, incorrectly split SSH options could cause failures in commands like `nix-copy-closure`,
especially when using complex SSH invocations such as `-o ProxyCommand="ssh -W %h:%p ..."`.
This change introduces a `shellSplitString` function to ensure
that `NIX_SSHOPTS` is parsed in a manner consistent with shell
behavior, addressing common parsing errors.
For example, the following now works as expected:
```bash
export NIX_SSHOPTS='-o ProxyCommand="ssh -W %h:%p ..."'
```
This update improves the reliability of SSH-related operations using `NIX_SSHOPTS` across Nix CLIs.
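As a usage sketch (the host names and `$path` are placeholders), quoted options now pass through intact to commands like `nix-copy-closure`:

```console
$ export NIX_SSHOPTS='-o ProxyCommand="ssh -W %h:%p jumphost"'
$ nix-copy-closure --to machine $path
```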
- Nix is now built using Meson
As proposed in [RFC 132](https://github.com/NixOS/rfcs/pull/132), Nix's build system now uses Meson/Ninja. The old Make-based build system has been removed.
- Evaluation caching now works for dirty Git workdirs [#11992](https://github.com/NixOS/nix/pull/11992)
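An illustrative sketch (the flake attribute is hypothetical): repeated commands in a checkout with uncommitted changes can now reuse the evaluation cache instead of re-evaluating from scratch:

```console
$ echo '# local tweak' >> flake.nix   # make the working tree dirty
$ nix build .#package                 # evaluates and populates the cache
$ nix build .#package                 # reuses the cached evaluation
```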
# Contributors
This release was made possible by the following 45 contributors:
- Anatoli Babenia [**(@abitrolly)**](https://github.com/abitrolly)
- Domagoj Mišković [**(@allrealmsoflife)**](https://github.com/allrealmsoflife)
- Yaroslav Bolyukin [**(@CertainLach)**](https://github.com/CertainLach)
- bryango [**(@bryango)**](https://github.com/bryango)
- tomberek [**(@tomberek)**](https://github.com/tomberek)
- Matej Urbas [**(@mupdt)**](https://github.com/mupdt)
- elikoga [**(@elikoga)**](https://github.com/elikoga)
- wh0 [**(@wh0)**](https://github.com/wh0)
- Félix [**(@picnoir)**](https://github.com/picnoir)
- Valentin Gagarin [**(@fricklerhandwerk)**](https://github.com/fricklerhandwerk)
- Gavin John [**(@Pandapip1)**](https://github.com/Pandapip1)
- Travis A. Everett [**(@abathur)**](https://github.com/abathur)
- Vladimir Panteleev [**(@CyberShadow)**](https://github.com/CyberShadow)
- Ilja [**(@suruaku)**](https://github.com/suruaku)
- Jason Yundt [**(@Jayman2000)**](https://github.com/Jayman2000)
- Mike Kusold [**(@kusold)**](https://github.com/kusold)
- Andy Hamon [**(@andrewhamon)**](https://github.com/andrewhamon)
- Brian McKenna [**(@puffnfresh)**](https://github.com/puffnfresh)
- Greg Curtis [**(@gcurtis)**](https://github.com/gcurtis)
- Andrew Poelstra [**(@apoelstra)**](https://github.com/apoelstra)
- Linus Heckemann [**(@lheckemann)**](https://github.com/lheckemann)
- Tristan Ross [**(@RossComputerGuy)**](https://github.com/RossComputerGuy)
- Dominique Martinet [**(@martinetd)**](https://github.com/martinetd)
- h0nIg [**(@h0nIg)**](https://github.com/h0nIg)
- Eelco Dolstra [**(@edolstra)**](https://github.com/edolstra)
- Shahar "Dawn" Or [**(@mightyiam)**](https://github.com/mightyiam)
- NAHO [**(@trueNAHO)**](https://github.com/trueNAHO)
- Ryan Hendrickson [**(@rhendric)**](https://github.com/rhendric)
- the-sun-will-rise-tomorrow [**(@the-sun-will-rise-tomorrow)**](https://github.com/the-sun-will-rise-tomorrow)
- Connor Baker [**(@ConnorBaker)**](https://github.com/ConnorBaker)
- Cole Helbling [**(@cole-h)**](https://github.com/cole-h)
- Jack Wilsdon [**(@jackwilsdon)**](https://github.com/jackwilsdon)
- rekcäH nitraM [**(@dwt)**](https://github.com/dwt)
- Martin Fischer [**(@not-my-profile)**](https://github.com/not-my-profile)
- John Ericson [**(@Ericson2314)**](https://github.com/Ericson2314)
- Graham Christensen [**(@grahamc)**](https://github.com/grahamc)
- Sergei Zimmerman [**(@xokdvium)**](https://github.com/xokdvium)
- Siddarth Kumar [**(@siddarthkay)**](https://github.com/siddarthkay)
- Sergei Trofimovich [**(@trofi)**](https://github.com/trofi)
- Robert Hensing [**(@roberth)**](https://github.com/roberth)
- Mutsuha Asada [**(@momeemt)**](https://github.com/momeemt)
- Parker Jones [**(@knotapun)**](https://github.com/knotapun)
- Jörg Thalheim [**(@Mic92)**](https://github.com/Mic92)
- dbdr [**(@dbdr)**](https://github.com/dbdr)
- myclevorname [**(@myclevorname)**](https://github.com/myclevorname)
- Philipp Otterbein


@ -11,10 +11,15 @@ rec {
concatStrings = concatStringsSep "";
attrsToList = a:
map (name: { inherit name; value = a.${name}; }) (builtins.attrNames a);
attrsToList =
a:
map (name: {
inherit name;
value = a.${name};
}) (builtins.attrNames a);
replaceStringsRec = from: to: string:
replaceStringsRec =
from: to: string:
# recursively replace occurrences of `from` with `to` within `string`
# example:
# replaceStringRec "--" "-" "hello-----world"
@ -28,10 +33,12 @@ rec {
squash = replaceStringsRec "\n\n\n" "\n\n";
trim = string:
trim =
string:
# trim trailing spaces and squash non-leading spaces
let
trimLine = line:
trimLine =
line:
let
# separate leading spaces from the rest
parts = split "(^ *)" line;
@ -39,19 +46,30 @@ rec {
rest = elemAt parts 2;
# drop trailing spaces
body = head (split " *$" rest);
in spaces + replaceStringsRec " " " " body;
in concatStringsSep "\n" (map trimLine (splitLines string));
in
spaces + replaceStringsRec " " " " body;
in
concatStringsSep "\n" (map trimLine (splitLines string));
# FIXME: O(n^2)
unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) [ ];
nameValuePair = name: value: { inherit name value; };
filterAttrs = pred: set:
listToAttrs (concatMap (name: let v = set.${name}; in if pred name v then [(nameValuePair name v)] else []) (attrNames set));
filterAttrs =
pred: set:
listToAttrs (
concatMap (
name:
let
v = set.${name};
in
if pred name v then [ (nameValuePair name v) ] else [ ]
) (attrNames set)
);
optionalString = cond: string: if cond then string else "";
indent = prefix: s:
concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s));
indent =
prefix: s: concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s));
}


@ -1,21 +1,24 @@
{ pkgs ? import <nixpkgs> { }
, lib ? pkgs.lib
, name ? "nix"
, tag ? "latest"
, bundleNixpkgs ? true
, channelName ? "nixpkgs"
, channelURL ? "https://nixos.org/channels/nixpkgs-unstable"
, extraPkgs ? []
, maxLayers ? 100
, nixConf ? {}
, flake-registry ? null
, uid ? 0
, gid ? 0
, uname ? "root"
, gname ? "root"
{
pkgs ? import <nixpkgs> { },
lib ? pkgs.lib,
name ? "nix",
tag ? "latest",
bundleNixpkgs ? true,
channelName ? "nixpkgs",
channelURL ? "https://nixos.org/channels/nixpkgs-unstable",
extraPkgs ? [ ],
maxLayers ? 100,
nixConf ? { },
flake-registry ? null,
uid ? 0,
gid ? 0,
uname ? "root",
gname ? "root",
}:
let
defaultPkgs = with pkgs; [
defaultPkgs =
with pkgs;
[
nix
bashInteractive
coreutils-full
@ -32,9 +35,11 @@ let
iana-etc
git
openssh
] ++ extraPkgs;
]
++ extraPkgs;
users = {
users =
{
root = {
uid = 0;
@ -54,7 +59,8 @@ let
description = "Unprivileged account (don't use!)";
};
} // lib.optionalAttrs (uid != 0) {
}
// lib.optionalAttrs (uid != 0) {
"${uname}" = {
uid = uid;
shell = "${pkgs.bashInteractive}/bin/bash";
@ -63,10 +69,9 @@ let
groups = [ "${gname}" ];
description = "Nix user";
};
} // lib.listToAttrs (
map
(
n: {
}
// lib.listToAttrs (
map (n: {
name = "nixbld${toString n}";
value = {
uid = 30000 + n;
@ -74,39 +79,35 @@ let
groups = [ "nixbld" ];
description = "Nix build user ${toString n}";
};
}
)
(lib.lists.range 1 32)
}) (lib.lists.range 1 32)
);
groups = {
groups =
{
root.gid = 0;
nixbld.gid = 30000;
nobody.gid = 65534;
} // lib.optionalAttrs (gid != 0) {
}
// lib.optionalAttrs (gid != 0) {
"${gname}".gid = gid;
};
userToPasswd = (
k:
{ uid
, gid ? 65534
, home ? "/var/empty"
, description ? ""
, shell ? "/bin/false"
, groups ? [ ]
}: "${k}:x:${toString uid}:${toString gid}:${description}:${home}:${shell}"
);
passwdContents = (
lib.concatStringsSep "\n"
(lib.attrValues (lib.mapAttrs userToPasswd users))
{
uid,
gid ? 65534,
home ? "/var/empty",
description ? "",
shell ? "/bin/false",
groups ? [ ],
}:
"${k}:x:${toString uid}:${toString gid}:${description}:${home}:${shell}"
);
passwdContents = (lib.concatStringsSep "\n" (lib.attrValues (lib.mapAttrs userToPasswd users)));
userToShadow = k: { ... }: "${k}:!:1::::::";
shadowContents = (
lib.concatStringsSep "\n"
(lib.attrValues (lib.mapAttrs userToShadow users))
);
shadowContents = (lib.concatStringsSep "\n" (lib.attrValues (lib.mapAttrs userToShadow users)));
# Map groups to members
# {
@ -116,42 +117,35 @@ let
let
# Create a flat list of user/group mappings
mappings = (
builtins.foldl'
(
builtins.foldl' (
acc: user:
let
groups = users.${user}.groups or [ ];
in
acc ++ map
(group: {
acc
++ map (group: {
inherit user group;
})
groups
)
[ ]
(lib.attrNames users)
}) groups
) [ ] (lib.attrNames users)
);
in
(
builtins.foldl'
(
acc: v: acc // {
(builtins.foldl' (
acc: v:
acc
// {
${v.group} = acc.${v.group} or [ ] ++ [ v.user ];
}
)
{ }
mappings)
) { } mappings)
);
groupToGroup = k: { gid }:
groupToGroup =
k:
{ gid }:
let
members = groupMemberMap.${k} or [ ];
in
"${k}:x:${toString gid}:${lib.concatStringsSep "," members}";
groupContents = (
lib.concatStringsSep "\n"
(lib.attrValues (lib.mapAttrs groupToGroup groups))
);
groupContents = (lib.concatStringsSep "\n" (lib.attrValues (lib.mapAttrs groupToGroup groups)));
defaultNixConf = {
sandbox = "false";
@ -159,11 +153,17 @@ let
trusted-public-keys = [ "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=" ];
};
nixConfContents = (lib.concatStringsSep "\n" (lib.mapAttrsFlatten (n: v:
nixConfContents =
(lib.concatStringsSep "\n" (
lib.mapAttrsFlatten (
n: v:
let
vStr = if builtins.isList v then lib.concatStringsSep " " v else v;
in
"${n} = ${vStr}") (defaultNixConf // nixConf))) + "\n";
"${n} = ${vStr}"
) (defaultNixConf // nixConf)
))
+ "\n";
userHome = if uid == 0 then "/root" else "/home/${uname}";
@ -184,13 +184,19 @@ let
manifest = pkgs.buildPackages.runCommand "manifest.nix" { } ''
cat > $out <<EOF
[
${lib.concatStringsSep "\n" (builtins.map (drv: let
${lib.concatStringsSep "\n" (
builtins.map (
drv:
let
outputs = drv.outputsToInstall or [ "out" ];
in ''
in
''
{
${lib.concatStringsSep "\n" (builtins.map (output: ''
${lib.concatStringsSep "\n" (
builtins.map (output: ''
${output} = { outPath = "${lib.getOutput output drv}"; };
'') outputs)}
'') outputs
)}
outputs = [ ${lib.concatStringsSep " " (builtins.map (x: "\"${x}\"") outputs)} ];
name = "${drv.name}";
outPath = "${drv}";
@ -198,7 +204,9 @@ let
type = "derivation";
meta = { };
}
'') defaultPkgs)}
''
) defaultPkgs
)}
]
EOF
'';
@ -207,7 +215,8 @@ let
cp -a ${rootEnv}/* $out/
ln -s ${manifest} $out/manifest.nix
'';
flake-registry-path = if (flake-registry == null) then
flake-registry-path =
if (flake-registry == null) then
null
else if (builtins.readFileType (toString flake-registry)) == "directory" then
"${flake-registry}/flake-registry.json"
@ -216,7 +225,12 @@ let
in
pkgs.runCommand "base-system"
{
inherit passwdContents groupContents shadowContents nixConfContents;
inherit
passwdContents
groupContents
shadowContents
nixConfContents
;
passAsFile = [
"passwdContents"
"groupContents"
@ -225,7 +239,9 @@ let
];
allowSubstitutes = false;
preferLocalBuild = true;
} (''
}
(
''
env
set -x
mkdir -p $out/etc
@ -272,7 +288,8 @@ let
ln -s ${pkgs.coreutils}/bin/env $out/usr/bin/env
ln -s ${pkgs.bashInteractive}/bin/bash $out/bin/sh
'' + (lib.optionalString (flake-registry-path != null) ''
''
+ (lib.optionalString (flake-registry-path != null) ''
nixCacheDir="${userHome}/.cache/nix"
mkdir -p $out$nixCacheDir
globalFlakeRegistryPath="$nixCacheDir/flake-registry.json"
@ -280,12 +297,21 @@ let
mkdir -p $out/nix/var/nix/gcroots/auto
rootName=$(${pkgs.nix}/bin/nix --extra-experimental-features nix-command hash file --type sha1 --base32 <(echo -n $globalFlakeRegistryPath))
ln -s $globalFlakeRegistryPath $out/nix/var/nix/gcroots/auto/$rootName
''));
'')
);
in
pkgs.dockerTools.buildLayeredImageWithNixDb {
inherit name tag maxLayers uid gid uname gname;
inherit
name
tag
maxLayers
uid
gid
uname
gname
;
contents = [ baseSystem ];
@ -305,15 +331,19 @@ pkgs.dockerTools.buildLayeredImageWithNixDb {
User = "${toString uid}:${toString gid}";
Env = [
"USER=${uname}"
"PATH=${lib.concatStringsSep ":" [
"PATH=${
lib.concatStringsSep ":" [
"${userHome}/.nix-profile/bin"
"/nix/var/nix/profiles/default/bin"
"/nix/var/nix/profiles/default/sbin"
]}"
"MANPATH=${lib.concatStringsSep ":" [
]
}"
"MANPATH=${
lib.concatStringsSep ":" [
"${userHome}/.nix-profile/share/man"
"/nix/var/nix/profiles/default/share/man"
]}"
]
}"
"SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"
"GIT_SSL_CAINFO=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"
"NIX_SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"

flake.lock (generated)

@ -36,6 +36,24 @@
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"git-hooks-nix": {
"inputs": {
"flake-compat": [],
@ -61,6 +79,24 @@
"type": "github"
}
},
"nixfmt": {
"inputs": {
"flake-utils": "flake-utils"
},
"locked": {
"lastModified": 1736283758,
"narHash": "sha256-hrKhUp2V2fk/dvzTTHFqvtOg000G1e+jyIam+D4XqhA=",
"owner": "NixOS",
"repo": "nixfmt",
"rev": "8d4bd690c247004d90d8554f0b746b1231fe2436",
"type": "github"
},
"original": {
"owner": "NixOS",
"repo": "nixfmt",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1734359947,
@ -114,10 +150,26 @@
"flake-compat": "flake-compat",
"flake-parts": "flake-parts",
"git-hooks-nix": "git-hooks-nix",
"nixfmt": "nixfmt",
"nixpkgs": "nixpkgs",
"nixpkgs-23-11": "nixpkgs-23-11",
"nixpkgs-regression": "nixpkgs-regression"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",

flake.nix

@ -5,7 +5,10 @@
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446";
inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
inputs.flake-compat = {
url = "github:edolstra/flake-compat";
flake = false;
};
# dev tooling
inputs.flake-parts.url = "github:hercules-ci/flake-parts";
@ -17,9 +20,15 @@
# work around 7730 and https://github.com/NixOS/nix/issues/7807
inputs.git-hooks-nix.inputs.flake-compat.follows = "";
inputs.git-hooks-nix.inputs.gitignore.follows = "";
inputs.nixfmt.url = "github:NixOS/nixfmt";
outputs = inputs@{ self, nixpkgs, nixpkgs-regression, ... }:
outputs =
inputs@{
self,
nixpkgs,
nixpkgs-regression,
...
}:
let
inherit (nixpkgs) lib;
@ -27,9 +36,15 @@
officialRelease = false;
linux32BitSystems = [ "i686-linux" ];
linux64BitSystems = [ "x86_64-linux" "aarch64-linux" ];
linux64BitSystems = [
"x86_64-linux"
"aarch64-linux"
];
linuxSystems = linux32BitSystems ++ linux64BitSystems;
darwinSystems = [ "x86_64-darwin" "aarch64-darwin" ];
darwinSystems = [
"x86_64-darwin"
"aarch64-darwin"
];
systems = linuxSystems ++ darwinSystems;
crossSystems = [
@ -66,56 +81,59 @@
forAllCrossSystems = lib.genAttrs crossSystems;
forAllStdenvs = f:
lib.listToAttrs
(map
(stdenvName: {
name = "${stdenvName}Packages";
value = f stdenvName;
})
stdenvs);
forAllStdenvs = lib.genAttrs stdenvs;
# We don't apply flake-parts to the whole flake so that non-development attributes
# load without fetching any development inputs.
devFlake = inputs.flake-parts.lib.mkFlake { inherit inputs; } {
imports = [ ./maintainers/flake-module.nix ];
systems = lib.subtractLists crossSystems systems;
perSystem = { system, ... }: {
perSystem =
{ system, ... }:
{
_module.args.pkgs = nixpkgsFor.${system}.native;
};
};
# Memoize nixpkgs for different platforms for efficiency.
nixpkgsFor = forAllSystems
(system: let
make-pkgs = crossSystem: stdenv: import nixpkgs {
nixpkgsFor = forAllSystems (
system:
let
make-pkgs =
crossSystem:
forAllStdenvs (
stdenv:
import nixpkgs {
localSystem = {
inherit system;
};
crossSystem = if crossSystem == null then null else {
crossSystem =
if crossSystem == null then
null
else
{
config = crossSystem;
} // lib.optionalAttrs (crossSystem == "x86_64-unknown-freebsd13") {
}
// lib.optionalAttrs (crossSystem == "x86_64-unknown-freebsd13") {
useLLVM = true;
};
overlays = [
(overlayFor (p: p.${stdenv}))
(overlayFor (pkgs: pkgs.${stdenv}))
];
};
stdenvs = forAllStdenvs (make-pkgs null);
native = stdenvs.stdenvPackages;
in {
inherit stdenvs native;
static = native.pkgsStatic;
llvm = native.pkgsLLVM;
cross = forAllCrossSystems (crossSystem: make-pkgs crossSystem "stdenv");
});
}
);
in
rec {
nativeForStdenv = make-pkgs null;
crossForStdenv = forAllCrossSystems make-pkgs;
# Alias for convenience
native = nativeForStdenv.stdenv;
cross = forAllCrossSystems (crossSystem: crossForStdenv.${crossSystem}.stdenv);
}
);
binaryTarball = nix: pkgs: pkgs.callPackage ./scripts/binary-tarball.nix {
inherit nix;
};
overlayFor = getStdenv: final: prev:
overlayFor =
getStdenv: final: prev:
let
stdenv = getStdenv final;
in
@ -162,12 +180,19 @@
# See https://github.com/NixOS/nixpkgs/pull/214409
# Remove when fixed in this flake's nixpkgs
pre-commit =
if prev.stdenv.hostPlatform.system == "i686-linux"
then (prev.pre-commit.override (o: { dotnet-sdk = ""; })).overridePythonAttrs (o: { doCheck = false; })
else prev.pre-commit;
if prev.stdenv.hostPlatform.system == "i686-linux" then
(prev.pre-commit.override (o: {
dotnet-sdk = "";
})).overridePythonAttrs
(o: {
doCheck = false;
})
else
prev.pre-commit;
};
in {
in
{
# A Nixpkgs overlay that overrides the 'nix' and
# 'nix-perl-bindings' packages.
overlays.default = overlayFor (p: p.stdenv);
@ -175,7 +200,6 @@
hydraJobs = import ./packaging/hydra.nix {
inherit
inputs
binaryTarball
forAllCrossSystems
forAllSystems
lib
@ -186,40 +210,53 @@
;
};
checks = forAllSystems (system: {
checks = forAllSystems (
system:
{
installerScriptForGHA = self.hydraJobs.installerScriptForGHA.${system};
installTests = self.hydraJobs.installTests.${system};
nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
rl-next =
let pkgs = nixpkgsFor.${system}.native;
in pkgs.buildPackages.runCommand "test-rl-next-release-notes" { } ''
let
pkgs = nixpkgsFor.${system}.native;
in
pkgs.buildPackages.runCommand "test-rl-next-release-notes" { } ''
LANG=C.UTF-8 ${pkgs.changelog-d}/bin/changelog-d ${./doc/manual/rl-next} >$out
'';
repl-completion = nixpkgsFor.${system}.native.callPackage ./tests/repl-completion.nix { };
} // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
}
// (lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
dockerImage = self.hydraJobs.dockerImage.${system};
} // (lib.optionalAttrs (!(builtins.elem system linux32BitSystems))) {
}
// (lib.optionalAttrs (!(builtins.elem system linux32BitSystems))) {
# Some perl dependencies are broken on i686-linux.
# Since the support is only best-effort there, disable the perl
# bindings
perlBindings = self.hydraJobs.perlBindings.${system};
}
# Add "passthru" tests
// flatMapAttrs ({
//
flatMapAttrs
(
{
"" = nixpkgsFor.${system}.native;
} // lib.optionalAttrs (! nixpkgsFor.${system}.native.stdenv.hostPlatform.isDarwin) {
}
// lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.hostPlatform.isDarwin) {
# TODO: enable static builds for darwin, blocked on:
# https://github.com/NixOS/nixpkgs/issues/320448
# TODO: disabled to speed up GHA CI.
#"static-" = nixpkgsFor.${system}.static;
})
(nixpkgsPrefix: nixpkgs:
flatMapAttrs nixpkgs.nixComponents
(pkgName: pkg:
flatMapAttrs pkg.tests or {}
(testName: test: {
#"static-" = nixpkgsFor.${system}.native.pkgsStatic;
}
)
(
nixpkgsPrefix: nixpkgs:
flatMapAttrs nixpkgs.nixComponents (
pkgName: pkg:
flatMapAttrs pkg.tests or { } (
testName: test: {
"${nixpkgsPrefix}${pkgName}-${testName}" = test;
})
}
)
)
// lib.optionalAttrs (nixpkgs.stdenv.hostPlatform == nixpkgs.stdenv.buildPlatform) {
"${nixpkgsPrefix}nix-functional-tests" = nixpkgs.nixComponents.nix-functional-tests;
@ -228,11 +265,14 @@
// devFlake.checks.${system} or { }
);
packages = forAllSystems (system:
{ # Here we put attributes that map 1:1 into packages.<system>, ie
packages = forAllSystems (
system:
{
# Here we put attributes that map 1:1 into packages.<system>, ie
# for which we don't apply the full build matrix such as cross or static.
inherit (nixpkgsFor.${system}.native)
changelog-d;
changelog-d
;
default = self.packages.${system}.nix;
installerScriptForGHA = self.hydraJobs.installerScriptForGHA.${system};
binaryTarball = self.hydraJobs.binaryTarball.${system};
@ -243,8 +283,10 @@
nix-external-api-docs = nixpkgsFor.${system}.native.nixComponents.nix-external-api-docs;
}
# We need to flatten recursive attribute sets of derivations to pass `flake check`.
// flatMapAttrs
{ # Components we'll iterate over in the upcoming lambda
//
flatMapAttrs
{
# Components we'll iterate over in the upcoming lambda
"nix-util" = { };
"nix-util-c" = { };
"nix-util-test-support" = { };
@ -275,33 +317,55 @@
"nix-everything" = { };
"nix-functional-tests" = { supportsCross = false; };
"nix-functional-tests" = {
supportsCross = false;
};
"nix-perl-bindings" = { supportsCross = false; };
"nix-perl-bindings" = {
supportsCross = false;
};
}
(pkgName: { supportsCross ? true }: {
(
pkgName:
{
supportsCross ? true,
}:
{
# These attributes go right into `packages.<system>`.
"${pkgName}" = nixpkgsFor.${system}.native.nixComponents.${pkgName};
"${pkgName}-static" = nixpkgsFor.${system}.static.nixComponents.${pkgName};
"${pkgName}-llvm" = nixpkgsFor.${system}.llvm.nixComponents.${pkgName};
"${pkgName}-static" = nixpkgsFor.${system}.native.pkgsStatic.nixComponents.${pkgName};
"${pkgName}-llvm" = nixpkgsFor.${system}.native.pkgsLLVM.nixComponents.${pkgName};
}
// lib.optionalAttrs supportsCross (flatMapAttrs (lib.genAttrs crossSystems (_: { })) (crossSystem: {}: {
// lib.optionalAttrs supportsCross (
flatMapAttrs (lib.genAttrs crossSystems (_: { })) (
crossSystem:
{ }:
{
# These attributes go right into `packages.<system>`.
"${pkgName}-${crossSystem}" = nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName};
}))
// flatMapAttrs (lib.genAttrs stdenvs (_: { })) (stdenvName: {}: {
}
)
)
// flatMapAttrs (lib.genAttrs stdenvs (_: { })) (
stdenvName:
{ }:
{
# These attributes go right into `packages.<system>`.
"${pkgName}-${stdenvName}" = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".nixComponents.${pkgName};
})
"${pkgName}-${stdenvName}" =
nixpkgsFor.${system}.nativeForStdenv.${stdenvName}.nixComponents.${pkgName};
}
)
)
// lib.optionalAttrs (builtins.elem system linux64BitSystems) {
dockerImage =
let
pkgs = nixpkgsFor.${system}.native;
image = import ./docker.nix { inherit pkgs; tag = pkgs.nix.version; };
image = import ./docker.nix {
inherit pkgs;
tag = pkgs.nix.version;
};
in
pkgs.runCommand
"docker-image-tarball-${pkgs.nix.version}"
pkgs.runCommand "docker-image-tarball-${pkgs.nix.version}"
{ meta.description = "Docker image with Nix for ${system}"; }
''
mkdir -p $out/nix-support
@ -309,29 +373,53 @@
ln -s ${image} $image
echo "file binary-dist $image" >> $out/nix-support/hydra-build-products
'';
});
}
);
devShells = let
makeShell = import ./packaging/dev-shell.nix { inherit lib devFlake; };
devShells =
let
makeShell = import ./packaging/dev-shell.nix { inherit inputs lib devFlake; };
prefixAttrs = prefix: lib.concatMapAttrs (k: v: { "${prefix}-${k}" = v; });
in
forAllSystems (system:
prefixAttrs "native" (forAllStdenvs (stdenvName: makeShell {
pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages";
})) //
lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.isDarwin) (
prefixAttrs "static" (forAllStdenvs (stdenvName: makeShell {
pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".pkgsStatic;
})) //
prefixAttrs "llvm" (forAllStdenvs (stdenvName: makeShell {
pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".pkgsLLVM;
})) //
prefixAttrs "cross" (forAllCrossSystems (crossSystem: makeShell {
forAllSystems (
system:
prefixAttrs "native" (
forAllStdenvs (
stdenvName:
makeShell {
pkgs = nixpkgsFor.${system}.nativeForStdenv.${stdenvName};
}
)
)
// lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.isDarwin) (
prefixAttrs "static" (
forAllStdenvs (
stdenvName:
makeShell {
pkgs = nixpkgsFor.${system}.nativeForStdenv.${stdenvName}.pkgsStatic;
}
)
)
// prefixAttrs "llvm" (
forAllStdenvs (
stdenvName:
makeShell {
pkgs = nixpkgsFor.${system}.nativeForStdenv.${stdenvName}.pkgsLLVM;
}
)
)
// prefixAttrs "cross" (
forAllCrossSystems (
crossSystem:
makeShell {
pkgs = nixpkgsFor.${system}.cross.${crossSystem};
}))
) //
{
default = self.devShells.${system}.native-stdenvPackages;
}
)
)
)
// {
native = self.devShells.${system}.native-stdenv;
default = self.devShells.${system}.native;
}
);
};


@ -98,5 +98,39 @@
"aks.kenji@protonmail.com": "a-kenji",
"54070204+0x5a4@users.noreply.github.com": "0x5a4",
"brian@bmcgee.ie": "brianmcgee",
"squalus@squalus.net": "squalus"
"squalus@squalus.net": "squalus",
"kusold@users.noreply.github.com": "kusold",
"37929162+mergify[bot]@users.noreply.github.com": "mergify[bot]",
"ilja@mailbox.org": "suruaku",
"and.ham95@gmail.com": "andrewhamon",
"andy.hamon@discordapp.com": "andrewhamon",
"siddarthkay@gmail.com": "siddarthkay",
"apoelstra@wpsoftware.net": "apoelstra",
"asmadeus@codewreck.org": "martinetd",
"tristan.ross@midstall.com": "RossComputerGuy",
"bryanlais@gmail.com": "bryango",
"157494086+allrealmsoflife@users.noreply.github.com": "allrealmsoflife",
"ConnorBaker01@gmail.com": "ConnorBaker",
"me@momee.mt": "momeemt",
"martin@push-f.com": "not-my-profile",
"90870942+trueNAHO@users.noreply.github.com": "trueNAHO",
"49885263+knotapun@users.noreply.github.com": "knotapun",
"iam@lach.pw": "CertainLach",
"elikowa@gmail.com": "elikoga",
"greg.curtis@jetpack.io": "gcurtis",
"git@sphalerite.org": "lheckemann",
"mightyiampresence@gmail.com": "mightyiam",
"spamfaenger@gmx.de": "dwt",
"graham@grahamc.com": "grahamc",
"wh0@users.noreply.github.com": "wh0",
"25388474+mupdt@users.noreply.github.com": "mupdt",
"anatoli@rainforce.org": "abitrolly",
"h0nIg@users.noreply.github.com": "h0nIg",
"CyberShadow@users.noreply.github.com": "CyberShadow",
"gavinnjohn@gmail.com": "Pandapip1",
"picnoir@alternativebit.fr": "picnoir",
"140354451+myclevorname@users.noreply.github.com": "myclevorname",
"bonniot@gmail.com": "dbdr",
"jack@wilsdon.me": "jackwilsdon",
"143541718+WxNzEMof@users.noreply.github.com": "the-sun-will-rise-tomorrow"
}


@ -86,5 +86,37 @@
"Aleksanaa": "Aleksana",
"YorikSar": "Yuriy Taraday",
"kjeremy": "Jeremy Kolb",
"artemist": "Artemis Tosini"
"artemist": "Artemis Tosini",
"the-sun-will-rise-tomorrow": null,
"gcurtis": "Greg Curtis",
"ConnorBaker": "Connor Baker",
"abitrolly": "Anatoli Babenia",
"allrealmsoflife": "Domagoj Mi\u0161kovi\u0107",
"andrewhamon": "Andy Hamon",
"picnoir": "F\u00e9lix",
"dbdr": null,
"suruaku": "Ilja",
"jackwilsdon": "Jack Wilsdon",
"mergify[bot]": null,
"kusold": "Mike Kusold",
"lheckemann": "Linus Heckemann",
"h0nIg": null,
"grahamc": "Graham Christensen",
"not-my-profile": "Martin Fischer",
"CyberShadow": "Vladimir Panteleev",
"Pandapip1": "Gavin John",
"RossComputerGuy": "Tristan Ross",
"elikoga": null,
"martinetd": "Dominique Martinet",
"knotapun": "Parker Jones",
"mightyiam": "Shahar \"Dawn\" Or",
"siddarthkay": "Siddarth Kumar",
"apoelstra": "Andrew Poelstra",
"myclevorname": null,
"CertainLach": "Yaroslav Bolyukin",
"trueNAHO": "NAHO",
"wh0": null,
"mupdt": "Matej Urbas",
"momeemt": "Mutsuha Asada",
"dwt": "\u202erekc\u00e4H nitraM\u202e"
}


@ -1,11 +1,18 @@
{ lib, getSystem, inputs, ... }:
{
lib,
getSystem,
inputs,
...
}:
{
imports = [
inputs.git-hooks-nix.flakeModule
];
perSystem = { config, pkgs, ... }: {
perSystem =
{ config, pkgs, ... }:
{
# https://flake.parts/options/git-hooks-nix#options
pre-commit.settings = {
@ -31,6 +38,35 @@
touch $out
''}";
};
nixfmt-rfc-style = {
enable = true;
package = inputs.nixfmt.packages.${pkgs.hostPlatform.system}.default;
excludes = [
# Invalid
''^tests/functional/lang/parse-.*\.nix$''
# Formatting-sensitive
''^tests/functional/lang/eval-okay-curpos\.nix$''
''^tests/functional/lang/.*comment.*\.nix$''
''^tests/functional/lang/.*newline.*\.nix$''
''^tests/functional/lang/.*eol.*\.nix$''
# Syntax tests
''^tests/functional/shell.shebang\.nix$''
''^tests/functional/lang/eval-okay-ind-string\.nix$''
# Not supported by nixfmt
''^tests/functional/lang/eval-okay-deprecate-cursed-or\.nix$''
''^tests/functional/lang/eval-okay-attrs5\.nix$''
# More syntax tests
# These tests, or parts of them, should have been parse-* test cases.
''^tests/functional/lang/eval-fail-eol-2\.nix$''
''^tests/functional/lang/eval-fail-path-slash\.nix$''
''^tests/functional/lang/eval-fail-toJSON-non-utf-8\.nix$''
''^tests/functional/lang/eval-fail-set\.nix$''
];
};
clang-format = {
enable = true;
# https://github.com/cachix/git-hooks.nix/pull/532
@ -660,7 +696,6 @@
''^src/libutil-tests/data/git/check-data\.sh$''
];
};
# TODO: nixfmt, https://github.com/NixOS/nixfmt/issues/153
};
};
};


@ -144,12 +144,10 @@ release:
Make a pull request and auto-merge it.
* Create a milestone for the next release, move all unresolved issues
from the previous milestone, and close the previous milestone. Set
the date for the next milestone 6 weeks from now.
* Create a backport label.
* Add the new backport label to `.mergify.yml`.
* Post an [announcement on Discourse](https://discourse.nixos.org/c/announcements/8), including the contents of
`rl-$VERSION.md`.


@ -42,7 +42,7 @@ my $flakeUrl = $evalInfo->{flake};
my $flakeInfo = decode_json(`nix flake metadata --json "$flakeUrl"` or die) if $flakeUrl;
my $nixRev = ($flakeInfo ? $flakeInfo->{revision} : $evalInfo->{jobsetevalinputs}->{nix}->{revision}) or die;
my $buildInfo = decode_json(fetch("$evalUrl/job/build.nix.x86_64-linux", 'application/json'));
my $buildInfo = decode_json(fetch("$evalUrl/job/build.nix-everything.x86_64-linux", 'application/json'));
#print Dumper($buildInfo);
my $releaseName = $buildInfo->{nixname};
@ -91,7 +91,7 @@ sub getStorePath {
sub copyManual {
my $manual;
eval {
$manual = getStorePath("build.nix.x86_64-linux", "doc");
$manual = getStorePath("manual");
};
if ($@) {
warn "$@";
@ -240,12 +240,12 @@ if ($haveDocker) {
# Upload nix-fallback-paths.nix.
write_file("$tmpDir/fallback-paths.nix",
"{\n" .
" x86_64-linux = \"" . getStorePath("build.nix.x86_64-linux") . "\";\n" .
" i686-linux = \"" . getStorePath("build.nix.i686-linux") . "\";\n" .
" aarch64-linux = \"" . getStorePath("build.nix.aarch64-linux") . "\";\n" .
" riscv64-linux = \"" . getStorePath("buildCross.nix.riscv64-unknown-linux-gnu.x86_64-linux") . "\";\n" .
" x86_64-darwin = \"" . getStorePath("build.nix.x86_64-darwin") . "\";\n" .
" aarch64-darwin = \"" . getStorePath("build.nix.aarch64-darwin") . "\";\n" .
" x86_64-linux = \"" . getStorePath("build.nix-everything.x86_64-linux") . "\";\n" .
" i686-linux = \"" . getStorePath("build.nix-everything.i686-linux") . "\";\n" .
" aarch64-linux = \"" . getStorePath("build.nix-everything.aarch64-linux") . "\";\n" .
" riscv64-linux = \"" . getStorePath("buildCross.nix-everything.riscv64-unknown-linux-gnu.x86_64-linux") . "\";\n" .
" x86_64-darwin = \"" . getStorePath("build.nix-everything.x86_64-darwin") . "\";\n" .
" aarch64-darwin = \"" . getStorePath("build.nix-everything.aarch64-darwin") . "\";\n" .
"}\n");
# Upload release files to S3.

View file

@ -0,0 +1,6 @@
if host_machine.system() == 'windows'
  # libexpr's primops.cc creates a large object file
# Without the following flag, we'll get errors when cross-compiling to mingw32:
# Fatal error: can't write 66 bytes to section .text of src/libexpr/libnixexpr.dll.p/primops.cc.obj: 'file too big'
add_project_arguments([ '-Wa,-mbig-obj' ], language: 'cpp')
endif

View file

@ -1,14 +1,18 @@
{ runCommand
, system
, buildPackages
, cacert
, nix
{
runCommand,
system,
buildPackages,
cacert,
nix,
}:
let
installerClosureInfo = buildPackages.closureInfo {
rootPaths = [ nix cacert ];
rootPaths = [
nix
cacert
];
};
inherit (nix) version;
@ -22,18 +26,18 @@ in
runCommand "nix-binary-tarball-${version}" env ''
cp ${installerClosureInfo}/registration $TMPDIR/reginfo
cp ${./create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
substitute ${./install-nix-from-tarball.sh} $TMPDIR/install \
cp ${../scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
substitute ${../scripts/install-nix-from-tarball.sh} $TMPDIR/install \
--subst-var-by nix ${nix} \
--subst-var-by cacert ${cacert}
substitute ${./install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
substitute ${../scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
--subst-var-by nix ${nix} \
--subst-var-by cacert ${cacert}
substitute ${./install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
substitute ${../scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
--subst-var-by nix ${nix} \
--subst-var-by cacert ${cacert}
substitute ${./install-multi-user.sh} $TMPDIR/install-multi-user \
substitute ${../scripts/install-multi-user.sh} $TMPDIR/install-multi-user \
--subst-var-by nix ${nix} \
--subst-var-by cacert ${cacert}

View file

@ -13,9 +13,11 @@ let
versionSuffix = lib.optionalString (!officialRelease) "pre";
fineVersionSuffix = lib.optionalString
(!officialRelease)
"pre${builtins.substring 0 8 (src.lastModifiedDate or src.lastModified or "19700101")}_${src.shortRev or "dirty"}";
fineVersionSuffix =
lib.optionalString (!officialRelease)
"pre${
builtins.substring 0 8 (src.lastModifiedDate or src.lastModified or "19700101")
}_${src.shortRev or "dirty"}";
fineVersion = baseVersion + fineVersionSuffix;
in
@ -54,7 +56,9 @@ in
nix-cli = callPackage ../src/nix/package.nix { version = fineVersion; };
nix-functional-tests = callPackage ../src/nix-functional-tests/package.nix { version = fineVersion; };
nix-functional-tests = callPackage ../src/nix-functional-tests/package.nix {
version = fineVersion;
};
nix-manual = callPackage ../doc/manual/package.nix { version = fineVersion; };
nix-internal-api-docs = callPackage ../src/internal-api-docs/package.nix { version = fineVersion; };

View file

@ -19,9 +19,7 @@ let
root = ../.;
stdenv = if prevStdenv.isDarwin && prevStdenv.isx86_64
then darwinStdenv
else prevStdenv;
stdenv = if prevStdenv.isDarwin && prevStdenv.isx86_64 then darwinStdenv else prevStdenv;
# Fix the following error with the default x86_64-darwin SDK:
#
@ -38,11 +36,14 @@ let
# Indirection for Nixpkgs to override when package.nix files are vendored
filesetToSource = lib.fileset.toSource;
/** Given a set of layers, create a mkDerivation-like function */
mkPackageBuilder = exts: userFn:
stdenv.mkDerivation (lib.extends (lib.composeManyExtensions exts) userFn);
/**
Given a set of layers, create a mkDerivation-like function
*/
mkPackageBuilder =
exts: userFn: stdenv.mkDerivation (lib.extends (lib.composeManyExtensions exts) userFn);
localSourceLayer = finalAttrs: prevAttrs:
localSourceLayer =
finalAttrs: prevAttrs:
let
workDirPath =
# Ideally we'd pick finalAttrs.workDir, but for now `mkDerivation` has
@ -51,8 +52,13 @@ let
prevAttrs.workDir;
workDirSubpath = lib.path.removePrefix root workDirPath;
sources = assert prevAttrs.fileset._type == "fileset"; prevAttrs.fileset;
src = lib.fileset.toSource { fileset = sources; inherit root; };
sources =
assert prevAttrs.fileset._type == "fileset";
prevAttrs.fileset;
src = lib.fileset.toSource {
fileset = sources;
inherit root;
};
in
{
@ -64,8 +70,7 @@ let
workDir = null;
};
mesonLayer = finalAttrs: prevAttrs:
{
mesonLayer = finalAttrs: prevAttrs: {
# NOTE:
# As of https://github.com/NixOS/nixpkgs/blob/8baf8241cea0c7b30e0b8ae73474cb3de83c1a30/pkgs/by-name/me/meson/setup-hook.sh#L26,
# `mesonBuildType` defaults to `plain` if not specified. We want our Nix-built binaries to be optimized by default.
@ -75,11 +80,18 @@ let
# Users who are debugging Nix builds are expected to set the environment variable `mesonBuildType`, per the
# guidance in https://github.com/NixOS/nix/blob/8a3fc27f1b63a08ac983ee46435a56cf49ebaf4a/doc/manual/source/development/debugging.md?plain=1#L10.
# For this reason, we don't want to refer to `finalAttrs.mesonBuildType` here, but rather use the environment variable.
preConfigure = prevAttrs.preConfigure or "" + lib.optionalString (
preConfigure =
prevAttrs.preConfigure or ""
+
lib.optionalString
(
!stdenv.hostPlatform.isWindows
# build failure
&& !stdenv.hostPlatform.isStatic
) ''
# LTO breaks exception handling on x86-64-darwin.
&& stdenv.system != "x86_64-darwin"
)
''
case "$mesonBuildType" in
release|minsize) appendToVar mesonFlags "-Db_lto=true" ;;
*) appendToVar mesonFlags "-Db_lto=false" ;;
@ -94,35 +106,34 @@ let
];
};
mesonBuildLayer = finalAttrs: prevAttrs:
{
mesonBuildLayer = finalAttrs: prevAttrs: {
nativeBuildInputs = prevAttrs.nativeBuildInputs or [ ] ++ [
pkgs.buildPackages.pkg-config
];
separateDebugInfo = !stdenv.hostPlatform.isStatic;
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
env = prevAttrs.env or {}
// lib.optionalAttrs
(stdenv.isLinux
env =
prevAttrs.env or { }
// lib.optionalAttrs (
stdenv.isLinux
&& !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")
&& !(stdenv.hostPlatform.useLLVM or false))
{ LDFLAGS = "-fuse-ld=gold"; };
&& !(stdenv.hostPlatform.useLLVM or false)
) { LDFLAGS = "-fuse-ld=gold"; };
};
mesonLibraryLayer = finalAttrs: prevAttrs:
{
mesonLibraryLayer = finalAttrs: prevAttrs: {
outputs = prevAttrs.outputs or [ "out" ] ++ [ "dev" ];
};
# Work around weird `--as-needed` linker behavior with BSD, see
# https://github.com/mesonbuild/meson/issues/3593
bsdNoLinkAsNeeded = finalAttrs: prevAttrs:
bsdNoLinkAsNeeded =
finalAttrs: prevAttrs:
lib.optionalAttrs stdenv.hostPlatform.isBSD {
mesonFlags = [ (lib.mesonBool "b_asneeded" false) ] ++ prevAttrs.mesonFlags or [ ];
};
miscGoodPractice = finalAttrs: prevAttrs:
{
miscGoodPractice = finalAttrs: prevAttrs: {
strictDeps = prevAttrs.strictDeps or true;
enableParallelBuilding = true;
};
@ -130,10 +141,15 @@ in
scope: {
inherit stdenv;
aws-sdk-cpp = (pkgs.aws-sdk-cpp.override {
apis = [ "s3" "transfer" ];
aws-sdk-cpp =
(pkgs.aws-sdk-cpp.override {
apis = [
"s3"
"transfer"
];
customMemoryManagement = false;
}).overrideAttrs {
}).overrideAttrs
{
      # only a stripped-down version is built, which takes far fewer resources
# to build, so we don't need a "big-parallel" machine.
requiredSystemFeatures = [ ];
@ -144,35 +160,39 @@ scope: {
};
# TODO Hack until https://github.com/NixOS/nixpkgs/issues/45462 is fixed.
boost = (pkgs.boost.override {
boost =
(pkgs.boost.override {
extraB2Args = [
"--with-container"
"--with-context"
"--with-coroutine"
];
}).overrideAttrs (old: {
}).overrideAttrs
(old: {
# Need to remove `--with-*` to use `--with-libraries=...`
buildPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.buildPhase;
installPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.installPhase;
});
libgit2 = pkgs.libgit2.overrideAttrs (attrs: {
cmakeFlags = attrs.cmakeFlags or []
++ [ "-DUSE_SSH=exec" ];
nativeBuildInputs = attrs.nativeBuildInputs or []
cmakeFlags = attrs.cmakeFlags or [ ] ++ [ "-DUSE_SSH=exec" ];
nativeBuildInputs =
attrs.nativeBuildInputs or [ ]
# gitMinimal does not build on Windows. See packbuilder patch.
++ lib.optionals (!stdenv.hostPlatform.isWindows) [
# Needed for `git apply`; see `prePatch`
pkgs.buildPackages.gitMinimal
];
# Only `git apply` can handle git binary patches
prePatch = attrs.prePatch or ""
prePatch =
attrs.prePatch or ""
+ lib.optionalString (!stdenv.hostPlatform.isWindows) ''
patch() {
git apply
}
'';
patches = attrs.patches or []
patches =
attrs.patches or [ ]
++ [
./patches/libgit2-mempack-thin-packfile.patch
]
@ -186,22 +206,19 @@ scope: {
inherit resolvePath filesetToSource;
mkMesonDerivation =
mkPackageBuilder [
mkMesonDerivation = mkPackageBuilder [
miscGoodPractice
localSourceLayer
mesonLayer
];
mkMesonExecutable =
mkPackageBuilder [
mkMesonExecutable = mkPackageBuilder [
miscGoodPractice
bsdNoLinkAsNeeded
localSourceLayer
mesonLayer
mesonBuildLayer
];
mkMesonLibrary =
mkPackageBuilder [
mkMesonLibrary = mkPackageBuilder [
miscGoodPractice
bsdNoLinkAsNeeded
localSourceLayer

View file

@ -1,14 +1,20 @@
{ lib, devFlake }:
{
lib,
inputs,
devFlake,
}:
{ pkgs }:
pkgs.nixComponents.nix-util.overrideAttrs (attrs:
pkgs.nixComponents.nix-util.overrideAttrs (
attrs:
let
stdenv = pkgs.nixDependencies.stdenv;
buildCanExecuteHost = stdenv.buildPlatform.canExecute stdenv.hostPlatform;
modular = devFlake.getSystem stdenv.buildPlatform.system;
transformFlag = prefix: flag:
transformFlag =
prefix: flag:
assert builtins.isString flag;
let
rest = builtins.substring 2 (builtins.stringLength flag) flag;
@ -16,7 +22,8 @@ let
"-D${prefix}:${rest}";
havePerl = stdenv.buildPlatform == stdenv.hostPlatform && stdenv.hostPlatform.isUnix;
ignoreCrossFile = flags: builtins.filter (flag: !(lib.strings.hasInfix "cross-file" flag)) flags;
in {
in
{
pname = "shell-for-" + attrs.pname;
# Remove the version suffix to avoid unnecessary attempts to substitute in nix develop
@ -71,20 +78,23 @@ in {
BOOST_INCLUDEDIR = "${lib.getDev pkgs.nixDependencies.boost}/include";
BOOST_LIBRARYDIR = "${lib.getLib pkgs.nixDependencies.boost}/lib";
# For `make format`, to work without installing pre-commit
_NIX_PRE_COMMIT_HOOKS_CONFIG =
"${(pkgs.formats.yaml { }).generate "pre-commit-config.yaml" modular.pre-commit.settings.rawConfig}";
_NIX_PRE_COMMIT_HOOKS_CONFIG = "${(pkgs.formats.yaml { }).generate "pre-commit-config.yaml"
modular.pre-commit.settings.rawConfig
}";
};
mesonFlags =
map (transformFlag "libutil") (ignoreCrossFile pkgs.nixComponents.nix-util.mesonFlags)
++ map (transformFlag "libstore") (ignoreCrossFile pkgs.nixComponents.nix-store.mesonFlags)
++ map (transformFlag "libfetchers") (ignoreCrossFile pkgs.nixComponents.nix-fetchers.mesonFlags)
++ lib.optionals havePerl (map (transformFlag "perl") (ignoreCrossFile pkgs.nixComponents.nix-perl-bindings.mesonFlags))
++ lib.optionals havePerl (
map (transformFlag "perl") (ignoreCrossFile pkgs.nixComponents.nix-perl-bindings.mesonFlags)
)
++ map (transformFlag "libexpr") (ignoreCrossFile pkgs.nixComponents.nix-expr.mesonFlags)
++ map (transformFlag "libcmd") (ignoreCrossFile pkgs.nixComponents.nix-cmd.mesonFlags)
;
++ map (transformFlag "libcmd") (ignoreCrossFile pkgs.nixComponents.nix-cmd.mesonFlags);
nativeBuildInputs = attrs.nativeBuildInputs or []
nativeBuildInputs =
attrs.nativeBuildInputs or [ ]
++ pkgs.nixComponents.nix-util.nativeBuildInputs
++ pkgs.nixComponents.nix-store.nativeBuildInputs
++ pkgs.nixComponents.nix-fetchers.nativeBuildInputs
@ -94,27 +104,30 @@ in {
++ pkgs.nixComponents.nix-internal-api-docs.nativeBuildInputs
++ pkgs.nixComponents.nix-external-api-docs.nativeBuildInputs
++ pkgs.nixComponents.nix-functional-tests.externalNativeBuildInputs
++ lib.optional
(!buildCanExecuteHost
++ lib.optional (
!buildCanExecuteHost
# Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479
&& !(stdenv.hostPlatform.isWindows && stdenv.buildPlatform.isDarwin)
&& stdenv.hostPlatform.emulatorAvailable pkgs.buildPackages
&& lib.meta.availableOn stdenv.buildPlatform (stdenv.hostPlatform.emulator pkgs.buildPackages))
pkgs.buildPackages.mesonEmulatorHook
&& lib.meta.availableOn stdenv.buildPlatform (stdenv.hostPlatform.emulator pkgs.buildPackages)
) pkgs.buildPackages.mesonEmulatorHook
++ [
pkgs.buildPackages.cmake
pkgs.buildPackages.shellcheck
pkgs.buildPackages.changelog-d
modular.pre-commit.settings.package
(pkgs.writeScriptBin "pre-commit-hooks-install"
modular.pre-commit.settings.installationScript)
(pkgs.writeScriptBin "pre-commit-hooks-install" modular.pre-commit.settings.installationScript)
inputs.nixfmt.packages.${pkgs.hostPlatform.system}.default
]
# TODO: Remove the darwin check once
# https://github.com/NixOS/nixpkgs/pull/291814 is available
++ lib.optional (stdenv.cc.isClang && !stdenv.buildPlatform.isDarwin) pkgs.buildPackages.bear
++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) (lib.hiPrio pkgs.buildPackages.clang-tools);
++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) (
lib.hiPrio pkgs.buildPackages.clang-tools
);
buildInputs = attrs.buildInputs or []
buildInputs =
attrs.buildInputs or [ ]
++ pkgs.nixComponents.nix-util.buildInputs
++ pkgs.nixComponents.nix-store.buildInputs
++ pkgs.nixComponents.nix-store-tests.externalBuildInputs
@ -123,6 +136,6 @@ in {
++ pkgs.nixComponents.nix-expr.externalPropagatedBuildInputs
++ pkgs.nixComponents.nix-cmd.buildInputs
++ lib.optionals havePerl pkgs.nixComponents.nix-perl-bindings.externalBuildInputs
++ lib.optional havePerl pkgs.perl
;
})
++ lib.optional havePerl pkgs.perl;
}
)

View file

@ -42,37 +42,46 @@
}:
let
libs =
{
inherit
nix-util
nix-util-c
nix-store
nix-store-c
nix-fetchers
nix-expr
nix-expr-c
nix-flake
nix-flake-c
nix-main
nix-main-c
nix-cmd
;
}
// lib.optionalAttrs
(!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform)
{
# Currently fails in static build
inherit
nix-perl-bindings
;
};
dev = stdenv.mkDerivation (finalAttrs: {
name = "nix-${nix-cli.version}-dev";
pname = "nix";
version = nix-cli.version;
dontUnpack = true;
dontBuild = true;
libs = map lib.getDev [
nix-cmd
nix-expr
nix-expr-c
nix-fetchers
nix-flake
nix-flake-c
nix-main
nix-main-c
nix-store
nix-store-c
nix-util
nix-util-c
] ++ lib.optionals (!stdenv.hostPlatform.isStatic) [
# Currently fails in static build
nix-perl-bindings
];
libs = map lib.getDev (lib.attrValues libs);
installPhase = ''
mkdir -p $out/nix-support
echo $libs >> $out/nix-support/propagated-build-inputs
'';
passthru = {
tests = {
pkg-config =
testers.hasPkgConfigModules {
pkg-config = testers.hasPkgConfigModules {
package = finalAttrs.finalPackage;
};
};
@ -84,6 +93,7 @@ let
libs = throw "`nix.dev.libs` is not meant to be used; use `nix.libs` instead.";
};
meta = {
mainProgram = "nix";
pkgConfigModules = [
"nix-cmd"
"nix-expr"
@ -117,11 +127,14 @@ in
];
meta.mainProgram = "nix";
}).overrideAttrs (finalAttrs: prevAttrs: {
}).overrideAttrs
(
finalAttrs: prevAttrs: {
doCheck = true;
doInstallCheck = true;
checkInputs = [
checkInputs =
[
# Make sure the unit tests have passed
nix-util-tests.tests.run
nix-store-tests.tests.run
@ -129,22 +142,19 @@ in
nix-fetchers-tests.tests.run
nix-flake-tests.tests.run
# Make sure the functional tests have passed
nix-functional-tests
# dev bundle is ok
# (checkInputs must be empty paths??)
(runCommand "check-pkg-config" { checked = dev.tests.pkg-config; } "mkdir $out")
] ++
lib.optionals (!stdenv.hostPlatform.isStatic) (
# Perl currently fails in static build
(if stdenv.buildPlatform.canExecute stdenv.hostPlatform
then [
# TODO: add perl.tests
nix-perl-bindings
]
else [
++ lib.optionals
(!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform)
[
# Perl currently fails in static build
# TODO: Split out tests into a separate derivation?
nix-perl-bindings
]));
installCheckInputs = [
nix-functional-tests
];
passthru = prevAttrs.passthru // {
inherit (nix-cli) version;
@ -166,21 +176,7 @@ in
disallowedReferences = nix.all;
```
*/
libs = {
inherit
nix-util
nix-util-c
nix-store
nix-store-c
nix-fetchers
nix-expr
nix-expr-c
nix-flake
nix-flake-c
nix-main
nix-main-c
;
};
inherit libs;
tests = prevAttrs.passthru.tests or { } // {
# TODO: create a proper fixpoint and:
@ -196,11 +192,19 @@ in
inherit dev;
inherit devdoc;
doc = nix-manual;
outputs = [ "out" "dev" "devdoc" "doc" ];
all = lib.attrValues (lib.genAttrs finalAttrs.passthru.outputs (outName: finalAttrs.finalPackage.${outName}));
outputs = [
"out"
"dev"
"devdoc"
"doc"
];
all = lib.attrValues (
lib.genAttrs finalAttrs.passthru.outputs (outName: finalAttrs.finalPackage.${outName})
);
};
meta = prevAttrs.meta // {
description = "The Nix package manager";
pkgConfigModules = dev.meta.pkgConfigModules;
};
})
}
)

View file

@ -1,22 +1,24 @@
{ inputs
, binaryTarball
, forAllCrossSystems
, forAllSystems
, lib
, linux64BitSystems
, nixpkgsFor
, self
, officialRelease
{
inputs,
forAllCrossSystems,
forAllSystems,
lib,
linux64BitSystems,
nixpkgsFor,
self,
officialRelease,
}:
let
inherit (inputs) nixpkgs nixpkgs-regression;
installScriptFor = tarballs:
nixpkgsFor.x86_64-linux.native.callPackage ../scripts/installer.nix {
installScriptFor =
tarballs:
nixpkgsFor.x86_64-linux.native.callPackage ./installer {
inherit tarballs;
};
testNixVersions = pkgs: daemon:
testNixVersions =
pkgs: daemon:
pkgs.nixComponents.nix-functional-tests.override {
pname = "nix-daemon-compat-tests";
version = "${pkgs.nix.version}-with-daemon-${daemon.version}";
@ -54,44 +56,72 @@ let
in
{
# Binary package for various platforms.
build = forAllPackages (pkgName:
forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.${pkgName}));
shellInputs = removeAttrs
(forAllSystems (system: self.devShells.${system}.default.inputDerivation))
[ "i686-linux" ];
buildStatic = forAllPackages (pkgName:
lib.genAttrs linux64BitSystems (system: nixpkgsFor.${system}.static.nixComponents.${pkgName}));
buildCross = forAllPackages (pkgName:
# Hack to avoid non-evaling package
(if pkgName == "nix-functional-tests" then lib.flip builtins.removeAttrs ["x86_64-w64-mingw32"] else lib.id)
(forAllCrossSystems (crossSystem:
lib.genAttrs [ "x86_64-linux" ] (system: nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName}))));
buildNoGc = let
components = forAllSystems (system:
nixpkgsFor.${system}.native.nixComponents.overrideScope (self: super: {
nix-expr = super.nix-expr.override { enableGC = false; };
})
build = forAllPackages (
pkgName: forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.${pkgName})
);
in forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName}));
shellInputs = removeAttrs (forAllSystems (
system: self.devShells.${system}.default.inputDerivation
)) [ "i686-linux" ];
buildStatic = forAllPackages (
pkgName:
lib.genAttrs linux64BitSystems (
system: nixpkgsFor.${system}.native.pkgsStatic.nixComponents.${pkgName}
)
);
buildCross = forAllPackages (
pkgName:
# Hack to avoid non-evaling package
(
if pkgName == "nix-functional-tests" then
lib.flip builtins.removeAttrs [ "x86_64-w64-mingw32" ]
else
lib.id
)
(
forAllCrossSystems (
crossSystem:
lib.genAttrs [ "x86_64-linux" ] (
system: nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName}
)
)
)
);
buildNoGc =
let
components = forAllSystems (
system:
nixpkgsFor.${system}.native.nixComponents.overrideScope (
self: super: {
nix-expr = super.nix-expr.override { enableGC = false; };
}
)
);
in
forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName}));
buildNoTests = forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.nix-cli);
# Toggles some settings for better coverage. Windows needs these
  # library combinations, and Debian builds Nix with GNU readline too.
buildReadlineNoMarkdown = let
components = forAllSystems (system:
nixpkgsFor.${system}.native.nixComponents.overrideScope (self: super: {
buildReadlineNoMarkdown =
let
components = forAllSystems (
system:
nixpkgsFor.${system}.native.nixComponents.overrideScope (
self: super: {
nix-cmd = super.nix-cmd.override {
enableMarkdown = false;
readlineFlavor = "readline";
};
})
}
)
);
in forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName}));
in
forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName}));
# Perl bindings for various platforms.
perlBindings = forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.nix-perl-bindings);
@ -99,13 +129,16 @@ in
# Binary tarball for various platforms, containing a Nix store
  # with the closure of the 'nix' package, and the second half of
# the installation script.
binaryTarball = forAllSystems (system: binaryTarball nixpkgsFor.${system}.native.nix nixpkgsFor.${system}.native);
binaryTarball = forAllSystems (
system: nixpkgsFor.${system}.native.callPackage ./binary-tarball.nix { }
);
binaryTarballCross = lib.genAttrs [ "x86_64-linux" ] (system:
forAllCrossSystems (crossSystem:
binaryTarball
nixpkgsFor.${system}.cross.${crossSystem}.nix
nixpkgsFor.${system}.cross.${crossSystem}));
binaryTarballCross = lib.genAttrs [ "x86_64-linux" ] (
system:
forAllCrossSystems (
crossSystem: nixpkgsFor.${system}.cross.${crossSystem}.callPackage ./binary-tarball.nix { }
)
);
# The first half of the installation script. This is uploaded
# to https://nixos.org/nix/install. It downloads the binary
@ -124,9 +157,12 @@ in
self.hydraJobs.binaryTarballCross."x86_64-linux"."riscv64-unknown-linux-gnu"
];
installerScriptForGHA = forAllSystems (system: nixpkgsFor.${system}.native.callPackage ../scripts/installer.nix {
installerScriptForGHA = forAllSystems (
system:
nixpkgsFor.${system}.native.callPackage ./installer {
tarballs = [ self.hydraJobs.binaryTarball.${system} ];
});
}
);
# docker image with Nix inside
dockerImage = lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage);
@ -147,7 +183,12 @@ in
external-api-docs = nixpkgsFor.x86_64-linux.native.nixComponents.nix-external-api-docs;
# System tests.
tests = import ../tests/nixos { inherit lib nixpkgs nixpkgsFor self; } // {
tests =
import ../tests/nixos {
inherit lib nixpkgs nixpkgsFor;
inherit (self.inputs) nixpkgs-23-11;
}
// {
# Make sure that nix-env still produces the exact same result
# on a particular version of Nixpkgs.
@ -155,8 +196,7 @@ in
let
inherit (nixpkgsFor.x86_64-linux.native) runCommand nix;
in
runCommand "eval-nixos" { buildInputs = [ nix ]; }
''
runCommand "eval-nixos" { buildInputs = [ nix ]; } ''
type -p nix-env
# Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593.
(
@ -167,10 +207,9 @@ in
mkdir $out
'';
nixpkgsLibTests =
forAllSystems (system:
import (nixpkgs + "/lib/tests/test-with-nix.nix")
{
nixpkgsLibTests = forAllSystems (
system:
import (nixpkgs + "/lib/tests/test-with-nix.nix") {
lib = nixpkgsFor.${system}.native.lib;
nix = self.packages.${system}.nix-cli;
pkgs = nixpkgsFor.${system}.native;
@ -183,20 +222,21 @@ in
nixpkgs = nixpkgs-regression;
};
installTests = forAllSystems (system:
let pkgs = nixpkgsFor.${system}.native; in
pkgs.runCommand "install-tests"
{
installTests = forAllSystems (
system:
let
pkgs = nixpkgsFor.${system}.native;
in
pkgs.runCommand "install-tests" {
againstSelf = testNixVersions pkgs pkgs.nix;
againstCurrentLatest =
# FIXME: temporarily disable this on macOS because of #3605.
if system == "x86_64-linux"
then testNixVersions pkgs pkgs.nixVersions.latest
else null;
if system == "x86_64-linux" then testNixVersions pkgs pkgs.nixVersions.latest else null;
# Disabled because the latest stable version doesn't handle
# `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work
# againstLatestStable = testNixVersions pkgs pkgs.nixStable;
} "touch $out");
} "touch $out"
);
installerTests = import ../tests/installer {
binaryTarballs = self.hydraJobs.binaryTarball;

View file

@ -0,0 +1,42 @@
{
lib,
runCommand,
nix,
tarballs,
}:
runCommand "installer-script"
{
buildInputs = [ nix ];
}
''
mkdir -p $out/nix-support
# Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix.
tarballPath() {
# Remove the store prefix
local path=''${1#${builtins.storeDir}/}
# Get the path relative to the derivation root
local rest=''${path#*/}
# Get the derivation hash
local drvHash=''${path%%-*}
echo "$drvHash/$rest"
}
substitute ${./install.in} $out/install \
${
lib.concatMapStrings (
tarball:
let
inherit (tarball.stdenv.hostPlatform) system;
in
''
\
--replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \
--replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \
''
) tarballs
} --replace '@nixVersion@' ${nix.version}
echo "file installer $out/install" >> $out/nix-support/hydra-build-products
''

View file

@ -1,36 +0,0 @@
{ lib
, runCommand
, nix
, tarballs
}:
runCommand "installer-script" {
buildInputs = [ nix ];
} ''
mkdir -p $out/nix-support
# Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix.
tarballPath() {
# Remove the store prefix
local path=''${1#${builtins.storeDir}/}
# Get the path relative to the derivation root
local rest=''${path#*/}
# Get the derivation hash
local drvHash=''${path%%-*}
echo "$drvHash/$rest"
}
substitute ${./install.in} $out/install \
${lib.concatMapStrings
(tarball: let
inherit (tarball.stdenv.hostPlatform) system;
in '' \
--replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \
--replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \
''
)
tarballs
} --replace '@nixVersion@' ${nix.version}
echo "file installer $out/install" >> $out/nix-support/hydra-build-products
''

View file

@ -1,3 +1,13 @@
# Only execute this file once per shell.
if test -z "$HOME" || \
test -n "$__ETC_PROFILE_NIX_SOURCED"
exit
end
set --global __ETC_PROFILE_NIX_SOURCED 1
# Local helpers
function add_path --argument-names new_path
if type -q fish_add_path
# fish 3.2.0 or newer
@ -10,21 +20,22 @@ function add_path --argument-names new_path
end
end
# Only execute this file once per shell.
if test -n "$__ETC_PROFILE_NIX_SOURCED"
exit
end
# Main configuration
set __ETC_PROFILE_NIX_SOURCED 1
# Set up the per-user profile.
set --local NIX_LINK $HOME/.nix-profile
# Set up environment.
# This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix
set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile"
# Populate bash completions, .desktop files, etc
if test -z "$XDG_DATA_DIRS"
    # According to the XDG spec the default is /usr/local/share:/usr/share; don't set something that prevents that default
set --export XDG_DATA_DIRS "/usr/local/share:/usr/share:/nix/var/nix/profiles/default/share"
set --export XDG_DATA_DIRS "/usr/local/share:/usr/share:$NIX_LINK/share:/nix/var/nix/profiles/default/share"
else
set --export XDG_DATA_DIRS "$XDG_DATA_DIRS:/nix/var/nix/profiles/default/share"
set --export XDG_DATA_DIRS "$XDG_DATA_DIRS:$NIX_LINK/share:/nix/var/nix/profiles/default/share"
end
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
@ -52,6 +63,8 @@ else
end
add_path "@localstatedir@/nix/profiles/default/bin"
add_path "$HOME/.nix-profile/bin"
add_path "$NIX_LINK/bin"
# Cleanup
functions -e add_path

View file

@ -1,3 +1,13 @@
# Only execute this file once per shell.
if test -z "$HOME" || test -z "$USER" || \
test -n "$__ETC_PROFILE_NIX_SOURCED"
exit
end
set --global __ETC_PROFILE_NIX_SOURCED 1
# Local helpers
function add_path --argument-names new_path
if type -q fish_add_path
# fish 3.2.0 or newer
@ -10,11 +20,11 @@ function add_path --argument-names new_path
end
end
if test -n "$HOME" && test -n "$USER"
# Main configuration
# Set up the per-user profile.
set NIX_LINK $HOME/.nix-profile
set --local NIX_LINK $HOME/.nix-profile
# Set up environment.
# This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix
@ -29,7 +39,7 @@ if test -n "$HOME" && test -n "$USER"
end
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
if test -n "$NIX_SSH_CERT_FILE"
if test -n "$NIX_SSL_CERT_FILE"
: # Allow users to override the NIX_SSL_CERT_FILE
else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch
set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt
@ -53,7 +63,7 @@ if test -n "$HOME" && test -n "$USER"
end
add_path "$NIX_LINK/bin"
set --erase NIX_LINK
end
# Cleanup
functions -e add_path

View file

@ -1,11 +1,12 @@
{ lib
, mkMesonDerivation
{
lib,
mkMesonDerivation,
, doxygen
doxygen,
# Configuration Options
, version
version,
}:
let
@ -39,8 +40,7 @@ mkMesonDerivation (finalAttrs: {
doxygen
];
preConfigure =
''
preConfigure = ''
chmod u+w ./.version
echo ${finalAttrs.version} > ./.version
'';

View file

@ -1,11 +1,12 @@
{ lib
, mkMesonDerivation
{
lib,
mkMesonDerivation,
, doxygen
doxygen,
# Configuration Options
, version
version,
}:
let
@ -17,9 +18,11 @@ mkMesonDerivation (finalAttrs: {
inherit version;
workDir = ./.;
fileset = let
fileset =
let
cpp = fileset.fileFilter (file: file.hasExt "cc" || file.hasExt "hh");
in fileset.unions [
in
fileset.unions [
./.version
../../.version
./meson.build
@ -33,8 +36,7 @@ mkMesonDerivation (finalAttrs: {
doxygen
];
preConfigure =
''
preConfigure = ''
chmod u+w ./.version
echo ${finalAttrs.version} > ./.version
'';

View file

@ -347,7 +347,7 @@ struct MixEnvironment : virtual Args
void setEnviron();
};
void completeFlakeInputPath(
void completeFlakeInputAttrPath(
AddCompletions & completions,
ref<EvalState> evalState,
const std::vector<FlakeRef> & flakeRefs,

View file

@ -33,7 +33,7 @@ namespace nix {
namespace fs { using namespace std::filesystem; }
void completeFlakeInputPath(
void completeFlakeInputAttrPath(
AddCompletions & completions,
ref<EvalState> evalState,
const std::vector<FlakeRef> & flakeRefs,
@ -117,10 +117,10 @@ MixFlakeOptions::MixFlakeOptions()
.labels = {"input-path"},
.handler = {[&](std::string s) {
warn("'--update-input' is a deprecated alias for 'flake update' and will be removed in a future version.");
lockFlags.inputUpdates.insert(flake::parseInputPath(s));
lockFlags.inputUpdates.insert(flake::parseInputAttrPath(s));
}},
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
completeFlakeInputAttrPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
}}
});
@ -129,15 +129,15 @@ MixFlakeOptions::MixFlakeOptions()
.description = "Override a specific flake input (e.g. `dwarffs/nixpkgs`). This implies `--no-write-lock-file`.",
.category = category,
.labels = {"input-path", "flake-url"},
.handler = {[&](std::string inputPath, std::string flakeRef) {
.handler = {[&](std::string inputAttrPath, std::string flakeRef) {
lockFlags.writeLockFile = false;
lockFlags.inputOverrides.insert_or_assign(
flake::parseInputPath(inputPath),
flake::parseInputAttrPath(inputAttrPath),
parseFlakeRef(fetchSettings, flakeRef, absPath(getCommandBaseDir()), true));
}},
.completer = {[&](AddCompletions & completions, size_t n, std::string_view prefix) {
if (n == 0) {
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
completeFlakeInputAttrPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
} else if (n == 1) {
completeFlakeRef(completions, getEvalState()->store, prefix);
}

View file

@ -1,24 +1,25 @@
{ lib
, stdenv
, mkMesonLibrary
{
lib,
stdenv,
mkMesonLibrary,
, nix-util
, nix-store
, nix-fetchers
, nix-expr
, nix-flake
, nix-main
, editline
, readline
, lowdown
, nlohmann_json
nix-util,
nix-store,
nix-fetchers,
nix-expr,
nix-flake,
nix-main,
editline,
readline,
lowdown,
nlohmann_json,
# Configuration Options
, version
version,
# Whether to enable Markdown rendering in the Nix binary.
, enableMarkdown ? !stdenv.hostPlatform.isWindows
enableMarkdown ? !stdenv.hostPlatform.isWindows,
# Which interactive line editor library to use for Nix's repl.
#
@ -26,7 +27,7 @@
#
# - editline (default)
# - readline
, readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline"
readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline",
}:
let

View file

@ -613,12 +613,8 @@ nix_realised_string * nix_string_realise(nix_c_context * context, EvalState * st
context->last_err_code = NIX_OK;
try {
auto & v = check_value_in(value);
nix::NixStringContext stringContext;
auto rawStr = state->state.coerceToString(nix::noPos, v, stringContext, "while realising a string").toOwned();
nix::StorePathSet storePaths;
auto rewrites = state->state.realiseContext(stringContext, &storePaths);
auto s = nix::rewriteStrings(rawStr, rewrites);
auto s = state->state.realiseString(v, &storePaths, isIFD);
// Convert to the C API StorePath type and convert to vector for index-based access
std::vector<StorePath> vec;

View file

@ -1,12 +1,13 @@
{ lib
, mkMesonLibrary
{
lib,
mkMesonLibrary,
, nix-store-c
, nix-expr
nix-store-c,
nix-expr,
# Configuration Options
, version
version,
}:
let

View file

@ -1,15 +1,16 @@
{ lib
, mkMesonLibrary
{
lib,
mkMesonLibrary,
, nix-store-test-support
, nix-expr
, nix-expr-c
nix-store-test-support,
nix-expr,
nix-expr-c,
, rapidcheck
rapidcheck,
# Configuration Options
, version
version,
}:
let

View file

@ -1,20 +1,21 @@
{ lib
, buildPackages
, stdenv
, mkMesonExecutable
{
lib,
buildPackages,
stdenv,
mkMesonExecutable,
, nix-expr
, nix-expr-c
, nix-expr-test-support
nix-expr,
nix-expr-c,
nix-expr-test-support,
, rapidcheck
, gtest
, runCommand
rapidcheck,
gtest,
runCommand,
# Configuration Options
, version
, resolvePath
version,
resolvePath,
}:
let
@ -58,16 +59,22 @@ mkMesonExecutable (finalAttrs: {
passthru = {
tests = {
run = runCommand "${finalAttrs.pname}-run" {
run =
runCommand "${finalAttrs.pname}-run"
{
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
} (lib.optionalString stdenv.hostPlatform.isWindows ''
}
(
lib.optionalString stdenv.hostPlatform.isWindows ''
export HOME="$PWD/home-dir"
mkdir -p "$HOME"
'' + ''
''
+ ''
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
touch $out
'');
''
);
};
};

View file

@ -20,36 +20,33 @@ let
# Resolve a input spec into a node name. An input spec is
# either a node name, or a 'follows' path from the root
# node.
resolveInput = inputSpec:
if builtins.isList inputSpec
then getInputByPath lockFile.root inputSpec
else inputSpec;
resolveInput =
inputSpec: if builtins.isList inputSpec then getInputByPath lockFile.root inputSpec else inputSpec;
# Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the
# Follow an input attrpath (e.g. ["dwarffs" "nixpkgs"]) from the
# root node, returning the final node.
getInputByPath = nodeName: path:
if path == []
then nodeName
getInputByPath =
nodeName: path:
if path == [ ] then
nodeName
else
getInputByPath
# Since this could be a 'follows' input, call resolveInput.
(resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
(builtins.tail path);
allNodes =
builtins.mapAttrs
(key: node:
allNodes = builtins.mapAttrs (
key: node:
let
parentNode = allNodes.${getInputByPath lockFile.root node.parent};
sourceInfo =
if overrides ? ${key}
then
if overrides ? ${key} then
overrides.${key}.sourceInfo
else if node.locked.type == "path" && builtins.substring 0 1 node.locked.path != "/"
then
parentNode.sourceInfo // {
else if node.locked.type == "path" && builtins.substring 0 1 node.locked.path != "/" then
parentNode.sourceInfo
// {
outPath = parentNode.outPath + ("/" + node.locked.path);
}
else
@ -63,9 +60,9 @@ let
flake = import (outPath + "/flake.nix");
inputs = builtins.mapAttrs
(inputName: inputSpec: allNodes.${resolveInput inputSpec})
(node.inputs or {});
inputs = builtins.mapAttrs (inputName: inputSpec: allNodes.${resolveInput inputSpec}) (
node.inputs or { }
);
outputs = flake.outputs (inputs // { self = result; });
@ -81,7 +78,10 @@ let
# This shadows the sourceInfo.outPath
inherit outPath;
inherit inputs; inherit outputs; inherit sourceInfo; _type = "flake";
inherit inputs;
inherit outputs;
inherit sourceInfo;
_type = "flake";
};
in
@ -90,7 +90,7 @@ let
result
else
sourceInfo
)
lockFile.nodes;
) lockFile.nodes;
in allNodes.${lockFile.root}
in
allNodes.${lockFile.root}
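
For reference, the resolution logic above can be exercised on its own. A minimal sketch with hypothetical lock data (node names illustrative), in which the `nixpkgs` input of `dwarffs` follows the root's own `nixpkgs`:

let
  lockFile = {
    root = "root";
    nodes = {
      root.inputs = {
        dwarffs = "dwarffs";
        nixpkgs = "nixpkgs";
      };
      dwarffs.inputs = {
        # A 'follows' spec: a path walked from the root node, not a node name.
        nixpkgs = [ "nixpkgs" ];
      };
      nixpkgs.inputs = { };
    };
  };
  resolveInput =
    inputSpec: if builtins.isList inputSpec then getInputByPath lockFile.root inputSpec else inputSpec;
  getInputByPath =
    nodeName: path:
    if path == [ ] then
      nodeName
    else
      getInputByPath (resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path}) (
        builtins.tail path
      );
in
resolveInput lockFile.nodes.dwarffs.inputs.nixpkgs # evaluates to "nixpkgs"

Evaluating this with `nix-instantiate --eval` returns `"nixpkgs"`: the list spec is walked from the root node, so the follows target resolves to the same node as the root's input.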

View file

@ -3114,7 +3114,7 @@ std::optional<SourcePath> EvalState::resolveLookupPathPath(const LookupPath::Pat
}
}
if (path.pathExists())
if (path.resolveSymlinks().pathExists())
return finish(std::move(path));
else {
logWarning({

View file

@ -820,6 +820,15 @@ public:
*/
[[nodiscard]] StringMap realiseContext(const NixStringContext & context, StorePathSet * maybePaths = nullptr, bool isIFD = true);
/**
* Realise the given string with context, and return the string with outputs instead of downstream output placeholders.
* @param[in] str the string to realise
* @param[out] paths all referenced store paths will be added to this set
* @return the realised string
* @throw EvalError if the value is not a string, path or derivation (see `coerceToString`)
*/
std::string realiseString(Value & str, StorePathSet * storePathsOutMaybe, bool isIFD = true, const PosIdx pos = noPos);
    /* Call the binary path filter predicate used by builtins.path etc. */
bool callPathFilter(
Value * filterFun,

View file

@ -1,27 +1,55 @@
{ system ? "" # obsolete
, url
, hash ? "" # an SRI hash
{
system ? "", # obsolete
url,
hash ? "", # an SRI hash
# Legacy hash specification
, md5 ? "", sha1 ? "", sha256 ? "", sha512 ? ""
, outputHash ?
if hash != "" then hash else if sha512 != "" then sha512 else if sha1 != "" then sha1 else if md5 != "" then md5 else sha256
, outputHashAlgo ?
if hash != "" then "" else if sha512 != "" then "sha512" else if sha1 != "" then "sha1" else if md5 != "" then "md5" else "sha256"
md5 ? "",
sha1 ? "",
sha256 ? "",
sha512 ? "",
outputHash ?
if hash != "" then
hash
else if sha512 != "" then
sha512
else if sha1 != "" then
sha1
else if md5 != "" then
md5
else
sha256,
outputHashAlgo ?
if hash != "" then
""
else if sha512 != "" then
"sha512"
else if sha1 != "" then
"sha1"
else if md5 != "" then
"md5"
else
"sha256",
, executable ? false
, unpack ? false
, name ? baseNameOf (toString url)
, impure ? false
executable ? false,
unpack ? false,
name ? baseNameOf (toString url),
impure ? false,
}:
derivation ({
derivation (
{
builder = "builtin:fetchurl";
# New-style output content requirements.
outputHashMode = if unpack || executable then "recursive" else "flat";
inherit name url executable unpack;
inherit
name
url
executable
unpack
;
system = "builtin";
@ -30,11 +58,15 @@ derivation ({
# This attribute does nothing; it's here to avoid changing evaluation results.
impureEnvVars = [
"http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
"http_proxy"
"https_proxy"
"ftp_proxy"
"all_proxy"
"no_proxy"
];
# To make "nix-prefetch-url" work.
urls = [ url ];
} // (if impure
then { __impure = true; }
else { inherit outputHashAlgo outputHash; }))
}
// (if impure then { __impure = true; } else { inherit outputHashAlgo outputHash; })
)
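
A minimal sketch of calling this file as it is usually reached, via the `<nix/fetchurl.nix>` lookup path; the URL and SRI hash are placeholders:

import <nix/fetchurl.nix> {
  url = "https://example.org/source.tar.gz"; # placeholder URL
  hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="; # placeholder SRI hash
}

Per the `outputHash` default above, a non-empty `hash` takes precedence over the legacy `md5`/`sha1`/`sha256`/`sha512` arguments; with `impure = true` the hash is dropped and `__impure = true` is passed to `derivation` instead.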

View file

@ -1,15 +1,20 @@
attrs @ { drvPath, outputs, name, ... }:
attrs@{
drvPath,
outputs,
name,
...
}:
let
commonAttrs = (builtins.listToAttrs outputsList) //
{ all = map (x: x.value) outputsList;
commonAttrs = (builtins.listToAttrs outputsList) // {
all = map (x: x.value) outputsList;
inherit drvPath name;
type = "derivation";
};
outputToAttrListElement = outputName:
{ name = outputName;
outputToAttrListElement = outputName: {
name = outputName;
value = commonAttrs // {
outPath = builtins.getAttr outputName attrs;
inherit outputName;
@ -18,4 +23,5 @@ let
outputsList = map outputToAttrListElement outputs;
in (builtins.head outputsList).value
in
(builtins.head outputsList).value

View file

@ -24,6 +24,7 @@ deps_public_maybe_subproject = [
dependency('nix-fetchers'),
]
subdir('nix-meson-build-support/subprojects')
subdir('nix-meson-build-support/big-objs')
boost = dependency(
'boost',

View file

@ -1,22 +1,23 @@
{ lib
, stdenv
, mkMesonLibrary
{
lib,
stdenv,
mkMesonLibrary,
, bison
, flex
, cmake # for resolving toml11 dep
bison,
flex,
cmake, # for resolving toml11 dep
, nix-util
, nix-store
, nix-fetchers
, boost
, boehmgc
, nlohmann_json
, toml11
nix-util,
nix-store,
nix-fetchers,
boost,
boehmgc,
nlohmann_json,
toml11,
# Configuration Options
, version
version,
# Whether to use garbage collection for the Nix language evaluator.
#
@ -27,7 +28,7 @@
#
# Temporarily disabled on Windows because the `GC_throw_bad_alloc`
# symbol is missing during linking.
, enableGC ? !stdenv.hostPlatform.isWindows
enableGC ? !stdenv.hostPlatform.isWindows,
}:
let
@ -51,10 +52,7 @@ mkMesonLibrary (finalAttrs: {
(fileset.fileFilter (file: file.hasExt "hh") ./.)
./lexer.l
./parser.y
(fileset.difference
(fileset.fileFilter (file: file.hasExt "nix") ./.)
./package.nix
)
(fileset.difference (fileset.fileFilter (file: file.hasExt "nix") ./.) ./package.nix)
];
nativeBuildInputs = [

View file

@ -47,6 +47,15 @@ static inline Value * mkString(EvalState & state, const std::csub_match & match)
return v;
}
std::string EvalState::realiseString(Value & s, StorePathSet * storePathsOutMaybe, bool isIFD, const PosIdx pos)
{
nix::NixStringContext stringContext;
auto rawStr = coerceToString(pos, s, stringContext, "while realising a string").toOwned();
auto rewrites = realiseContext(stringContext, storePathsOutMaybe, isIFD);
return nix::rewriteStrings(rawStr, rewrites);
}
StringMap EvalState::realiseContext(const NixStringContext & context, StorePathSet * maybePathsOut, bool isIFD)
{
std::vector<DerivedPath::Built> drvs;

View file

@ -26,19 +26,25 @@
Note that `derivation` is very bare-bones, and provides almost no commands during the build.
Most likely, you'll want to use functions like `stdenv.mkDerivation` in Nixpkgs to set up a basic environment.
*/
drvAttrs @ { outputs ? [ "out" ], ... }:
drvAttrs@{
outputs ? [ "out" ],
...
}:
let
strict = derivationStrict drvAttrs;
commonAttrs = drvAttrs // (builtins.listToAttrs outputsList) //
{ all = map (x: x.value) outputsList;
commonAttrs =
drvAttrs
// (builtins.listToAttrs outputsList)
// {
all = map (x: x.value) outputsList;
inherit drvAttrs;
};
outputToAttrListElement = outputName:
{ name = outputName;
outputToAttrListElement = outputName: {
name = outputName;
value = commonAttrs // {
outPath = builtins.getAttr outputName strict;
drvPath = strict.drvPath;
@ -49,4 +55,5 @@ let
outputsList = map outputToAttrListElement outputs;
in (builtins.head outputsList).value
in
(builtins.head outputsList).value
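
As the note above says, `derivation` is bare-bones. A minimal sketch of a direct call (builder path and script are illustrative; real code would normally go through `stdenv.mkDerivation`):

derivation {
  name = "hello-txt";
  system = builtins.currentSystem;
  builder = "/bin/sh"; # illustrative; a real build should reference a store path
  args = [ "-c" "echo hello > $out" ];
}

With the default `outputs ? [ "out" ]`, the result is the attribute set that `outputToAttrListElement` builds for the `out` output, carrying `outPath`, `drvPath`, and `all`.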

View file

@ -90,24 +90,26 @@ static void fetchTree(
fetchers::Input input { state.fetchSettings };
NixStringContext context;
std::optional<std::string> type;
auto fetcher = params.isFetchGit ? "fetchGit" : "fetchTree";
if (params.isFetchGit) type = "git";
state.forceValue(*args[0], pos);
if (args[0]->type() == nAttrs) {
state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.fetchTree");
state.forceAttrs(*args[0], pos, fmt("while evaluating the argument passed to '%s'", fetcher));
fetchers::Attrs attrs;
if (auto aType = args[0]->attrs()->get(state.sType)) {
if (type)
state.error<EvalError>(
"unexpected attribute 'type'"
"unexpected argument 'type'"
).atPos(pos).debugThrow();
type = state.forceStringNoCtx(*aType->value, aType->pos, "while evaluating the `type` attribute passed to builtins.fetchTree");
type = state.forceStringNoCtx(*aType->value, aType->pos,
fmt("while evaluating the `type` argument passed to '%s'", fetcher));
} else if (!type)
state.error<EvalError>(
"attribute 'type' is missing in call to 'fetchTree'"
"argument 'type' is missing in call to '%s'", fetcher
).atPos(pos).debugThrow();
attrs.emplace("type", type.value());
@ -127,9 +129,8 @@ static void fetchTree(
else if (attr.value->type() == nInt) {
auto intValue = attr.value->integer().value;
if (intValue < 0) {
state.error<EvalError>("negative value given for fetchTree attr %1%: %2%", state.symbols[attr.name], intValue).atPos(pos).debugThrow();
}
if (intValue < 0)
state.error<EvalError>("negative value given for '%s' argument '%s': %d", fetcher, state.symbols[attr.name], intValue).atPos(pos).debugThrow();
attrs.emplace(state.symbols[attr.name], uint64_t(intValue));
} else if (state.symbols[attr.name] == "publicKeys") {
@ -137,8 +138,8 @@ static void fetchTree(
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, context).dump());
}
else
state.error<TypeError>("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
state.symbols[attr.name], showType(*attr.value)).debugThrow();
state.error<TypeError>("argument '%s' to '%s' is %s while a string, Boolean or integer is expected",
state.symbols[attr.name], fetcher, showType(*attr.value)).debugThrow();
}
if (params.isFetchGit && !attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) {
@ -153,13 +154,13 @@ static void fetchTree(
if (!params.allowNameArgument)
if (auto nameIter = attrs.find("name"); nameIter != attrs.end())
state.error<EvalError>(
"attribute 'name' isnt supported in call to 'fetchTree'"
"argument 'name' isnt supported in call to '%s'", fetcher
).atPos(pos).debugThrow();
input = fetchers::Input::fromAttrs(state.fetchSettings, std::move(attrs));
} else {
auto url = state.coerceToString(pos, *args[0], context,
"while evaluating the first argument passed to the fetcher",
fmt("while evaluating the first argument passed to '%s'", fetcher),
false, false).toOwned();
if (params.isFetchGit) {
@ -173,7 +174,7 @@ static void fetchTree(
} else {
if (!experimentalFeatureSettings.isEnabled(Xp::Flakes))
state.error<EvalError>(
"passing a string argument to 'fetchTree' requires the 'flakes' experimental feature"
"passing a string argument to '%s' requires the 'flakes' experimental feature", fetcher
).atPos(pos).debugThrow();
input = fetchers::Input::fromURL(state.fetchSettings, url);
}
@ -182,15 +183,16 @@ static void fetchTree(
if (!state.settings.pureEval && !input.isDirect() && experimentalFeatureSettings.isEnabled(Xp::Flakes))
input = lookupInRegistries(state.store, input).first;
if (state.settings.pureEval && !input.isConsideredLocked(state.fetchSettings)) {
auto fetcher = "fetchTree";
if (params.isFetchGit)
fetcher = "fetchGit";
if (state.settings.pureEval && !input.isLocked()) {
if (input.getNarHash())
warn(
"Input '%s' is unlocked (e.g. lacks a Git revision) but does have a NAR hash. "
"This is deprecated since such inputs are verifiable but may not be reproducible.",
input.to_string());
else
state.error<EvalError>(
"in pure evaluation mode, '%s' will not fetch unlocked input '%s'",
fetcher, input.to_string()
).atPos(pos).debugThrow();
fetcher, input.to_string()).atPos(pos).debugThrow();
}
state.checkURI(input.toURLString());
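
On the language side, the attribute-set call form that this validation accepts looks like the following sketch (URL illustrative); `builtins.fetchGit` implies `type = "git"`, so passing `type` there triggers the "unexpected argument" error above:

builtins.fetchTree {
  type = "git";
  url = "https://example.org/repo.git"; # illustrative
}

The string form, e.g. `builtins.fetchTree "github:NixOS/nixpkgs"`, takes the `coerceToString` branch and requires the `flakes` experimental feature.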

View file

@ -1,19 +1,20 @@
{ lib
, buildPackages
, stdenv
, mkMesonExecutable
{
lib,
buildPackages,
stdenv,
mkMesonExecutable,
, nix-fetchers
, nix-store-test-support
nix-fetchers,
nix-store-test-support,
, rapidcheck
, gtest
, runCommand
rapidcheck,
gtest,
runCommand,
# Configuration Options
, version
, resolvePath
version,
resolvePath,
}:
let
@ -56,16 +57,22 @@ mkMesonExecutable (finalAttrs: {
passthru = {
tests = {
run = runCommand "${finalAttrs.pname}-run" {
run =
runCommand "${finalAttrs.pname}-run"
{
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
} (lib.optionalString stdenv.hostPlatform.isWindows ''
}
(
lib.optionalString stdenv.hostPlatform.isWindows ''
export HOME="$PWD/home-dir"
mkdir -p "$HOME"
'' + ''
''
+ ''
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
touch $out
'');
''
);
};
};

View file

@ -155,12 +155,6 @@ bool Input::isLocked() const
return scheme && scheme->isLocked(*this);
}
bool Input::isConsideredLocked(
const Settings & settings) const
{
return isLocked() || (settings.allowDirtyLocks && getNarHash());
}
bool Input::isFinal() const
{
return maybeGetBoolAttr(attrs, "__final").value_or(false);

View file

@ -90,15 +90,6 @@ public:
*/
bool isLocked() const;
/**
* Return whether the input is either locked, or, if
* `allow-dirty-locks` is enabled, it has a NAR hash. In the
* latter case, we can verify the input but we may not be able to
* fetch it from anywhere.
*/
bool isConsideredLocked(
const Settings & settings) const;
/**
* Only for relative path flakes, i.e. 'path:./foo', returns the
* relative path, i.e. './foo'.

View file

@ -206,7 +206,8 @@ static git_packbuilder_progress PACKBUILDER_PROGRESS_CHECK_INTERRUPT = &packBuil
} // extern "C"
static void initRepoAtomically(std::filesystem::path &path, bool bare) {
static void initRepoAtomically(std::filesystem::path &path, bool bare)
{
if (pathExists(path.string())) return;
Path tmpDir = createTempDir(os_string_to_string(PathViewNG { std::filesystem::path(path).parent_path() }));
@ -507,7 +508,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
*/
ref<GitSourceAccessor> getRawAccessor(const Hash & rev);
ref<SourceAccessor> getAccessor(const Hash & rev, bool exportIgnore) override;
ref<SourceAccessor> getAccessor(
const Hash & rev,
bool exportIgnore,
std::string displayPrefix) override;
ref<SourceAccessor> getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError e) override;
@ -544,13 +548,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
// then use code that was removed in this commit (see blame)
auto dir = this->path;
Strings gitArgs;
if (shallow) {
gitArgs = { "-C", dir.string(), "fetch", "--quiet", "--force", "--depth", "1", "--", url, refspec };
}
else {
gitArgs = { "-C", dir.string(), "fetch", "--quiet", "--force", "--", url, refspec };
}
Strings gitArgs{"-C", dir.string(), "--git-dir", ".", "fetch", "--quiet", "--force"};
if (shallow)
append(gitArgs, {"--depth", "1"});
append(gitArgs, {std::string("--"), url, refspec});
runProgram(RunOptions {
.program = "git",
@ -629,7 +630,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
Hash treeHashToNarHash(const Hash & treeHash) override
{
auto accessor = getAccessor(treeHash, false);
auto accessor = getAccessor(treeHash, false, "");
fetchers::Cache::Key cacheKey{"treeHashToNarHash", {{"treeHash", treeHash.gitRev()}}};
@ -1196,17 +1197,19 @@ ref<GitSourceAccessor> GitRepoImpl::getRawAccessor(const Hash & rev)
return make_ref<GitSourceAccessor>(self, rev);
}
ref<SourceAccessor> GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore)
ref<SourceAccessor> GitRepoImpl::getAccessor(
const Hash & rev,
bool exportIgnore,
std::string displayPrefix)
{
auto self = ref<GitRepoImpl>(shared_from_this());
ref<GitSourceAccessor> rawGitAccessor = getRawAccessor(rev);
if (exportIgnore) {
rawGitAccessor->setPathDisplay(std::move(displayPrefix));
if (exportIgnore)
return make_ref<GitExportIgnoreSourceAccessor>(self, rawGitAccessor, rev);
}
else {
else
return rawGitAccessor;
}
}
ref<SourceAccessor> GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError)
{
@ -1238,7 +1241,7 @@ std::vector<std::tuple<GitRepoImpl::Submodule, Hash>> GitRepoImpl::getSubmodules
/* Read the .gitmodules files from this revision. */
CanonPath modulesFile(".gitmodules");
auto accessor = getAccessor(rev, exportIgnore);
auto accessor = getAccessor(rev, exportIgnore, "");
if (!accessor->pathExists(modulesFile)) return {};
/* Parse it and get the revision of each submodule. */

View file

@ -86,7 +86,10 @@ struct GitRepo
virtual bool hasObject(const Hash & oid) = 0;
virtual ref<SourceAccessor> getAccessor(const Hash & rev, bool exportIgnore) = 0;
virtual ref<SourceAccessor> getAccessor(
const Hash & rev,
bool exportIgnore,
std::string displayPrefix) = 0;
virtual ref<SourceAccessor> getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) = 0;

View file

@ -69,7 +69,7 @@ std::optional<std::string> readHead(const Path & path)
std::string_view line = output;
line = line.substr(0, line.find("\n"));
if (const auto parseResult = git::parseLsRemoteLine(line)) {
if (const auto parseResult = git::parseLsRemoteLine(line); parseResult && parseResult->reference == "HEAD") {
switch (parseResult->kind) {
case git::LsRemoteRefLine::Kind::Symbolic:
debug("resolved HEAD ref '%s' for repo '%s'", parseResult->target, path);
@ -459,8 +459,14 @@ struct GitInputScheme : InputScheme
url);
}
repoInfo.location = std::filesystem::absolute(url.path);
} else
} else {
if (url.scheme == "file")
/* Query parameters are meaningless for file://, but
Git interprets them as part of the file name. So get
rid of them. */
url.query.clear();
repoInfo.location = url;
}
// If this is a local directory and no ref or revision is
// given, then allow the use of an unclean working tree.
@ -605,16 +611,16 @@ struct GitInputScheme : InputScheme
try {
auto fetchRef =
getAllRefsAttr(input)
? "refs/*"
? "refs/*:refs/*"
: input.getRev()
? input.getRev()->gitRev()
: ref.compare(0, 5, "refs/") == 0
? ref
? fmt("%1%:%1%", ref)
: ref == "HEAD"
? ref
: "refs/heads/" + ref;
: fmt("%1%:%1%", "refs/heads/" + ref);
repo->fetch(repoUrl.to_string(), fmt("%s:%s", fetchRef, fetchRef), getShallowAttr(input));
repo->fetch(repoUrl.to_string(), fetchRef, getShallowAttr(input));
} catch (Error & e) {
if (!std::filesystem::exists(localRefFile)) throw;
logError(e.info());
@ -672,9 +678,7 @@ struct GitInputScheme : InputScheme
verifyCommit(input, repo);
bool exportIgnore = getExportIgnoreAttr(input);
auto accessor = repo->getAccessor(rev, exportIgnore);
accessor->setPathDisplay("«" + input.to_string() + "»");
auto accessor = repo->getAccessor(rev, exportIgnore, "«" + input.to_string() + "»");
/* If the repo has submodules, fetch them and return a mounted
input accessor consisting of the accessor for the top-level
@ -737,8 +741,6 @@ struct GitInputScheme : InputScheme
exportIgnore,
makeNotAllowedError(repoInfo.locationToArg()));
accessor->setPathDisplay(repoInfo.locationToArg());
/* If the repo has submodules, return a mounted input accessor
consisting of the accessor for the top-level repo and the
accessors for the submodule workdirs. */
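
In terms of the user-facing API, the refspec selection above maps onto calls like this sketch (URL illustrative): a bare `ref` is expanded to `refs/heads/...` and fetched with the refspec `refs/heads/main:refs/heads/main`, while setting `allRefs = true` selects `refs/*:refs/*`:

builtins.fetchGit {
  url = "https://example.org/repo.git"; # illustrative
  ref = "main";
}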

View file

@ -294,9 +294,10 @@ struct GitArchiveInputScheme : InputScheme
#endif
input.attrs.insert_or_assign("lastModified", uint64_t(tarballInfo.lastModified));
auto accessor = getTarballCache()->getAccessor(tarballInfo.treeHash, false);
accessor->setPathDisplay("«" + input.to_string() + "»");
auto accessor = getTarballCache()->getAccessor(
tarballInfo.treeHash,
false,
"«" + input.to_string() + "»");
return {accessor, input};
}

View file

@ -1,14 +1,15 @@
{ lib
, mkMesonLibrary
{
lib,
mkMesonLibrary,
, nix-util
, nix-store
, nlohmann_json
, libgit2
nix-util,
nix-store,
nlohmann_json,
libgit2,
# Configuration Options
, version
version,
}:
let

View file

@ -105,7 +105,8 @@ DownloadFileResult downloadFile(
static DownloadTarballResult downloadTarball_(
const std::string & url,
const Headers & headers)
const Headers & headers,
const std::string & displayPrefix)
{
Cache::Key cacheKey{"tarball", {{"url", url}}};
@ -118,7 +119,7 @@ static DownloadTarballResult downloadTarball_(
.treeHash = treeHash,
.lastModified = (time_t) getIntAttr(infoAttrs, "lastModified"),
.immutableUrl = maybeGetStrAttr(infoAttrs, "immutableUrl"),
.accessor = getTarballCache()->getAccessor(treeHash, false),
.accessor = getTarballCache()->getAccessor(treeHash, false, displayPrefix),
};
};
@ -371,9 +372,10 @@ struct TarballInputScheme : CurlInputScheme
{
auto input(_input);
auto result = downloadTarball_(getStrAttr(input.attrs, "url"), {});
result.accessor->setPathDisplay("«" + input.to_string() + "»");
auto result = downloadTarball_(
getStrAttr(input.attrs, "url"),
{},
"«" + input.to_string() + "»");
if (result.immutableUrl) {
auto immutableInput = Input::fromURL(*input.settings, *result.immutableUrl);

View file

@ -1,13 +1,14 @@
{ lib
, mkMesonLibrary
{
lib,
mkMesonLibrary,
, nix-store-c
, nix-expr-c
, nix-flake
nix-store-c,
nix-expr-c,
nix-flake,
# Configuration Options
, version
version,
}:
let

View file

@ -1,20 +1,21 @@
{ lib
, buildPackages
, stdenv
, mkMesonExecutable
{
lib,
buildPackages,
stdenv,
mkMesonExecutable,
, nix-flake
, nix-flake-c
, nix-expr-test-support
nix-flake,
nix-flake-c,
nix-expr-test-support,
, rapidcheck
, gtest
, runCommand
rapidcheck,
gtest,
runCommand,
# Configuration Options
, version
, resolvePath
version,
resolvePath,
}:
let
@ -58,17 +59,23 @@ mkMesonExecutable (finalAttrs: {
passthru = {
tests = {
run = runCommand "${finalAttrs.pname}-run" {
run =
runCommand "${finalAttrs.pname}-run"
{
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
} (lib.optionalString stdenv.hostPlatform.isWindows ''
}
(
lib.optionalString stdenv.hostPlatform.isWindows ''
export HOME="$PWD/home-dir"
mkdir -p "$HOME"
'' + ''
''
+ ''
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
export NIX_CONFIG="extra-experimental-features = flakes"
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
touch $out
'');
''
);
};
};
View file
@ -105,7 +105,7 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
EvalState & state,
Value * value,
const PosIdx pos,
const InputPath & lockRootPath,
const InputAttrPath & lockRootAttrPath,
const SourcePath & flakeDir);
static FlakeInput parseFlakeInput(
@ -113,7 +113,7 @@ static FlakeInput parseFlakeInput(
std::string_view inputName,
Value * value,
const PosIdx pos,
const InputPath & lockRootPath,
const InputAttrPath & lockRootAttrPath,
const SourcePath & flakeDir)
{
expectType(state, nAttrs, *value, pos);
@ -137,7 +137,7 @@ static FlakeInput parseFlakeInput(
else if (attr.value->type() == nPath) {
auto path = attr.value->path();
if (path.accessor != flakeDir.accessor)
throw Error("input path '%s' at %s must be in the same source tree as %s",
throw Error("input attribute path '%s' at %s must be in the same source tree as %s",
path, state.positions[attr.pos], flakeDir);
url = "path:" + flakeDir.path.makeRelative(path.path);
}
@ -149,11 +149,11 @@ static FlakeInput parseFlakeInput(
expectType(state, nBool, *attr.value, attr.pos);
input.isFlake = attr.value->boolean();
} else if (attr.name == sInputs) {
input.overrides = parseFlakeInputs(state, attr.value, attr.pos, lockRootPath, flakeDir);
input.overrides = parseFlakeInputs(state, attr.value, attr.pos, lockRootAttrPath, flakeDir);
} else if (attr.name == sFollows) {
expectType(state, nString, *attr.value, attr.pos);
auto follows(parseInputPath(attr.value->c_str()));
follows.insert(follows.begin(), lockRootPath.begin(), lockRootPath.end());
auto follows(parseInputAttrPath(attr.value->c_str()));
follows.insert(follows.begin(), lockRootAttrPath.begin(), lockRootAttrPath.end());
input.follows = follows;
} else {
// Allow selecting a subset of enum values
@ -220,7 +220,7 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
EvalState & state,
Value * value,
const PosIdx pos,
const InputPath & lockRootPath,
const InputAttrPath & lockRootAttrPath,
const SourcePath & flakeDir)
{
std::map<FlakeId, FlakeInput> inputs;
@ -233,7 +233,7 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
state.symbols[inputAttr.name],
inputAttr.value,
inputAttr.pos,
lockRootPath,
lockRootAttrPath,
flakeDir));
}
@ -246,7 +246,7 @@ static Flake readFlake(
const FlakeRef & resolvedRef,
const FlakeRef & lockedRef,
const SourcePath & rootDir,
const InputPath & lockRootPath)
const InputAttrPath & lockRootAttrPath)
{
auto flakeDir = rootDir / CanonPath(resolvedRef.subdir);
auto flakePath = flakeDir / "flake.nix";
@ -270,7 +270,7 @@ static Flake readFlake(
auto sInputs = state.symbols.create("inputs");
if (auto inputs = vInfo.attrs()->get(sInputs))
flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, lockRootPath, flakeDir);
flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, lockRootAttrPath, flakeDir);
auto sOutputs = state.symbols.create("outputs");
@ -347,12 +347,12 @@ static Flake getFlake(
const FlakeRef & originalRef,
bool useRegistries,
FlakeCache & flakeCache,
const InputPath & lockRootPath)
const InputAttrPath & lockRootAttrPath)
{
auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, originalRef, useRegistries, flakeCache);
return readFlake(state, originalRef, resolvedRef, lockedRef, state.rootPath(state.store->toRealPath(storePath)), lockRootPath);
return readFlake(state, originalRef, resolvedRef, lockedRef, state.rootPath(state.store->toRealPath(storePath)), lockRootAttrPath);
}
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool useRegistries)
@ -407,12 +407,12 @@ LockedFlake lockFlake(
{
FlakeInput input;
SourcePath sourcePath;
std::optional<InputPath> parentInputPath; // FIXME: rename to inputPathPrefix?
std::optional<InputAttrPath> parentInputAttrPath; // FIXME: rename to inputAttrPathPrefix?
};
std::map<InputPath, OverrideTarget> overrides;
std::set<InputPath> explicitCliOverrides;
std::set<InputPath> overridesUsed, updatesUsed;
std::map<InputAttrPath, OverrideTarget> overrides;
std::set<InputAttrPath> explicitCliOverrides;
std::set<InputAttrPath> overridesUsed, updatesUsed;
std::map<ref<Node>, SourcePath> nodePaths;
for (auto & i : lockFlags.inputOverrides) {
@ -436,9 +436,9 @@ LockedFlake lockFlake(
std::function<void(
const FlakeInputs & flakeInputs,
ref<Node> node,
const InputPath & inputPathPrefix,
const InputAttrPath & inputAttrPathPrefix,
std::shared_ptr<const Node> oldNode,
const InputPath & followsPrefix,
const InputAttrPath & followsPrefix,
const SourcePath & sourcePath,
bool trustLock)>
computeLocks;
@ -450,7 +450,7 @@ LockedFlake lockFlake(
/* The node whose locks are to be updated.*/
ref<Node> node,
/* The path to this node in the lock file graph. */
const InputPath & inputPathPrefix,
const InputAttrPath & inputAttrPathPrefix,
/* The old node, if any, from which locks can be
copied. */
std::shared_ptr<const Node> oldNode,
@ -458,59 +458,59 @@ LockedFlake lockFlake(
interpreted. When a node is initially locked, it's
relative to the node's flake; when it's already locked,
it's relative to the root of the lock file. */
const InputPath & followsPrefix,
const InputAttrPath & followsPrefix,
/* The source path of this node's flake. */
const SourcePath & sourcePath,
bool trustLock)
{
debug("computing lock file node '%s'", printInputPath(inputPathPrefix));
debug("computing lock file node '%s'", printInputAttrPath(inputAttrPathPrefix));
/* Get the overrides (i.e. attributes of the form
'inputs.nixops.inputs.nixpkgs.url = ...'). */
for (auto & [id, input] : flakeInputs) {
for (auto & [idOverride, inputOverride] : input.overrides) {
auto inputPath(inputPathPrefix);
inputPath.push_back(id);
inputPath.push_back(idOverride);
overrides.emplace(inputPath,
auto inputAttrPath(inputAttrPathPrefix);
inputAttrPath.push_back(id);
inputAttrPath.push_back(idOverride);
overrides.emplace(inputAttrPath,
OverrideTarget {
.input = inputOverride,
.sourcePath = sourcePath,
.parentInputPath = inputPathPrefix
.parentInputAttrPath = inputAttrPathPrefix
});
}
}
/* Check whether this input has overrides for a
non-existent input. */
for (auto [inputPath, inputOverride] : overrides) {
auto inputPath2(inputPath);
auto follow = inputPath2.back();
inputPath2.pop_back();
if (inputPath2 == inputPathPrefix && !flakeInputs.count(follow))
for (auto [inputAttrPath, inputOverride] : overrides) {
auto inputAttrPath2(inputAttrPath);
auto follow = inputAttrPath2.back();
inputAttrPath2.pop_back();
if (inputAttrPath2 == inputAttrPathPrefix && !flakeInputs.count(follow))
warn(
"input '%s' has an override for a non-existent input '%s'",
printInputPath(inputPathPrefix), follow);
printInputAttrPath(inputAttrPathPrefix), follow);
}
/* Go over the flake inputs, resolve/fetch them if
necessary (i.e. if they're new or the flakeref changed
from what's in the lock file). */
for (auto & [id, input2] : flakeInputs) {
auto inputPath(inputPathPrefix);
inputPath.push_back(id);
auto inputPathS = printInputPath(inputPath);
debug("computing input '%s'", inputPathS);
auto inputAttrPath(inputAttrPathPrefix);
inputAttrPath.push_back(id);
auto inputAttrPathS = printInputAttrPath(inputAttrPath);
debug("computing input '%s'", inputAttrPathS);
try {
/* Do we have an override for this input from one of the
ancestors? */
auto i = overrides.find(inputPath);
auto i = overrides.find(inputAttrPath);
bool hasOverride = i != overrides.end();
bool hasCliOverride = explicitCliOverrides.contains(inputPath);
bool hasCliOverride = explicitCliOverrides.contains(inputAttrPath);
if (hasOverride)
overridesUsed.insert(inputPath);
overridesUsed.insert(inputAttrPath);
auto input = hasOverride ? i->second.input : input2;
/* Resolve relative 'path:' inputs relative to
@ -525,11 +525,11 @@ LockedFlake lockFlake(
/* Resolve 'follows' later (since it may refer to an input
path we haven't processed yet. */
if (input.follows) {
InputPath target;
InputAttrPath target;
target.insert(target.end(), input.follows->begin(), input.follows->end());
debug("input '%s' follows '%s'", inputPathS, printInputPath(target));
debug("input '%s' follows '%s'", inputAttrPathS, printInputAttrPath(target));
node->inputs.insert_or_assign(id, target);
continue;
}
@ -538,7 +538,7 @@ LockedFlake lockFlake(
auto overridenParentPath =
input.ref->input.isRelative()
? std::optional<InputPath>(hasOverride ? i->second.parentInputPath : inputPathPrefix)
? std::optional<InputAttrPath>(hasOverride ? i->second.parentInputAttrPath : inputAttrPathPrefix)
: std::nullopt;
auto resolveRelativePath = [&]() -> std::optional<SourcePath>
@ -557,9 +557,9 @@ LockedFlake lockFlake(
auto getInputFlake = [&]()
{
if (auto resolvedPath = resolveRelativePath()) {
return readFlake(state, *input.ref, *input.ref, *input.ref, *resolvedPath, inputPath);
return readFlake(state, *input.ref, *input.ref, *input.ref, *resolvedPath, inputAttrPath);
} else {
return getFlake(state, *input.ref, useRegistries, flakeCache, inputPath);
return getFlake(state, *input.ref, useRegistries, flakeCache, inputAttrPath);
}
};
@ -567,19 +567,19 @@ LockedFlake lockFlake(
And the input is not in updateInputs? */
std::shared_ptr<LockedNode> oldLock;
updatesUsed.insert(inputPath);
updatesUsed.insert(inputAttrPath);
if (oldNode && !lockFlags.inputUpdates.count(inputPath))
if (oldNode && !lockFlags.inputUpdates.count(inputAttrPath))
if (auto oldLock2 = get(oldNode->inputs, id))
if (auto oldLock3 = std::get_if<0>(&*oldLock2))
oldLock = *oldLock3;
if (oldLock
&& oldLock->originalRef == *input.ref
&& oldLock->parentPath == overridenParentPath
&& oldLock->parentInputAttrPath == overridenParentPath
&& !hasCliOverride)
{
debug("keeping existing input '%s'", inputPathS);
debug("keeping existing input '%s'", inputAttrPathS);
/* Copy the input from the old lock since its flakeref
didn't change and there is no override from a
@ -588,18 +588,18 @@ LockedFlake lockFlake(
oldLock->lockedRef,
oldLock->originalRef,
oldLock->isFlake,
oldLock->parentPath);
oldLock->parentInputAttrPath);
node->inputs.insert_or_assign(id, childNode);
/* If we have this input in updateInputs, then we
must fetch the flake to update it. */
auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
auto lb = lockFlags.inputUpdates.lower_bound(inputAttrPath);
auto mustRefetch =
lb != lockFlags.inputUpdates.end()
&& lb->size() > inputPath.size()
&& std::equal(inputPath.begin(), inputPath.end(), lb->begin());
&& lb->size() > inputAttrPath.size()
&& std::equal(inputAttrPath.begin(), inputAttrPath.end(), lb->begin());
FlakeInputs fakeInputs;
@ -618,7 +618,7 @@ LockedFlake lockFlake(
if (!trustLock) {
// It is possible that the flake has changed,
// so we must confirm all the follows that are in the lock file are also in the flake.
auto overridePath(inputPath);
auto overridePath(inputAttrPath);
overridePath.push_back(i.first);
auto o = overrides.find(overridePath);
// If the override disappeared, we have to refetch the flake,
@ -642,21 +642,21 @@ LockedFlake lockFlake(
if (mustRefetch) {
auto inputFlake = getInputFlake();
nodePaths.emplace(childNode, inputFlake.path.parent());
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, followsPrefix,
computeLocks(inputFlake.inputs, childNode, inputAttrPath, oldLock, followsPrefix,
inputFlake.path, false);
} else {
computeLocks(fakeInputs, childNode, inputPath, oldLock, followsPrefix, sourcePath, true);
computeLocks(fakeInputs, childNode, inputAttrPath, oldLock, followsPrefix, sourcePath, true);
}
} else {
/* We need to create a new lock file entry. So fetch
this input. */
debug("creating new input '%s'", inputPathS);
debug("creating new input '%s'", inputAttrPathS);
if (!lockFlags.allowUnlocked
&& !input.ref->input.isLocked()
&& !input.ref->input.isRelative())
throw Error("cannot update unlocked flake input '%s' in pure mode", inputPathS);
throw Error("cannot update unlocked flake input '%s' in pure mode", inputAttrPathS);
/* Note: in case of an --override-input, we use
the *original* ref (input2.ref) for the
@ -665,7 +665,7 @@ LockedFlake lockFlake(
nuked the next time we update the lock
file. That is, overrides are sticky unless you
use --no-write-lock-file. */
auto ref = (input2.ref && explicitCliOverrides.contains(inputPath)) ? *input2.ref : *input.ref;
auto ref = (input2.ref && explicitCliOverrides.contains(inputAttrPath)) ? *input2.ref : *input.ref;
if (input.isFlake) {
auto inputFlake = getInputFlake();
@ -691,11 +691,11 @@ LockedFlake lockFlake(
own lock file. */
nodePaths.emplace(childNode, inputFlake.path.parent());
computeLocks(
inputFlake.inputs, childNode, inputPath,
inputFlake.inputs, childNode, inputAttrPath,
oldLock
? std::dynamic_pointer_cast<const Node>(oldLock)
: readLockFile(state.fetchSettings, inputFlake.lockFilePath()).root.get_ptr(),
oldLock ? followsPrefix : inputPath,
oldLock ? followsPrefix : inputAttrPath,
inputFlake.path,
false);
}
@ -722,7 +722,7 @@ LockedFlake lockFlake(
}
} catch (Error & e) {
e.addTrace({}, "while updating the flake input '%s'", inputPathS);
e.addTrace({}, "while updating the flake input '%s'", inputAttrPathS);
throw;
}
}
@ -742,11 +742,11 @@ LockedFlake lockFlake(
for (auto & i : lockFlags.inputOverrides)
if (!overridesUsed.count(i.first))
warn("the flag '--override-input %s %s' does not match any input",
printInputPath(i.first), i.second);
printInputAttrPath(i.first), i.second);
for (auto & i : lockFlags.inputUpdates)
if (!updatesUsed.count(i))
warn("'%s' does not match any input of this flake", printInputPath(i));
warn("'%s' does not match any input of this flake", printInputAttrPath(i));
/* Check 'follows' inputs. */
newLockFile.check();
View file
@ -57,7 +57,7 @@ struct FlakeInput
* false = (fetched) static source path
*/
bool isFlake = true;
std::optional<InputPath> follows;
std::optional<InputAttrPath> follows;
FlakeInputs overrides;
};
@ -201,13 +201,13 @@ struct LockFlags
/**
* Flake inputs to be overridden.
*/
std::map<InputPath, FlakeRef> inputOverrides;
std::map<InputAttrPath, FlakeRef> inputOverrides;
/**
* Flake inputs to be updated. This means that any existing lock
* for those inputs will be ignored.
*/
std::set<InputPath> inputUpdates;
std::set<InputAttrPath> inputUpdates;
};
LockedFlake lockFlake(
View file
@ -107,7 +107,7 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
to 'baseDir'). If so, search upward to the root of the
repo (i.e. the directory containing .git). */
path = absPath(path, baseDir);
path = absPath(path, baseDir, true);
if (isFlake) {
View file
@ -1,7 +1,10 @@
#include <unordered_set>
#include "fetch-settings.hh"
#include "flake/settings.hh"
#include "lockfile.hh"
#include "store-api.hh"
#include "strings.hh"
#include <algorithm>
#include <iomanip>
@ -9,8 +12,6 @@
#include <iterator>
#include <nlohmann/json.hpp>
#include "strings.hh"
#include "flake/settings.hh"
namespace nix::flake {
@ -43,11 +44,18 @@ LockedNode::LockedNode(
: lockedRef(getFlakeRef(fetchSettings, json, "locked", "info")) // FIXME: remove "info"
, originalRef(getFlakeRef(fetchSettings, json, "original", nullptr))
, isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
, parentPath(json.find("parent") != json.end() ? (std::optional<InputPath>) json["parent"] : std::nullopt)
, parentInputAttrPath(json.find("parent") != json.end() ? (std::optional<InputAttrPath>) json["parent"] : std::nullopt)
{
if (!lockedRef.input.isConsideredLocked(fetchSettings) && !lockedRef.input.isRelative())
if (!lockedRef.input.isLocked() && !lockedRef.input.isRelative()) {
if (lockedRef.input.getNarHash())
warn(
"Lock file entry '%s' is unlocked (e.g. lacks a Git revision) but does have a NAR hash. "
"This is deprecated since such inputs are verifiable but may not be reproducible.",
lockedRef.to_string());
else
throw Error("Lock file contains unlocked input '%s'. Use '--allow-dirty-locks' to accept this lock file.",
fetchers::attrsToJSON(lockedRef.input.toAttrs()));
}
// For backward compatibility, lock file entries are implicitly final.
assert(!lockedRef.input.attrs.contains("__final"));
@ -59,7 +67,7 @@ StorePath LockedNode::computeStorePath(Store & store) const
return lockedRef.input.computeStorePath(store);
}
static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputPath & path, std::vector<InputPath> & visited)
static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputAttrPath & path, std::vector<InputAttrPath> & visited)
{
auto pos = root;
@ -67,8 +75,8 @@ static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputPath & pa
if (found != visited.end()) {
std::vector<std::string> cycle;
std::transform(found, visited.cend(), std::back_inserter(cycle), printInputPath);
cycle.push_back(printInputPath(path));
std::transform(found, visited.cend(), std::back_inserter(cycle), printInputAttrPath);
cycle.push_back(printInputAttrPath(path));
throw Error("follow cycle detected: [%s]", concatStringsSep(" -> ", cycle));
}
visited.push_back(path);
@ -90,9 +98,9 @@ static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputPath & pa
return pos;
}
std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
std::shared_ptr<Node> LockFile::findInput(const InputAttrPath & path)
{
std::vector<InputPath> visited;
std::vector<InputAttrPath> visited;
return doFind(root, path, visited);
}
@ -115,7 +123,7 @@ LockFile::LockFile(
if (jsonNode.find("inputs") == jsonNode.end()) return;
for (auto & i : jsonNode["inputs"].items()) {
if (i.value().is_array()) { // FIXME: remove, obsolete
InputPath path;
InputAttrPath path;
for (auto & j : i.value())
path.push_back(j);
node.inputs.insert_or_assign(i.key(), path);
@ -203,8 +211,8 @@ std::pair<nlohmann::json, LockFile::KeyMap> LockFile::toJSON() const
n["locked"].erase("__final");
if (!lockedNode->isFlake)
n["flake"] = false;
if (lockedNode->parentPath)
n["parent"] = *lockedNode->parentPath;
if (lockedNode->parentInputAttrPath)
n["parent"] = *lockedNode->parentInputAttrPath;
}
nodes[key] = std::move(n);
@ -248,11 +256,20 @@ std::optional<FlakeRef> LockFile::isUnlocked(const fetchers::Settings & fetchSet
visit(root);
/* Return whether the input is either locked, or, if
`allow-dirty-locks` is enabled, it has a NAR hash. In the
latter case, we can verify the input but we may not be able to
fetch it from anywhere. */
auto isConsideredLocked = [&](const fetchers::Input & input)
{
return input.isLocked() || (fetchSettings.allowDirtyLocks && input.getNarHash());
};
for (auto & i : nodes) {
if (i == ref<const Node>(root)) continue;
auto node = i.dynamic_pointer_cast<const LockedNode>();
if (node
&& (!node->lockedRef.input.isConsideredLocked(fetchSettings)
&& (!isConsideredLocked(node->lockedRef.input)
|| !node->lockedRef.input.isFinal())
&& !node->lockedRef.input.isRelative())
return node->lockedRef;
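
A minimal restatement of the new lock policy, using only the calls shown above (illustrative, not part of the commit):

    // An input is considered locked if it is truly locked, or if
    // `allow-dirty-locks` is enabled and a NAR hash is present:
    bool consideredLocked =
        input.isLocked()
        || (fetchSettings.allowDirtyLocks && input.getNarHash());
    // A NAR hash makes such an input verifiable but not necessarily
    // refetchable, hence the new deprecation warning for these entries.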
@ -267,36 +284,36 @@ bool LockFile::operator ==(const LockFile & other) const
return toJSON().first == other.toJSON().first;
}
InputPath parseInputPath(std::string_view s)
InputAttrPath parseInputAttrPath(std::string_view s)
{
InputPath path;
InputAttrPath path;
for (auto & elem : tokenizeString<std::vector<std::string>>(s, "/")) {
if (!std::regex_match(elem, flakeIdRegex))
throw UsageError("invalid flake input path element '%s'", elem);
throw UsageError("invalid flake input attribute path element '%s'", elem);
path.push_back(elem);
}
return path;
}
std::map<InputPath, Node::Edge> LockFile::getAllInputs() const
std::map<InputAttrPath, Node::Edge> LockFile::getAllInputs() const
{
std::set<ref<Node>> done;
std::map<InputPath, Node::Edge> res;
std::map<InputAttrPath, Node::Edge> res;
std::function<void(const InputPath & prefix, ref<Node> node)> recurse;
std::function<void(const InputAttrPath & prefix, ref<Node> node)> recurse;
recurse = [&](const InputPath & prefix, ref<Node> node)
recurse = [&](const InputAttrPath & prefix, ref<Node> node)
{
if (!done.insert(node).second) return;
for (auto &[id, input] : node->inputs) {
auto inputPath(prefix);
inputPath.push_back(id);
res.emplace(inputPath, input);
auto inputAttrPath(prefix);
inputAttrPath.push_back(id);
res.emplace(inputAttrPath, input);
if (auto child = std::get_if<0>(&input))
recurse(inputPath, *child);
recurse(inputAttrPath, *child);
}
};
@ -320,7 +337,7 @@ std::ostream & operator <<(std::ostream & stream, const Node::Edge & edge)
if (auto node = std::get_if<0>(&edge))
stream << describe((*node)->lockedRef);
else if (auto follows = std::get_if<1>(&edge))
stream << fmt("follows '%s'", printInputPath(*follows));
stream << fmt("follows '%s'", printInputAttrPath(*follows));
return stream;
}
@ -347,15 +364,15 @@ std::string LockFile::diff(const LockFile & oldLocks, const LockFile & newLocks)
while (i != oldFlat.end() || j != newFlat.end()) {
if (j != newFlat.end() && (i == oldFlat.end() || i->first > j->first)) {
res += fmt("" ANSI_GREEN "Added input '%s':" ANSI_NORMAL "\n %s\n",
printInputPath(j->first), j->second);
printInputAttrPath(j->first), j->second);
++j;
} else if (i != oldFlat.end() && (j == newFlat.end() || i->first < j->first)) {
res += fmt("" ANSI_RED "Removed input '%s'" ANSI_NORMAL "\n", printInputPath(i->first));
res += fmt("" ANSI_RED "Removed input '%s'" ANSI_NORMAL "\n", printInputAttrPath(i->first));
++i;
} else {
if (!equals(i->second, j->second)) {
res += fmt("" ANSI_BOLD "Updated input '%s':" ANSI_NORMAL "\n %s\n → %s\n",
printInputPath(i->first),
printInputAttrPath(i->first),
i->second,
j->second);
}
@ -371,19 +388,19 @@ void LockFile::check()
{
auto inputs = getAllInputs();
for (auto & [inputPath, input] : inputs) {
for (auto & [inputAttrPath, input] : inputs) {
if (auto follows = std::get_if<1>(&input)) {
if (!follows->empty() && !findInput(*follows))
throw Error("input '%s' follows a non-existent input '%s'",
printInputPath(inputPath),
printInputPath(*follows));
printInputAttrPath(inputAttrPath),
printInputAttrPath(*follows));
}
}
}
void check();
std::string printInputPath(const InputPath & path)
std::string printInputAttrPath(const InputAttrPath & path)
{
return concatStringsSep("/", path);
}

View file

@ -12,7 +12,7 @@ class StorePath;
namespace nix::flake {
typedef std::vector<FlakeId> InputPath;
typedef std::vector<FlakeId> InputAttrPath;
struct LockedNode;
@ -23,7 +23,7 @@ struct LockedNode;
*/
struct Node : std::enable_shared_from_this<Node>
{
typedef std::variant<ref<LockedNode>, InputPath> Edge;
typedef std::variant<ref<LockedNode>, InputAttrPath> Edge;
std::map<FlakeId, Edge> inputs;
@ -40,17 +40,17 @@ struct LockedNode : Node
/* The node relative to which relative source paths
(e.g. 'path:../foo') are interpreted. */
std::optional<InputPath> parentPath;
std::optional<InputAttrPath> parentInputAttrPath;
LockedNode(
const FlakeRef & lockedRef,
const FlakeRef & originalRef,
bool isFlake = true,
std::optional<InputPath> parentPath = {})
: lockedRef(lockedRef)
, originalRef(originalRef)
std::optional<InputAttrPath> parentInputAttrPath = {})
: lockedRef(std::move(lockedRef))
, originalRef(std::move(originalRef))
, isFlake(isFlake)
, parentPath(parentPath)
, parentInputAttrPath(std::move(parentInputAttrPath))
{ }
LockedNode(
@ -83,9 +83,9 @@ struct LockFile
bool operator ==(const LockFile & other) const;
std::shared_ptr<Node> findInput(const InputPath & path);
std::shared_ptr<Node> findInput(const InputAttrPath & path);
std::map<InputPath, Node::Edge> getAllInputs() const;
std::map<InputAttrPath, Node::Edge> getAllInputs() const;
static std::string diff(const LockFile & oldLocks, const LockFile & newLocks);
@ -97,8 +97,8 @@ struct LockFile
std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile);
InputPath parseInputPath(std::string_view s);
InputAttrPath parseInputAttrPath(std::string_view s);
std::string printInputPath(const InputPath & path);
std::string printInputAttrPath(const InputAttrPath & path);
}
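
Since the InputPath → InputAttrPath rename touches many call sites, a tiny usage sketch of the renamed helpers may help (illustrative; behavior is unchanged):

    // The rename is mechanical: the same std::vector<FlakeId> alias, new name.
    InputAttrPath p = parseInputAttrPath("nixops/nixpkgs");  // {"nixops", "nixpkgs"}
    assert(printInputAttrPath(p) == "nixops/nixpkgs");       // joined with '/'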
View file
@ -1,15 +1,16 @@
{ lib
, mkMesonLibrary
{
lib,
mkMesonLibrary,
, nix-util
, nix-store
, nix-fetchers
, nix-expr
, nlohmann_json
nix-util,
nix-store,
nix-fetchers,
nix-expr,
nlohmann_json,
# Configuration Options
, version
version,
}:
let
View file
@ -1,14 +1,15 @@
{ lib
, mkMesonLibrary
{
lib,
mkMesonLibrary,
, nix-util-c
, nix-store
, nix-store-c
, nix-main
nix-util-c,
nix-store,
nix-store-c,
nix-main,
# Configuration Options
, version
version,
}:
let
View file
@ -1,14 +1,15 @@
{ lib
, mkMesonLibrary
{
lib,
mkMesonLibrary,
, openssl
openssl,
, nix-util
, nix-store
nix-util,
nix-store,
# Configuration Options
, version
version,
}:
let
View file
@ -1,12 +1,13 @@
{ lib
, mkMesonLibrary
{
lib,
mkMesonLibrary,
, nix-util-c
, nix-store
nix-util-c,
nix-store,
# Configuration Options
, version
version,
}:
let

View file
{ lib
, mkMesonLibrary
{
lib,
mkMesonLibrary,
, nix-util-test-support
, nix-store
, nix-store-c
nix-util-test-support,
nix-store,
nix-store-c,
, rapidcheck
rapidcheck,
# Configuration Options
, version
version,
}:
let

View file
{ lib
, buildPackages
, stdenv
, mkMesonExecutable
{
lib,
buildPackages,
stdenv,
mkMesonExecutable,
, nix-store
, nix-store-c
, nix-store-test-support
, sqlite
nix-store,
nix-store-c,
nix-store-test-support,
sqlite,
, rapidcheck
, gtest
, runCommand
rapidcheck,
gtest,
runCommand,
# Configuration Options
, version
, filesetToSource
version,
filesetToSource,
}:
let
@ -64,7 +65,8 @@ mkMesonExecutable (finalAttrs: {
passthru = {
tests = {
run = let
run =
let
# Some data is shared with the functional tests: they create it,
# we consume it.
data = filesetToSource {
@ -74,16 +76,22 @@ mkMesonExecutable (finalAttrs: {
../../tests/functional/derivation
];
};
in runCommand "${finalAttrs.pname}-run" {
in
runCommand "${finalAttrs.pname}-run"
{
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
} (lib.optionalString stdenv.hostPlatform.isWindows ''
}
(
lib.optionalString stdenv.hostPlatform.isWindows ''
export HOME="$PWD/home-dir"
mkdir -p "$HOME"
'' + ''
''
+ ''
export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"}
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
touch $out
'');
''
);
};
};
View file
@ -36,14 +36,6 @@
namespace nix {
Goal::Co DerivationGoal::init() {
if (useDerivation) {
co_return getDerivation();
} else {
co_return haveDerivation();
}
}
DerivationGoal::DerivationGoal(const StorePath & drvPath,
const OutputsSpec & wantedOutputs, Worker & worker, BuildMode buildMode)
: Goal(worker, DerivedPath::Built { .drvPath = makeConstantStorePathRef(drvPath), .outputs = wantedOutputs })
@ -141,26 +133,19 @@ void DerivationGoal::addWantedOutputs(const OutputsSpec & outputs)
}
Goal::Co DerivationGoal::getDerivation()
{
Goal::Co DerivationGoal::init() {
trace("init");
if (useDerivation) {
/* The first thing to do is to make sure that the derivation
exists. If it doesn't, it may be created through a
substitute. */
if (buildMode == bmNormal && worker.evalStore.isValidPath(drvPath)) {
co_return loadDerivation();
}
if (buildMode != bmNormal || !worker.evalStore.isValidPath(drvPath)) {
addWaitee(upcast_goal(worker.makePathSubstitutionGoal(drvPath)));
co_await Suspend{};
co_return loadDerivation();
}
Goal::Co DerivationGoal::loadDerivation()
{
trace("loading derivation");
if (nrFailed != 0) {
@ -185,6 +170,7 @@ Goal::Co DerivationGoal::loadDerivation()
}
}
assert(drv);
}
co_return haveDerivation();
}
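
A hedged sketch of the merged control flow after this change: getDerivation() and loadDerivation() are folded into init(), which now suspends at most once while the .drv file is substituted (simplified; error handling elided):

    Goal::Co DerivationGoal::init() {
        trace("init");
        if (useDerivation) {
            /* Substitute the .drv file first if it is missing or if a
               non-normal build mode forces a refetch. */
            if (buildMode != bmNormal || !worker.evalStore.isValidPath(drvPath)) {
                addWaitee(upcast_goal(worker.makePathSubstitutionGoal(drvPath)));
                co_await Suspend{}; // resume once the substitution goal finishes
            }
            // ... load the derivation, then assert(drv) ...
        }
        co_return haveDerivation();
    }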
@ -235,6 +221,7 @@ Goal::Co DerivationGoal::haveDerivation()
}
});
{
/* Check what outputs paths are not already valid. */
auto [allValid, validOutputs] = checkPathValidity();
@ -242,6 +229,7 @@ Goal::Co DerivationGoal::haveDerivation()
if (allValid && buildMode == bmNormal) {
co_return done(BuildResult::AlreadyValid, std::move(validOutputs));
}
}
/* We are first going to try to create the invalid output paths
through substitutes. If that doesn't work, we'll build
@ -268,12 +256,7 @@ Goal::Co DerivationGoal::haveDerivation()
}
if (!waitees.empty()) co_await Suspend{}; /* to prevent hang (no wake-up event) */
co_return outputsSubstitutionTried();
}
Goal::Co DerivationGoal::outputsSubstitutionTried()
{
trace("all outputs substituted (maybe)");
assert(!drv->type().isImpure());
@ -399,84 +382,7 @@ Goal::Co DerivationGoal::gaveUpOnSubstitution()
}
if (!waitees.empty()) co_await Suspend{}; /* to prevent hang (no wake-up event) */
co_return inputsRealised();
}
Goal::Co DerivationGoal::repairClosure()
{
assert(!drv->type().isImpure());
/* If we're repairing, we now know that our own outputs are valid.
Now check whether the other paths in the outputs closure are
good. If not, then start derivation goals for the derivations
that produced those outputs. */
/* Get the output closure. */
auto outputs = queryDerivationOutputMap();
StorePathSet outputClosure;
for (auto & i : outputs) {
if (!wantedOutputs.contains(i.first)) continue;
worker.store.computeFSClosure(i.second, outputClosure);
}
/* Filter out our own outputs (which we have already checked). */
for (auto & i : outputs)
outputClosure.erase(i.second);
/* Get all dependencies of this derivation so that we know which
derivation is responsible for which path in the output
closure. */
StorePathSet inputClosure;
if (useDerivation) worker.store.computeFSClosure(drvPath, inputClosure);
std::map<StorePath, StorePath> outputsToDrv;
for (auto & i : inputClosure)
if (i.isDerivation()) {
auto depOutputs = worker.store.queryPartialDerivationOutputMap(i, &worker.evalStore);
for (auto & j : depOutputs)
if (j.second)
outputsToDrv.insert_or_assign(*j.second, i);
}
/* Check each path (slow!). */
for (auto & i : outputClosure) {
if (worker.pathContentsGood(i)) continue;
printError(
"found corrupted or missing path '%s' in the output closure of '%s'",
worker.store.printStorePath(i), worker.store.printStorePath(drvPath));
auto drvPath2 = outputsToDrv.find(i);
if (drvPath2 == outputsToDrv.end())
addWaitee(upcast_goal(worker.makePathSubstitutionGoal(i, Repair)));
else
addWaitee(worker.makeGoal(
DerivedPath::Built {
.drvPath = makeConstantStorePathRef(drvPath2->second),
.outputs = OutputsSpec::All { },
},
bmRepair));
}
if (waitees.empty()) {
co_return done(BuildResult::AlreadyValid, assertPathValidity());
} else {
co_await Suspend{};
co_return closureRepaired();
}
}
Goal::Co DerivationGoal::closureRepaired()
{
trace("closure repaired");
if (nrFailed > 0)
throw Error("some paths in the output closure of derivation '%s' could not be repaired",
worker.store.printStorePath(drvPath));
co_return done(BuildResult::AlreadyValid, assertPathValidity());
}
Goal::Co DerivationGoal::inputsRealised()
{
trace("all inputs realised");
if (nrFailed != 0) {
@ -766,6 +672,73 @@ Goal::Co DerivationGoal::tryLocalBuild() {
}
Goal::Co DerivationGoal::repairClosure()
{
assert(!drv->type().isImpure());
/* If we're repairing, we now know that our own outputs are valid.
Now check whether the other paths in the outputs closure are
good. If not, then start derivation goals for the derivations
that produced those outputs. */
/* Get the output closure. */
auto outputs = queryDerivationOutputMap();
StorePathSet outputClosure;
for (auto & i : outputs) {
if (!wantedOutputs.contains(i.first)) continue;
worker.store.computeFSClosure(i.second, outputClosure);
}
/* Filter out our own outputs (which we have already checked). */
for (auto & i : outputs)
outputClosure.erase(i.second);
/* Get all dependencies of this derivation so that we know which
derivation is responsible for which path in the output
closure. */
StorePathSet inputClosure;
if (useDerivation) worker.store.computeFSClosure(drvPath, inputClosure);
std::map<StorePath, StorePath> outputsToDrv;
for (auto & i : inputClosure)
if (i.isDerivation()) {
auto depOutputs = worker.store.queryPartialDerivationOutputMap(i, &worker.evalStore);
for (auto & j : depOutputs)
if (j.second)
outputsToDrv.insert_or_assign(*j.second, i);
}
/* Check each path (slow!). */
for (auto & i : outputClosure) {
if (worker.pathContentsGood(i)) continue;
printError(
"found corrupted or missing path '%s' in the output closure of '%s'",
worker.store.printStorePath(i), worker.store.printStorePath(drvPath));
auto drvPath2 = outputsToDrv.find(i);
if (drvPath2 == outputsToDrv.end())
addWaitee(upcast_goal(worker.makePathSubstitutionGoal(i, Repair)));
else
addWaitee(worker.makeGoal(
DerivedPath::Built {
.drvPath = makeConstantStorePathRef(drvPath2->second),
.outputs = OutputsSpec::All { },
},
bmRepair));
}
if (waitees.empty()) {
co_return done(BuildResult::AlreadyValid, assertPathValidity());
} else {
co_await Suspend{};
trace("closure repaired");
if (nrFailed > 0)
throw Error("some paths in the output closure of derivation '%s' could not be repaired",
worker.store.printStorePath(drvPath));
co_return done(BuildResult::AlreadyValid, assertPathValidity());
}
}
static void chmod_(const Path & path, mode_t mode)
{
if (chmod(path.c_str(), mode) == -1)

View file

@ -80,7 +80,7 @@ struct DerivationGoal : public Goal
/**
* Mapping from input derivations + output names to actual store
* paths. This is filled in by waiteeDone() as each dependency
* finishes, before inputsRealised() is reached.
* finishes, before `trace("all inputs realised")` is reached.
*/
std::map<std::pair<StorePath, std::string>, StorePath> inputDrvOutputs;
@ -233,13 +233,8 @@ struct DerivationGoal : public Goal
* The states.
*/
Co init() override;
Co getDerivation();
Co loadDerivation();
Co haveDerivation();
Co outputsSubstitutionTried();
Co gaveUpOnSubstitution();
Co closureRepaired();
Co inputsRealised();
Co tryToBuild();
virtual Co tryLocalBuild();
Co buildDone();
View file
@ -50,8 +50,9 @@ struct FileTransferSettings : Config
Setting<size_t> downloadBufferSize{this, 64 * 1024 * 1024, "download-buffer-size",
R"(
The size of Nix's internal download buffer during `curl` transfers. If data is
The size of Nix's internal download buffer in bytes during `curl` transfers. If data is
not processed quickly enough to exceed the size of this buffer, downloads may stall.
The default is 67108864 (64 MiB).
)"};
};
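
As a usage note (an assumption, not part of the commit): if downloads stall, the buffer can be enlarged in nix.conf, for example:

    // nix.conf (hypothetical example): double the default 64 MiB buffer
    //   download-buffer-size = 134217728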
View file
@ -167,16 +167,15 @@ LocalStore::LocalStore(
/* Ensure that the store and its parents are not symlinks. */
if (!settings.allowSymlinkedStore) {
Path path = realStoreDir;
struct stat st;
while (path != "/") {
st = lstat(path);
if (S_ISLNK(st.st_mode))
std::filesystem::path path = realStoreDir.get();
std::filesystem::path root = path.root_path();
while (path != root) {
if (std::filesystem::is_symlink(path))
throw Error(
"the path '%1%' is a symlink; "
"this is not allowed for the Nix store and its parent directories",
path);
path = dirOf(path);
path = path.parent_path();
}
}
View file
@ -1,25 +1,26 @@
{ lib
, stdenv
, mkMesonLibrary
{
lib,
stdenv,
mkMesonLibrary,
, unixtools
, darwin
unixtools,
darwin,
, nix-util
, boost
, curl
, aws-sdk-cpp
, libseccomp
, nlohmann_json
, sqlite
nix-util,
boost,
curl,
aws-sdk-cpp,
libseccomp,
nlohmann_json,
sqlite,
, busybox-sandbox-shell ? null
busybox-sandbox-shell ? null,
# Configuration Options
, version
version,
, embeddedSandboxShell ? stdenv.hostPlatform.isStatic
embeddedSandboxShell ? stdenv.hostPlatform.isStatic,
}:
let
@ -48,19 +49,20 @@ mkMesonLibrary (finalAttrs: {
(fileset.fileFilter (file: file.hasExt "sql") ./.)
];
nativeBuildInputs =
lib.optional embeddedSandboxShell unixtools.hexdump;
nativeBuildInputs = lib.optional embeddedSandboxShell unixtools.hexdump;
buildInputs = [
buildInputs =
[
boost
curl
sqlite
] ++ lib.optional stdenv.hostPlatform.isLinux libseccomp
]
++ lib.optional stdenv.hostPlatform.isLinux libseccomp
# There have been issues building these dependencies
++ lib.optional stdenv.hostPlatform.isDarwin darwin.apple_sdk.libs.sandbox
++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin))
aws-sdk-cpp
;
++ lib.optional (
stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin)
) aws-sdk-cpp;
propagatedBuildInputs = [
nix-util
@ -75,10 +77,12 @@ mkMesonLibrary (finalAttrs: {
echo ${version} > ../../.version
'';
mesonFlags = [
mesonFlags =
[
(lib.mesonEnable "seccomp-sandboxing" stdenv.hostPlatform.isLinux)
(lib.mesonBool "embedded-sandbox-shell" embeddedSandboxShell)
] ++ lib.optionals stdenv.hostPlatform.isLinux [
]
++ lib.optionals stdenv.hostPlatform.isLinux [
(lib.mesonOption "sandbox-shell" "${busybox-sandbox-shell}/bin/busybox")
];
View file
@ -539,11 +539,21 @@ void RemoteStore::addMultipleToStore(
RepairFlag repair,
CheckSigsFlag checkSigs)
{
// `addMultipleToStore` is single threaded
size_t bytesExpected = 0;
for (auto & [pathInfo, _] : pathsToCopy) {
bytesExpected += pathInfo.narSize;
}
act.setExpected(actCopyPath, bytesExpected);
auto source = sinkToSource([&](Sink & sink) {
sink << pathsToCopy.size();
size_t nrTotal = pathsToCopy.size();
sink << nrTotal;
// Reverse, so we can release memory at the original start
std::reverse(pathsToCopy.begin(), pathsToCopy.end());
while (!pathsToCopy.empty()) {
act.progress(nrTotal - pathsToCopy.size(), nrTotal, size_t(1), size_t(0));
auto & [pathInfo, pathSource] = pathsToCopy.back();
WorkerProto::Serialise<ValidPathInfo>::write(*this,
WorkerProto::WriteConn {
View file
@ -242,8 +242,8 @@ void Store::addMultipleToStore(
storePathsToAdd.insert(thingToAdd.first.path);
}
auto showProgress = [&]() {
act.progress(nrDone, pathsToCopy.size(), nrRunning, nrFailed);
auto showProgress = [&, nrTotal = pathsToCopy.size()]() {
act.progress(nrDone, nrTotal, nrRunning, nrFailed);
};
processGraph<StorePath>(
@ -1104,9 +1104,6 @@ std::map<StorePath, StorePath> copyPaths(
return storePathForDst;
};
// total is accessed by each copy, which are each handled in separate threads
std::atomic<uint64_t> total = 0;
for (auto & missingPath : sortedMissing) {
auto info = srcStore.queryPathInfo(missingPath);
@ -1116,9 +1113,10 @@ std::map<StorePath, StorePath> copyPaths(
ValidPathInfo infoForDst = *info;
infoForDst.path = storePathForDst;
auto source = sinkToSource([&](Sink & sink) {
auto source = sinkToSource([&, narSize = info->narSize](Sink & sink) {
// We can reasonably assume that the copy will happen whenever we
// read the path, so log something about that at that point
uint64_t total = 0;
auto srcUri = srcStore.getUri();
auto dstUri = dstStore.getUri();
auto storePathS = srcStore.printStorePath(missingPath);
@ -1129,13 +1127,13 @@ std::map<StorePath, StorePath> copyPaths(
LambdaSink progressSink([&](std::string_view data) {
total += data.size();
act.progress(total, info->narSize);
act.progress(total, narSize);
});
TeeSink tee { sink, progressSink };
srcStore.narFromPath(missingPath, tee);
});
pathsToCopy.push_back(std::pair{infoForDst, std::move(source)});
pathsToCopy.emplace_back(std::move(infoForDst), std::move(source));
}
dstStore.addMultipleToStore(std::move(pathsToCopy), act, repair, checkSigs);
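
A hedged note on the lambda changes above: the deferred source previously shared one std::atomic counter across threads and read info->narSize through a capture that could outlive `info`; each source now owns its own state. Illustrative contrast:

    // Before (shared counter, dangling-prone capture):
    //   std::atomic<uint64_t> total = 0;   // shared by all copies
    //   auto source = sinkToSource([&](Sink & sink) { ... info->narSize ... });
    // After (self-contained per path):
    //   auto source = sinkToSource([&, narSize = info->narSize](Sink & sink) {
    //       uint64_t total = 0;            // local byte counter
    //       ...
    //   });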
View file
@ -2565,7 +2565,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
case FileIngestionMethod::Git: {
return git::dumpHash(
outputHash.hashAlgo,
{getFSSourceAccessor(), CanonPath(tmpDir + "/tmp")}).hash;
{getFSSourceAccessor(), CanonPath(actualPath)}).hash;
}
}
assert(false);
@ -2657,10 +2657,14 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
wanted.to_string(HashFormat::SRI, true),
got.to_string(HashFormat::SRI, true)));
}
if (!newInfo0.references.empty())
if (!newInfo0.references.empty()) {
auto numViolations = newInfo.references.size();
delayedException = std::make_exception_ptr(
BuildError("illegal path references in fixed-output derivation '%s'",
worker.store.printStorePath(drvPath)));
BuildError("fixed-output derivations must not reference store paths: '%s' references %d distinct paths, e.g. '%s'",
worker.store.printStorePath(drvPath),
numViolations,
worker.store.printStorePath(*newInfo.references.begin())));
}
return newInfo0;
},
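
An illustrative rendering of the improved diagnostic (store paths hypothetical):

    // error: fixed-output derivations must not reference store paths:
    //   '/nix/store/...-example.drv' references 2 distinct paths,
    //   e.g. '/nix/store/...-some-dependency'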
View file
@ -1,11 +1,12 @@
{ lib
, mkMesonLibrary
{
lib,
mkMesonLibrary,
, nix-util
nix-util,
# Configuration Options
, version
version,
}:
let
View file
@ -1,14 +1,15 @@
{ lib
, mkMesonLibrary
{
lib,
mkMesonLibrary,
, nix-util
, nix-util-c
nix-util,
nix-util-c,
, rapidcheck
rapidcheck,
# Configuration Options
, version
version,
}:
let
View file
@ -1,19 +1,20 @@
{ lib
, buildPackages
, stdenv
, mkMesonExecutable
{
lib,
buildPackages,
stdenv,
mkMesonExecutable,
, nix-util
, nix-util-c
, nix-util-test-support
nix-util,
nix-util-c,
nix-util-test-support,
, rapidcheck
, gtest
, runCommand
rapidcheck,
gtest,
runCommand,
# Configuration Options
, version
version,
}:
let
@ -57,16 +58,22 @@ mkMesonExecutable (finalAttrs: {
passthru = {
tests = {
run = runCommand "${finalAttrs.pname}-run" {
run =
runCommand "${finalAttrs.pname}-run"
{
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
} (lib.optionalString stdenv.hostPlatform.isWindows ''
}
(
lib.optionalString stdenv.hostPlatform.isWindows ''
export HOME="$PWD/home-dir"
mkdir -p "$HOME"
'' + ''
''
+ ''
export _NIX_TEST_UNIT_DATA=${./data}
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
touch $out
'');
''
);
};
};
View file
@ -19,10 +19,6 @@
# include "namespaces.hh"
#endif
#ifndef _WIN32
# include <sys/resource.h>
#endif
namespace nix {
unsigned int getMaxCPU()
@ -55,11 +51,11 @@ unsigned int getMaxCPU()
//////////////////////////////////////////////////////////////////////
#ifndef _WIN32
size_t savedStackSize = 0;
void setStackSize(size_t stackSize)
{
#ifndef _WIN32
struct rlimit limit;
if (getrlimit(RLIMIT_STACK, &limit) == 0 && limit.rlim_cur < stackSize) {
savedStackSize = limit.rlim_cur;
@ -77,31 +73,8 @@ void setStackSize(size_t stackSize)
);
}
}
#else
ULONG_PTR stackLow, stackHigh;
GetCurrentThreadStackLimits(&stackLow, &stackHigh);
ULONG maxStackSize = stackHigh - stackLow;
ULONG currStackSize = 0;
// This retrieves the current promised stack size
SetThreadStackGuarantee(&currStackSize);
if (currStackSize < stackSize) {
savedStackSize = currStackSize;
ULONG newStackSize = std::min(static_cast<ULONG>(stackSize), maxStackSize);
if (SetThreadStackGuarantee(&newStackSize) == 0) {
logger->log(
lvlError,
HintFmt(
"Failed to increase stack size from %1% to %2% (maximum allowed stack size: %3%): %4%",
savedStackSize,
stackSize,
maxStackSize,
std::to_string(GetLastError())
).str()
);
}
}
#endif
}
void restoreProcessContext(bool restoreMounts)
{
View file
@ -17,10 +17,13 @@ namespace nix {
*/
unsigned int getMaxCPU();
// It does not seem possible to dynamically change stack size on Windows.
#ifndef _WIN32
/**
* Change the stack size.
*/
void setStackSize(size_t stackSize);
#endif
/**
* Restore the original inherited Unix process context (such as signal
View file
@ -1,18 +1,19 @@
{ lib
, stdenv
, mkMesonLibrary
{
lib,
stdenv,
mkMesonLibrary,
, boost
, brotli
, libarchive
, libcpuid
, libsodium
, nlohmann_json
, openssl
boost,
brotli,
libarchive,
libcpuid,
libsodium,
nlohmann_json,
openssl,
# Configuration Options
, version
version,
}:
let
@ -43,8 +44,7 @@ mkMesonLibrary (finalAttrs: {
brotli
libsodium
openssl
] ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid
;
] ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid;
propagatedBuildInputs = [
boost
View file
@ -274,6 +274,17 @@ std::optional<typename T::value_type> pop(T & c)
}
/**
* Append items to a container. TODO: remove this once we can use
* C++23's `append_range()`.
*/
template<class C, typename T>
void append(C & c, std::initializer_list<T> l)
{
c.insert(c.end(), l.begin(), l.end());
}
template<typename T>
class Callback;
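
A minimal usage sketch for the new append() helper (illustrative, not part of the commit):

    std::vector<int> xs = {1, 2};
    append(xs, {3, 4});  // xs now holds {1, 2, 3, 4}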
View file
@ -1,4 +1,8 @@
{ name, channelName, src }:
{
name,
channelName,
src,
}:
derivation {
builder = "builtin:unpack-channel";
View file
@ -8,13 +8,15 @@ derivation {
inherit manifest;
# !!! grmbl, need structured data for passing this in a clean way.
derivations =
map (d:
[ (d.meta.active or "true")
derivations = map (
d:
[
(d.meta.active or "true")
(d.meta.priority or 5)
(builtins.length d.outputs)
] ++ map (output: builtins.getAttr output d) d.outputs)
derivations;
]
++ map (output: builtins.getAttr output d) d.outputs
) derivations;
# Building user environments remotely just causes huge amounts of
# network traffic, so don't do that.

Some files were not shown because too many files have changed in this diff Show more