Mirror of https://github.com/NixOS/nix.git, synced 2025-11-08 11:36:03 +01:00

Compare commits: 30 commits, ac3532d0f2 ... aa657c1679
| Author | SHA1 | Date |
|---|---|---|
| | aa657c1679 | |
| | a4fb83a239 | |
| | 47ba375285 | |
| | 5c9e22d75a | |
| | e6d823e46d | |
| | 038cc7913b | |
| | 828bf74cd0 | |
| | ec122cbfda | |
| | e1ff27324b | |
| | 7b41563055 | |
| | 7f9b9c3638 | |
| | ed09f1b4d9 | |
| | fc6811cb51 | |
| | a24df3d4e5 | |
| | e6003b5c4f | |
| | f566957dc4 | |
| | f434a3e3c6 | |
| | 939f81c2e6 | |
| | 758dacacf4 | |
| | 9e4177bc67 | |
| | 328a3bbbd0 | |
| | 5c9481de19 | |
| | ebadea0734 | |
| | 7d7ca3fe96 | |
| | 3a92f83e75 | |
| | 291e8ab6bd | |
| | 19441dd317 | |
| | 71ec2cf62d | |
| | b36f8043d2 | |
| | 4d1f72a324 | |
24 changed files with 217 additions and 26 deletions
.version (2 lines changed)

@@ -1 +1 @@
-2.32.2
+2.32.3
@@ -24,9 +24,9 @@ let
   in
   concatStringsSep "\n" (map showEntry storesList);

-  "index.md" =
-    replaceStrings [ "@store-types@" ] [ index ]
-      (readFile ./source/store/types/index.md.in);
+  "index.md" = replaceStrings [ "@store-types@" ] [ index ] (
+    readFile ./source/store/types/index.md.in
+  );

 tableOfContents =
   let
flake.lock (generated, 8 lines changed)

@@ -63,16 +63,16 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1756178832,
-        "narHash": "sha256-O2CIn7HjZwEGqBrwu9EU76zlmA5dbmna7jL1XUmAId8=",
+        "lastModified": 1761597516,
+        "narHash": "sha256-wxX7u6D2rpkJLWkZ2E932SIvDJW8+ON/0Yy8+a5vsDU=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "d98ce345cdab58477ca61855540999c86577d19d",
+        "rev": "daf6dc47aa4b44791372d6139ab7b25269184d55",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "nixos-25.05-small",
+        "ref": "nixos-25.05",
         "repo": "nixpkgs",
         "type": "github"
       }
@@ -1,7 +1,7 @@
 {
   description = "The purely functional package manager";

-  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05-small";
+  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05";

   inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
   inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446";
@@ -1,5 +1,5 @@
-# shellcheck disable=all
 #compdef nix
+# shellcheck disable=all

 function _nix() {
   local ifs_bk="$IFS"
@@ -42,7 +42,7 @@ if cxx.get_id() == 'clang'
   add_project_arguments('-fpch-instantiate-templates', language : 'cpp')
 endif

-# Darwin ld doesn't like "X.Y.Zpre"
-nix_soversion = meson.project_version().split('pre')[0]
+# Darwin ld doesn't like "X.Y.ZpreABCD+W"
+nix_soversion = meson.project_version().split('+')[0].split('pre')[0]

 subdir('assert-fail')
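To make the new soversion expression concrete, here is a small illustrative C++ sketch. It is not part of the change; `soversionFor` and the sample version strings are made up, and it only mimics what `meson.project_version().split('+')[0].split('pre')[0]` computes.

```cpp
// Sketch (illustration only, mirrors the Meson expression above):
// keep everything before the first '+', then everything before "pre".
#include <iostream>
#include <string>

static std::string soversionFor(const std::string & version)
{
    std::string v = version.substr(0, version.find('+')); // split('+')[0]
    return v.substr(0, v.find("pre"));                    // .split('pre')[0]
}

int main()
{
    std::cout << soversionFor("2.32.3") << "\n";             // prints 2.32.3
    std::cout << soversionFor("2.33.0pre1234+abcd") << "\n"; // prints 2.33.0
    return 0;
}
```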
@@ -5,6 +5,7 @@
 #include "nix/expr/symbol-table.hh"

 #include <boost/container/static_vector.hpp>
+#include <boost/iterator/function_output_iterator.hpp>

 #include <algorithm>
 #include <functional>
@@ -463,12 +464,48 @@ private:
         return bindings->baseLayer;
     }

+    /**
+     * If the bindings gets "layered" on top of another we need to recalculate
+     * the number of unique attributes in the chain.
+     *
+     * This is done by either iterating over the base "layer" and the newly added
+     * attributes and counting duplicates. If the base "layer" is big this approach
+     * is inefficient and we fall back to doing per-element binary search in the base
+     * "layer".
+     */
     void finishSizeIfNecessary()
     {
-        if (hasBaseLayer())
-            /* NOTE: Do not use std::ranges::distance, since Bindings is a sized
-               range, but we are calculating this size here. */
-            bindings->numAttrsInChain = std::distance(bindings->begin(), bindings->end());
+        if (!hasBaseLayer())
+            return;
+
+        auto & base = *bindings->baseLayer;
+        auto attrs = std::span(bindings->attrs, bindings->numAttrs);
+
+        Bindings::size_type duplicates = 0;
+
+        /* If the base bindings is smaller than the newly added attributes
+           iterate using std::set_intersection to run in O(|base| + |attrs|) =
+           O(|attrs|). Otherwise use an O(|attrs| * log(|base|)) per-attr binary
+           search to check for duplicates. Note that if we are in this code path then
+           |attrs| <= bindingsUpdateLayerRhsSizeThreshold, which is 16 by default. We are
+           optimizing for the case when a small attribute set gets "layered" on top of
+           a much larger one. When attrsets are already small it's fine to do a linear
+           scan, but we should avoid expensive iterations over large "base" attrsets. */
+        if (attrs.size() > base.size()) {
+            std::set_intersection(
+                base.begin(),
+                base.end(),
+                attrs.begin(),
+                attrs.end(),
+                boost::make_function_output_iterator([&]([[maybe_unused]] auto && _) { ++duplicates; }));
+        } else {
+            for (const auto & attr : attrs) {
+                if (base.get(attr.name))
+                    ++duplicates;
+            }
+        }
+
+        bindings->numAttrsInChain = base.numAttrsInChain + attrs.size() - duplicates;
     }

 public:
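The doc comment above describes two duplicate-counting strategies. Below is a minimal, self-contained C++ sketch of the same idea, not the Nix implementation: it assumes plain sorted integer vectors instead of `Bindings`, and uses a hand-rolled merge pass in place of `std::set_intersection` feeding a `boost::make_function_output_iterator` counter; `countDuplicates` and the sample data are made up for illustration.

```cpp
// Sketch: count how many keys of a small, sorted "overlay" already occur in a
// sorted "base", picking the cheaper of two strategies, analogous to
// finishSizeIfNecessary() in the diff above.
#include <algorithm>
#include <cstddef>
#include <iostream>
#include <vector>

static std::size_t countDuplicates(const std::vector<int> & base, const std::vector<int> & overlay)
{
    std::size_t duplicates = 0;
    if (overlay.size() > base.size()) {
        // Merge-style pass over both sorted ranges: O(|base| + |overlay|).
        auto b = base.begin();
        for (auto o = overlay.begin(); o != overlay.end() && b != base.end();) {
            if (*b < *o)
                ++b;
            else if (*o < *b)
                ++o;
            else {
                ++duplicates;
                ++b;
                ++o;
            }
        }
    } else {
        // Per-element binary search: O(|overlay| * log(|base|)), good when the
        // base is much larger than the overlay.
        for (int x : overlay)
            if (std::binary_search(base.begin(), base.end(), x))
                ++duplicates;
    }
    return duplicates;
}

int main()
{
    std::vector<int> base = {1, 2, 3, 5, 8, 13, 21, 34};
    std::vector<int> overlay = {3, 4, 21};
    std::size_t dup = countDuplicates(base, overlay);
    // Unique keys after layering overlay on top of base:
    std::cout << base.size() + overlay.size() - dup << "\n"; // prints 9
    return 0;
}
```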
@@ -164,8 +164,7 @@ struct GitInputScheme : InputScheme
 {
     std::optional<Input> inputFromURL(const Settings & settings, const ParsedURL & url, bool requireTree) const override
     {
-        auto parsedScheme = parseUrlScheme(url.scheme);
-        if (parsedScheme.application != "git")
+        if (url.scheme != "git" && parseUrlScheme(url.scheme).application != "git")
             return {};

         auto url2(url);
@@ -199,6 +199,28 @@ INSTANTIATE_TEST_SUITE_P(
             .description = "flake_id_ref_branch_ignore_empty_segments_ref_rev",
             .expectedUrl = "flake:nixpkgs/branch/2aae6c35c94fcfb415dbe95f408b9ce91ee846ed",
         },
+        InputFromURLTestCase{
+            .url = "git://somewhere/repo?ref=branch",
+            .attrs =
+                {
+                    {"type", Attr("git")},
+                    {"ref", Attr("branch")},
+                    {"url", Attr("git://somewhere/repo")},
+                },
+            .description = "plain_git_with_ref",
+            .expectedUrl = "git://somewhere/repo?ref=branch",
+        },
+        InputFromURLTestCase{
+            .url = "git+https://somewhere.aaaaaaa/repo?ref=branch",
+            .attrs =
+                {
+                    {"type", Attr("git")},
+                    {"ref", Attr("branch")},
+                    {"url", Attr("https://somewhere.aaaaaaa/repo")},
+                },
+            .description = "git_https_with_ref",
+            .expectedUrl = "git+https://somewhere.aaaaaaa/repo?ref=branch",
+        },
         InputFromURLTestCase{
             // Note that this is different from above because the "flake id" shorthand
             // doesn't allow this.
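For illustration, here is a hedged sketch of the scheme handling the change above relies on: splitting a scheme such as `git+https` into an application part and a transport part, so that both `git://` and `git+<transport>://` URLs are recognized as Git inputs. `ParsedScheme`, `parseScheme`, and `looksLikeGitInput` are made up for this example and are not Nix's actual `parseUrlScheme`.

```cpp
// Sketch (simplified stand-in, not the Nix implementation): split a URL scheme
// of the form "application+transport" (e.g. "git+https") into its two parts;
// a bare scheme like "git" or "https" has no '+' separator.
#include <iostream>
#include <optional>
#include <string>
#include <string_view>

struct ParsedScheme
{
    std::optional<std::string> application; // e.g. "git" in "git+https"
    std::string transport;                  // e.g. "https"
};

static ParsedScheme parseScheme(std::string_view scheme)
{
    auto plus = scheme.find('+');
    if (plus == std::string_view::npos)
        return {.application = std::nullopt, .transport = std::string(scheme)};
    return {
        .application = std::string(scheme.substr(0, plus)),
        .transport = std::string(scheme.substr(plus + 1)),
    };
}

static bool looksLikeGitInput(std::string_view scheme)
{
    // Mirrors the shape of the check in the diff: accept plain "git" as well
    // as any "git+<transport>" scheme.
    if (scheme == "git")
        return true;
    return parseScheme(scheme).application == "git";
}

int main()
{
    std::cout << looksLikeGitInput("git") << "\n";       // prints 1
    std::cout << looksLikeGitInput("git+https") << "\n"; // prints 1
    std::cout << looksLikeGitInput("https") << "\n";     // prints 0
    return 0;
}
```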
@@ -80,7 +80,8 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(

     std::smatch match;
     auto succeeds = std::regex_match(url, match, pathFlakeRegex);
-    assert(succeeds);
+    if (!succeeds)
+        throw Error("invalid flakeref '%s'", url);
     auto path = match[1].str();
     auto query = decodeQuery(match[3].str(), /*lenient=*/true);
     auto fragment = percentDecode(match[5].str());
@@ -0,0 +1 @@
+ssh://userinfo@[fea5:23e1:3916:fc24:cb52:2837:2ecb:ea8e%eth0]?a=b&c=d

@@ -0,0 +1 @@
+ssh://userinfo@[fea5:23e1:3916:fc24:cb52:2837:2ecb:ea8e%25eth0]?a=b&c=d

@@ -0,0 +1 @@
+ssh://userinfo@fea5:23e1:3916:fc24:cb52:2837:2ecb:ea8e%25?a=b&c=d

@@ -0,0 +1 @@
+ssh://userinfo@fea5:23e1:3916:fc24:cb52:2837:2ecb:ea8e%eth0?a=b&c=d

@@ -0,0 +1 @@
+ssh://fea5:23e1:3916:fc24:cb52:2837:2ecb:ea8e%eth0?a=b&c=d

@@ -0,0 +1 @@
+ssh://fea5:23e1:3916:fc24:cb52:2837:2ecb:ea8e%eth0
@@ -183,4 +183,64 @@ static StoreReference sshIPv6AuthorityWithUserinfoAndParams{

 URI_TEST_READ(ssh_unbracketed_ipv6_3, sshIPv6AuthorityWithUserinfoAndParams)

+static const StoreReference sshIPv6AuthorityWithUserinfoAndParamsAndZoneId{
+    .variant =
+        StoreReference::Specified{
+            .scheme = "ssh",
+            .authority = "userinfo@[fea5:23e1:3916:fc24:cb52:2837:2ecb:ea8e%25eth0]",
+        },
+    .params =
+        {
+            {"a", "b"},
+            {"c", "d"},
+        },
+};
+
+URI_TEST_READ(ssh_unbracketed_ipv6_4, sshIPv6AuthorityWithUserinfoAndParamsAndZoneId)
+URI_TEST_READ(ssh_unbracketed_ipv6_5, sshIPv6AuthorityWithUserinfoAndParamsAndZoneId)
+
+static const StoreReference sshIPv6AuthorityWithUserinfoAndParamsAndZoneIdTricky{
+    .variant =
+        StoreReference::Specified{
+            .scheme = "ssh",
+            .authority = "userinfo@[fea5:23e1:3916:fc24:cb52:2837:2ecb:ea8e%2525]",
+        },
+    .params =
+        {
+            {"a", "b"},
+            {"c", "d"},
+        },
+};
+
+// Non-standard syntax where the IPv6 literal appears without brackets. In
+// this case don't consider %25 to be a pct-encoded % and just take it as a
+// literal value. 25 is a perfectly legal ZoneId value in theory.
+URI_TEST_READ(ssh_unbracketed_ipv6_6, sshIPv6AuthorityWithUserinfoAndParamsAndZoneIdTricky)
+URI_TEST_READ(ssh_unbracketed_ipv6_7, sshIPv6AuthorityWithUserinfoAndParamsAndZoneId)
+
+static const StoreReference sshIPv6AuthorityWithParamsAndZoneId{
+    .variant =
+        StoreReference::Specified{
+            .scheme = "ssh",
+            .authority = "[fea5:23e1:3916:fc24:cb52:2837:2ecb:ea8e%25eth0]",
+        },
+    .params =
+        {
+            {"a", "b"},
+            {"c", "d"},
+        },
+};
+
+URI_TEST_READ(ssh_unbracketed_ipv6_8, sshIPv6AuthorityWithParamsAndZoneId)
+
+static const StoreReference sshIPv6AuthorityWithZoneId{
+    .variant =
+        StoreReference::Specified{
+            .scheme = "ssh",
+            .authority = "[fea5:23e1:3916:fc24:cb52:2837:2ecb:ea8e%25eth0]",
+        },
+};
+
+URI_TEST_READ(ssh_unbracketed_ipv6_9, sshIPv6AuthorityWithZoneId)
+
 } // namespace nix
@@ -860,7 +860,15 @@ Goal::Co DerivationBuildingGoal::tryToBuild()
     {
         builder.reset();
         StorePathSet outputPaths;
-        for (auto & [_, output] : builtOutputs) {
+        /* In the check case we install no store objects, and so
+           `builtOutputs` is empty. However, per issue #14287, there is
+           an expectation that the post-build hook is still executed.
+           (This is useful for e.g. logging successful deterministic rebuilds.)
+
+           In order to make that work, in the check case just load the
+           (preexisting) infos from scratch, rather than relying on what
+           `DerivationBuilder` returned to us. */
+        for (auto & [_, output] : buildMode == bmCheck ? checkPathValidity(initialOutputs).second : builtOutputs) {
             // for sake of `bmRepair`
             worker.markContentsGood(output.outPath);
             outputPaths.insert(output.outPath);
@@ -182,7 +182,19 @@ Goal::Co DerivationGoal::haveDerivation()
             }
         }

-        assert(success.builtOutputs.count(wantedOutput) > 0);
+        /* If the wanted output is not in builtOutputs (e.g., because it
+           was already valid and therefore not re-registered), we need to
+           add it ourselves to ensure we return the correct information. */
+        if (success.builtOutputs.count(wantedOutput) == 0) {
+            debug(
+                "BUG! wanted output '%s' not in builtOutputs, working around by adding it manually", wantedOutput);
+            auto realisation = assertPathValidity();
+            realisation.id = DrvOutput{
+                .drvHash = outputHash,
+                .outputName = wantedOutput,
+            };
+            success.builtOutputs.emplace(wantedOutput, std::move(realisation));
+        }
     }
 }

@@ -121,7 +121,27 @@ StoreReference StoreReference::parse(const std::string & uri, const StoreReferen
         * greedily assumed to be the part of the host address. */
        auto authorityString = schemeAndAuthority->authority;
        auto userinfo = splitPrefixTo(authorityString, '@');
-       auto maybeIpv6 = boost::urls::parse_ipv6_address(authorityString);
+       /* Back-compat shim for ZoneId specifiers. Technically this isn't
+        * standard, but the expectation is this works with the old syntax
+        * for ZoneID specifiers. For the full story behind the fiasco that
+        * is ZoneID in URLs look at [^].
+        * [^]: https://datatracker.ietf.org/doc/html/draft-schinazi-httpbis-link-local-uri-bcp-03
+        */
+
+       /* Fish out the internals from inside square brackets. It might be that the pct-sign is unencoded and that's
+        * why we failed to parse it previously. */
+       if (authorityString.starts_with('[') && authorityString.ends_with(']')) {
+           authorityString.remove_prefix(1);
+           authorityString.remove_suffix(1);
+       }
+
+       auto maybeBeforePct = splitPrefixTo(authorityString, '%');
+       bool hasZoneId = maybeBeforePct.has_value();
+       auto maybeZoneId = hasZoneId ? std::optional{authorityString} : std::nullopt;
+
+       std::string_view maybeIpv6S = maybeBeforePct.value_or(authorityString);
+       auto maybeIpv6 = boost::urls::parse_ipv6_address(maybeIpv6S);
+
        if (maybeIpv6) {
            std::string fixedAuthority;
            if (userinfo) {
@@ -129,7 +149,11 @@ StoreReference StoreReference::parse(const std::string & uri, const StoreReferen
                fixedAuthority += '@';
            }
            fixedAuthority += '[';
-           fixedAuthority += authorityString;
+           fixedAuthority += maybeIpv6S;
+           if (maybeZoneId) {
+               fixedAuthority += "%25"; // pct-encoded percent character
+               fixedAuthority += *maybeZoneId;
+           }
            fixedAuthority += ']';
            return {
                .variant =
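Below is a simplified, self-contained sketch of the bracketing step performed by the shim above. It is not the Nix implementation: `bracketWithZoneId` is a made-up helper, there is no userinfo handling, no percent-decoding, and no IPv6 validation; it only shows how an authority with a raw `%` zone-ID separator ends up in the bracketed, `%25`-encoded form.

```cpp
// Sketch: rewrite "<ipv6>%<zone>" (optionally bracketed) as "[<ipv6>%25<zone>]".
#include <iostream>
#include <optional>
#include <string>
#include <string_view>

static std::string bracketWithZoneId(std::string_view authority)
{
    // Strip surrounding brackets, if any, so the '%' separator is visible.
    if (authority.starts_with('[') && authority.ends_with(']')) {
        authority.remove_prefix(1);
        authority.remove_suffix(1);
    }

    // Everything before the first '%' is the address, everything after it the
    // zone ID (mirroring splitPrefixTo(authorityString, '%') in the diff).
    std::string_view address = authority;
    std::optional<std::string_view> zoneId;
    if (auto pct = authority.find('%'); pct != std::string_view::npos) {
        address = authority.substr(0, pct);
        zoneId = authority.substr(pct + 1);
    }

    std::string fixed = "[";
    fixed += address;
    if (zoneId) {
        fixed += "%25"; // pct-encoded percent character
        fixed += *zoneId;
    }
    fixed += ']';
    return fixed;
}

int main()
{
    // A raw '%' zone-ID separator is re-emitted in its pct-encoded form.
    std::cout << bracketWithZoneId("fea5:23e1:3916:fc24:cb52:2837:2ecb:ea8e%eth0") << "\n";
    // Prints: [fea5:23e1:3916:fc24:cb52:2837:2ecb:ea8e%25eth0]
    return 0;
}
```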
@@ -64,7 +64,7 @@ boost = dependency(
     'url',
   ],
   include_type : 'system',
-  version : '>=1.82.0',
+  version : '>=1.87.0',
 )
 # boost is a public dependency, but not a pkg-config dependency unfortunately, so we
 # put in `deps_other`.
@@ -798,8 +798,6 @@ struct CmdFlakeCheck : FlakeCommand
         // via substitution, as `nix flake check` only needs to verify buildability,
         // not actually produce the outputs.
         auto missing = store->queryMissing(drvPaths);
-        // Only occurs if `drvPaths` contains a `DerivedPath::Opaque`, which should never happen
-        assert(missing.unknown.empty());

         std::vector<DerivedPath> toBuild;
         for (auto & path : missing.willBuild) {
tests/functional/build-hook-list-paths.sh (new executable file, 12 lines)

@@ -0,0 +1,12 @@
+#!/bin/sh
+
+set -x
+set -e
+
+[ -n "$OUT_PATHS" ]
+[ -n "$DRV_PATH" ]
+[ -n "$HOOK_DEST" ]
+
+for o in $OUT_PATHS; do
+    echo "$o" >> "$HOOK_DEST"
+done
@@ -29,6 +29,18 @@ nix-build -o "$TEST_ROOT"/result dependencies.nix --post-build-hook "$pushToStor
 export BUILD_HOOK_ONLY_OUT_PATHS=$([ ! "$NIX_TESTS_CA_BY_DEFAULT" ])
 nix-build -o "$TEST_ROOT"/result-mult multiple-outputs.nix -A a.first --post-build-hook "$pushToStore"

+if isDaemonNewer "2.33.0pre20251029"; then
+    # Regression test for issue #14287: `--check` should re-run post build
+    # hook, even though nothing is getting newly registered.
+    export HOOK_DEST=$TEST_ROOT/listing
+    # Needed so the hook will get the above environment variable.
+    restartDaemon
+    nix-build -o "$TEST_ROOT"/result-mult multiple-outputs.nix --check -A a.first --post-build-hook "$PWD/build-hook-list-paths.sh"
+    grepQuiet a-first "$HOOK_DEST"
+    grepQuiet a-second "$HOOK_DEST"
+    unset HOOK_DEST
+fi
+
 clearStore

 # Ensure that the remote store contains both the runtime and build-time