Mirror of https://github.com/NixOS/nix.git, synced 2025-11-13 14:02:42 +01:00
Apply clang-format universally.
* It is tough to contribute to a project that doesn't use a formatter
* It is extra hard to contribute to a project which has configured the formatter, but ignores it for some files
* Code formatting makes it harder to hide obscure / weird bugs by accident or on purpose

Let's rip the bandaid off?

Note that PRs currently in flight should be able to be merged relatively easily by applying `clang-format` to their tip prior to merge.
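For example, an in-flight branch could be brought into line roughly like this. This is a sketch only, not part of the commit: it assumes `clang-format` is installed, that the repository's `.clang-format` configuration is picked up from the source tree, and that the PR targets `master` on the `origin` remote.

```sh
# Reformat the C++ files the PR touches at its tip, then commit the result.
git fetch origin
git diff --name-only --diff-filter=d origin/master...HEAD -- '*.cc' '*.hh' \
    | xargs clang-format -i
git commit -am "Apply clang-format"
```

Alternatively, the `git clang-format` helper that ships with clang can restrict reformatting to just the lines the branch itself changes.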
Parent: 41bf87ec70
Commit: e4f62e4608
587 changed files with 23258 additions and 23135 deletions
@@ -18,7 +18,8 @@ Path trustedListPath()
 static TrustedList readTrustedList()
 {
     auto path = trustedListPath();
-    if (!pathExists(path)) return {};
+    if (!pathExists(path))
+        return {};
     auto json = nlohmann::json::parse(readFile(path));
     return json;
 }

@@ -32,7 +33,13 @@ static void writeTrustedList(const TrustedList & trustedList)
 
 void ConfigFile::apply(const Settings & flakeSettings)
 {
-    StringSet whitelist{"bash-prompt", "bash-prompt-prefix", "bash-prompt-suffix", "flake-registry", "commit-lock-file-summary", "commit-lockfile-summary"};
+    StringSet whitelist{
+        "bash-prompt",
+        "bash-prompt-prefix",
+        "bash-prompt-suffix",
+        "flake-registry",
+        "commit-lock-file-summary",
+        "commit-lockfile-summary"};
 
     for (auto & [name, value] : settings) {
 
@@ -40,11 +47,11 @@ void ConfigFile::apply(const Settings & flakeSettings)
 
         // FIXME: Move into libutil/config.cc.
         std::string valueS;
-        if (auto* s = std::get_if<std::string>(&value))
+        if (auto * s = std::get_if<std::string>(&value))
            valueS = *s;
-        else if (auto* n = std::get_if<int64_t>(&value))
+        else if (auto * n = std::get_if<int64_t>(&value))
            valueS = fmt("%d", *n);
-        else if (auto* b = std::get_if<Explicit<bool>>(&value))
+        else if (auto * b = std::get_if<Explicit<bool>>(&value))
            valueS = b->t ? "true" : "false";
         else if (auto ss = std::get_if<std::vector<std::string>>(&value))
            valueS = dropEmptyInitThenConcatStringsSep(" ", *ss); // FIXME: evil

@@ -57,19 +64,35 @@ void ConfigFile::apply(const Settings & flakeSettings)
             auto tlname = get(trustedList, name);
             if (auto saved = tlname ? get(*tlname, valueS) : nullptr) {
                 trusted = *saved;
-                printInfo("Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name, valueS);
+                printInfo(
+                    "Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name, valueS);
             } else {
                 // FIXME: filter ANSI escapes, newlines, \r, etc.
-                if (std::tolower(logger->ask(fmt("do you want to allow configuration setting '%s' to be set to '" ANSI_RED "%s" ANSI_NORMAL "' (y/N)?", name, valueS)).value_or('n')) == 'y') {
+                if (std::tolower(logger
+                        ->ask(
+                            fmt("do you want to allow configuration setting '%s' to be set to '" ANSI_RED
+                                "%s" ANSI_NORMAL "' (y/N)?",
+                                name,
+                                valueS))
+                        .value_or('n'))
+                    == 'y') {
                     trusted = true;
                 }
-                if (std::tolower(logger->ask(fmt("do you want to permanently mark this value as %s (y/N)?", trusted ? "trusted": "untrusted" )).value_or('n')) == 'y') {
+                if (std::tolower(logger
+                        ->ask(
+                            fmt("do you want to permanently mark this value as %s (y/N)?",
+                                trusted ? "trusted" : "untrusted"))
+                        .value_or('n'))
+                    == 'y') {
                     trustedList[name][valueS] = trusted;
                     writeTrustedList(trustedList);
                 }
             }
             if (!trusted) {
-                warn("ignoring untrusted flake configuration setting '%s'.\nPass '%s' to trust it", name, "--accept-flake-config");
+                warn(
+                    "ignoring untrusted flake configuration setting '%s'.\nPass '%s' to trust it",
+                    name,
+                    "--accept-flake-config");
                 continue;
             }
         }

@@ -78,4 +101,4 @@ void ConfigFile::apply(const Settings & flakeSettings)
     }
 }
 
-}
+} // namespace nix::flake

@ -25,10 +25,7 @@ using namespace flake;
|
|||
namespace flake {
|
||||
|
||||
static StorePath copyInputToStore(
|
||||
EvalState & state,
|
||||
fetchers::Input & input,
|
||||
const fetchers::Input & originalInput,
|
||||
ref<SourceAccessor> accessor)
|
||||
EvalState & state, fetchers::Input & input, const fetchers::Input & originalInput, ref<SourceAccessor> accessor)
|
||||
{
|
||||
auto storePath = fetchToStore(*input.settings, *state.store, accessor, FetchMode::Copy, input.getName());
|
||||
|
||||
|
|
@ -48,13 +45,11 @@ static void forceTrivialValue(EvalState & state, Value & value, const PosIdx pos
|
|||
state.forceValue(value, pos);
|
||||
}
|
||||
|
||||
static void expectType(EvalState & state, ValueType type,
|
||||
Value & value, const PosIdx pos)
|
||||
static void expectType(EvalState & state, ValueType type, Value & value, const PosIdx pos)
|
||||
{
|
||||
forceTrivialValue(state, value, pos);
|
||||
if (value.type() != type)
|
||||
throw Error("expected %s but got %s at %s",
|
||||
showType(type), showType(value.type()), state.positions[pos]);
|
||||
throw Error("expected %s but got %s at %s", showType(type), showType(value.type()), state.positions[pos]);
|
||||
}
|
||||
|
||||
static std::pair<std::map<FlakeId, FlakeInput>, fetchers::Attrs> parseFlakeInputs(
|
||||
|
|
@ -65,38 +60,43 @@ static std::pair<std::map<FlakeId, FlakeInput>, fetchers::Attrs> parseFlakeInput
|
|||
const SourcePath & flakeDir,
|
||||
bool allowSelf);
|
||||
|
||||
static void parseFlakeInputAttr(
|
||||
EvalState & state,
|
||||
const Attr & attr,
|
||||
fetchers::Attrs & attrs)
|
||||
static void parseFlakeInputAttr(EvalState & state, const Attr & attr, fetchers::Attrs & attrs)
|
||||
{
|
||||
// Allow selecting a subset of enum values
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wswitch-enum"
|
||||
// Allow selecting a subset of enum values
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wswitch-enum"
|
||||
switch (attr.value->type()) {
|
||||
case nString:
|
||||
attrs.emplace(state.symbols[attr.name], attr.value->c_str());
|
||||
break;
|
||||
case nBool:
|
||||
attrs.emplace(state.symbols[attr.name], Explicit<bool> { attr.value->boolean() });
|
||||
break;
|
||||
case nInt: {
|
||||
auto intValue = attr.value->integer().value;
|
||||
if (intValue < 0)
|
||||
state.error<EvalError>("negative value given for flake input attribute %1%: %2%", state.symbols[attr.name], intValue).debugThrow();
|
||||
attrs.emplace(state.symbols[attr.name], uint64_t(intValue));
|
||||
break;
|
||||
}
|
||||
default:
|
||||
if (attr.name == state.symbols.create("publicKeys")) {
|
||||
experimentalFeatureSettings.require(Xp::VerifiedFetches);
|
||||
NixStringContext emptyContext = {};
|
||||
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, attr.pos, emptyContext).dump());
|
||||
} else
|
||||
state.error<TypeError>("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
|
||||
state.symbols[attr.name], showType(*attr.value)).debugThrow();
|
||||
case nString:
|
||||
attrs.emplace(state.symbols[attr.name], attr.value->c_str());
|
||||
break;
|
||||
case nBool:
|
||||
attrs.emplace(state.symbols[attr.name], Explicit<bool>{attr.value->boolean()});
|
||||
break;
|
||||
case nInt: {
|
||||
auto intValue = attr.value->integer().value;
|
||||
if (intValue < 0)
|
||||
state
|
||||
.error<EvalError>(
|
||||
"negative value given for flake input attribute %1%: %2%", state.symbols[attr.name], intValue)
|
||||
.debugThrow();
|
||||
attrs.emplace(state.symbols[attr.name], uint64_t(intValue));
|
||||
break;
|
||||
}
|
||||
#pragma GCC diagnostic pop
|
||||
default:
|
||||
if (attr.name == state.symbols.create("publicKeys")) {
|
||||
experimentalFeatureSettings.require(Xp::VerifiedFetches);
|
||||
NixStringContext emptyContext = {};
|
||||
attrs.emplace(
|
||||
state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, attr.pos, emptyContext).dump());
|
||||
} else
|
||||
state
|
||||
.error<TypeError>(
|
||||
"flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
|
||||
state.symbols[attr.name],
|
||||
showType(*attr.value))
|
||||
.debugThrow();
|
||||
}
|
||||
#pragma GCC diagnostic pop
|
||||
}
|
||||
|
||||
static FlakeInput parseFlakeInput(
|
||||
|
|
@ -127,19 +127,24 @@ static FlakeInput parseFlakeInput(
|
|||
else if (attr.value->type() == nPath) {
|
||||
auto path = attr.value->path();
|
||||
if (path.accessor != flakeDir.accessor)
|
||||
throw Error("input attribute path '%s' at %s must be in the same source tree as %s",
|
||||
path, state.positions[attr.pos], flakeDir);
|
||||
throw Error(
|
||||
"input attribute path '%s' at %s must be in the same source tree as %s",
|
||||
path,
|
||||
state.positions[attr.pos],
|
||||
flakeDir);
|
||||
url = "path:" + flakeDir.path.makeRelative(path.path);
|
||||
}
|
||||
else
|
||||
throw Error("expected a string or a path but got %s at %s",
|
||||
showType(attr.value->type()), state.positions[attr.pos]);
|
||||
} else
|
||||
throw Error(
|
||||
"expected a string or a path but got %s at %s",
|
||||
showType(attr.value->type()),
|
||||
state.positions[attr.pos]);
|
||||
attrs.emplace("url", *url);
|
||||
} else if (attr.name == sFlake) {
|
||||
expectType(state, nBool, *attr.value, attr.pos);
|
||||
input.isFlake = attr.value->boolean();
|
||||
} else if (attr.name == sInputs) {
|
||||
input.overrides = parseFlakeInputs(state, attr.value, attr.pos, lockRootAttrPath, flakeDir, false).first;
|
||||
input.overrides =
|
||||
parseFlakeInputs(state, attr.value, attr.pos, lockRootAttrPath, flakeDir, false).first;
|
||||
} else if (attr.name == sFollows) {
|
||||
expectType(state, nString, *attr.value, attr.pos);
|
||||
auto follows(parseInputAttrPath(attr.value->c_str()));
|
||||
|
|
@ -149,8 +154,7 @@ static FlakeInput parseFlakeInput(
|
|||
parseFlakeInputAttr(state, attr, attrs);
|
||||
} catch (Error & e) {
|
||||
e.addTrace(
|
||||
state.positions[attr.pos],
|
||||
HintFmt("while evaluating flake attribute '%s'", state.symbols[attr.name]));
|
||||
state.positions[attr.pos], HintFmt("while evaluating flake attribute '%s'", state.symbols[attr.name]));
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
|
@ -198,12 +202,8 @@ static std::pair<std::map<FlakeId, FlakeInput>, fetchers::Attrs> parseFlakeInput
|
|||
for (auto & attr : *inputAttr.value->attrs())
|
||||
parseFlakeInputAttr(state, attr, selfAttrs);
|
||||
} else {
|
||||
inputs.emplace(inputName,
|
||||
parseFlakeInput(state,
|
||||
inputAttr.value,
|
||||
inputAttr.pos,
|
||||
lockRootAttrPath,
|
||||
flakeDir));
|
||||
inputs.emplace(
|
||||
inputName, parseFlakeInput(state, inputAttr.value, inputAttr.pos, lockRootAttrPath, flakeDir));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -225,7 +225,7 @@ static Flake readFlake(
|
|||
Value vInfo;
|
||||
state.evalFile(flakePath, vInfo, true);
|
||||
|
||||
Flake flake {
|
||||
Flake flake{
|
||||
.originalRef = originalRef,
|
||||
.resolvedRef = resolvedRef,
|
||||
.lockedRef = lockedRef,
|
||||
|
|
@ -240,7 +240,8 @@ static Flake readFlake(
|
|||
auto sInputs = state.symbols.create("inputs");
|
||||
|
||||
if (auto inputs = vInfo.attrs()->get(sInputs)) {
|
||||
auto [flakeInputs, selfAttrs] = parseFlakeInputs(state, inputs->value, inputs->pos, lockRootAttrPath, flakeDir, true);
|
||||
auto [flakeInputs, selfAttrs] =
|
||||
parseFlakeInputs(state, inputs->value, inputs->pos, lockRootAttrPath, flakeDir, true);
|
||||
flake.inputs = std::move(flakeInputs);
|
||||
flake.selfAttrs = std::move(selfAttrs);
|
||||
}
|
||||
|
|
@ -253,9 +254,9 @@ static Flake readFlake(
|
|||
if (outputs->value->isLambda() && outputs->value->lambda().fun->hasFormals()) {
|
||||
for (auto & formal : outputs->value->lambda().fun->formals->formals) {
|
||||
if (formal.name != state.sSelf)
|
||||
flake.inputs.emplace(state.symbols[formal.name], FlakeInput {
|
||||
.ref = parseFlakeRef(state.fetchSettings, std::string(state.symbols[formal.name]))
|
||||
});
|
||||
flake.inputs.emplace(
|
||||
state.symbols[formal.name],
|
||||
FlakeInput{.ref = parseFlakeRef(state.fetchSettings, std::string(state.symbols[formal.name]))});
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -271,53 +272,51 @@ static Flake readFlake(
|
|||
forceTrivialValue(state, *setting.value, setting.pos);
|
||||
if (setting.value->type() == nString)
|
||||
flake.config.settings.emplace(
|
||||
state.symbols[setting.name],
|
||||
std::string(state.forceStringNoCtx(*setting.value, setting.pos, "")));
|
||||
state.symbols[setting.name], std::string(state.forceStringNoCtx(*setting.value, setting.pos, "")));
|
||||
else if (setting.value->type() == nPath) {
|
||||
auto storePath = fetchToStore(state.fetchSettings, *state.store, setting.value->path(), FetchMode::Copy);
|
||||
auto storePath =
|
||||
fetchToStore(state.fetchSettings, *state.store, setting.value->path(), FetchMode::Copy);
|
||||
flake.config.settings.emplace(state.symbols[setting.name], state.store->printStorePath(storePath));
|
||||
} else if (setting.value->type() == nInt)
|
||||
flake.config.settings.emplace(
|
||||
state.symbols[setting.name],
|
||||
state.store->printStorePath(storePath));
|
||||
}
|
||||
else if (setting.value->type() == nInt)
|
||||
flake.config.settings.emplace(
|
||||
state.symbols[setting.name],
|
||||
state.forceInt(*setting.value, setting.pos, "").value);
|
||||
state.symbols[setting.name], state.forceInt(*setting.value, setting.pos, "").value);
|
||||
else if (setting.value->type() == nBool)
|
||||
flake.config.settings.emplace(
|
||||
state.symbols[setting.name],
|
||||
Explicit<bool> { state.forceBool(*setting.value, setting.pos, "") });
|
||||
state.symbols[setting.name], Explicit<bool>{state.forceBool(*setting.value, setting.pos, "")});
|
||||
else if (setting.value->type() == nList) {
|
||||
std::vector<std::string> ss;
|
||||
for (auto elem : setting.value->listView()) {
|
||||
if (elem->type() != nString)
|
||||
state.error<TypeError>("list element in flake configuration setting '%s' is %s while a string is expected",
|
||||
state.symbols[setting.name], showType(*setting.value)).debugThrow();
|
||||
state
|
||||
.error<TypeError>(
|
||||
"list element in flake configuration setting '%s' is %s while a string is expected",
|
||||
state.symbols[setting.name],
|
||||
showType(*setting.value))
|
||||
.debugThrow();
|
||||
ss.emplace_back(state.forceStringNoCtx(*elem, setting.pos, ""));
|
||||
}
|
||||
flake.config.settings.emplace(state.symbols[setting.name], ss);
|
||||
}
|
||||
else
|
||||
state.error<TypeError>("flake configuration setting '%s' is %s",
|
||||
state.symbols[setting.name], showType(*setting.value)).debugThrow();
|
||||
} else
|
||||
state
|
||||
.error<TypeError>(
|
||||
"flake configuration setting '%s' is %s", state.symbols[setting.name], showType(*setting.value))
|
||||
.debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
for (auto & attr : *vInfo.attrs()) {
|
||||
if (attr.name != state.sDescription &&
|
||||
attr.name != sInputs &&
|
||||
attr.name != sOutputs &&
|
||||
attr.name != sNixConfig)
|
||||
throw Error("flake '%s' has an unsupported attribute '%s', at %s",
|
||||
resolvedRef, state.symbols[attr.name], state.positions[attr.pos]);
|
||||
if (attr.name != state.sDescription && attr.name != sInputs && attr.name != sOutputs && attr.name != sNixConfig)
|
||||
throw Error(
|
||||
"flake '%s' has an unsupported attribute '%s', at %s",
|
||||
resolvedRef,
|
||||
state.symbols[attr.name],
|
||||
state.positions[attr.pos]);
|
||||
}
|
||||
|
||||
return flake;
|
||||
}
|
||||
|
||||
static FlakeRef applySelfAttrs(
|
||||
const FlakeRef & ref,
|
||||
const Flake & flake)
|
||||
static FlakeRef applySelfAttrs(const FlakeRef & ref, const Flake & flake)
|
||||
{
|
||||
auto newRef(ref);
|
||||
|
||||
|
|
@ -371,22 +370,16 @@ Flake getFlake(EvalState & state, const FlakeRef & originalRef, fetchers::UseReg
|
|||
return getFlake(state, originalRef, useRegistries, {});
|
||||
}
|
||||
|
||||
static LockFile readLockFile(
|
||||
const fetchers::Settings & fetchSettings,
|
||||
const SourcePath & lockFilePath)
|
||||
static LockFile readLockFile(const fetchers::Settings & fetchSettings, const SourcePath & lockFilePath)
|
||||
{
|
||||
return lockFilePath.pathExists()
|
||||
? LockFile(fetchSettings, lockFilePath.readFile(), fmt("%s", lockFilePath))
|
||||
: LockFile();
|
||||
return lockFilePath.pathExists() ? LockFile(fetchSettings, lockFilePath.readFile(), fmt("%s", lockFilePath))
|
||||
: LockFile();
|
||||
}
|
||||
|
||||
/* Compute an in-memory lock file for the specified top-level flake,
|
||||
and optionally write it to file, if the flake is writable. */
|
||||
LockedFlake lockFlake(
|
||||
const Settings & settings,
|
||||
EvalState & state,
|
||||
const FlakeRef & topRef,
|
||||
const LockFlags & lockFlags)
|
||||
LockedFlake
|
||||
lockFlake(const Settings & settings, EvalState & state, const FlakeRef & topRef, const LockFlags & lockFlags)
|
||||
{
|
||||
experimentalFeatureSettings.require(Xp::Flakes);
|
||||
|
||||
|
|
@ -394,11 +387,7 @@ LockedFlake lockFlake(
|
|||
auto useRegistriesTop = useRegistries ? fetchers::UseRegistries::All : fetchers::UseRegistries::No;
|
||||
auto useRegistriesInputs = useRegistries ? fetchers::UseRegistries::Limited : fetchers::UseRegistries::No;
|
||||
|
||||
auto flake = getFlake(
|
||||
state,
|
||||
topRef,
|
||||
useRegistriesTop,
|
||||
{});
|
||||
auto flake = getFlake(state, topRef, useRegistriesTop, {});
|
||||
|
||||
if (lockFlags.applyNixConfig) {
|
||||
flake.config.apply(settings);
|
||||
|
|
@ -410,10 +399,8 @@ LockedFlake lockFlake(
|
|||
throw Error("reference lock file was provided, but the `allow-dirty` setting is set to false");
|
||||
}
|
||||
|
||||
auto oldLockFile = readLockFile(
|
||||
state.fetchSettings,
|
||||
lockFlags.referenceLockFilePath.value_or(
|
||||
flake.lockFilePath()));
|
||||
auto oldLockFile =
|
||||
readLockFile(state.fetchSettings, lockFlags.referenceLockFilePath.value_or(flake.lockFilePath()));
|
||||
|
||||
debug("old lock file: %s", oldLockFile);
|
||||
|
||||
|
|
@ -432,8 +419,8 @@ LockedFlake lockFlake(
|
|||
for (auto & i : lockFlags.inputOverrides) {
|
||||
overrides.emplace(
|
||||
i.first,
|
||||
OverrideTarget {
|
||||
.input = FlakeInput { .ref = i.second },
|
||||
OverrideTarget{
|
||||
.input = FlakeInput{.ref = i.second},
|
||||
/* Note: any relative overrides
|
||||
(e.g. `--override-input B/C "path:./foo/bar"`)
|
||||
are interpreted relative to the top-level
|
||||
|
|
@ -458,42 +445,40 @@ LockedFlake lockFlake(
|
|||
computeLocks;
|
||||
|
||||
computeLocks = [&](
|
||||
/* The inputs of this node, either from flake.nix or
|
||||
flake.lock. */
|
||||
const FlakeInputs & flakeInputs,
|
||||
/* The node whose locks are to be updated.*/
|
||||
ref<Node> node,
|
||||
/* The path to this node in the lock file graph. */
|
||||
const InputAttrPath & inputAttrPathPrefix,
|
||||
/* The old node, if any, from which locks can be
|
||||
copied. */
|
||||
std::shared_ptr<const Node> oldNode,
|
||||
/* The prefix relative to which 'follows' should be
|
||||
interpreted. When a node is initially locked, it's
|
||||
relative to the node's flake; when it's already locked,
|
||||
it's relative to the root of the lock file. */
|
||||
const InputAttrPath & followsPrefix,
|
||||
/* The source path of this node's flake. */
|
||||
const SourcePath & sourcePath,
|
||||
bool trustLock)
|
||||
{
|
||||
/* The inputs of this node, either from flake.nix or
|
||||
flake.lock. */
|
||||
const FlakeInputs & flakeInputs,
|
||||
/* The node whose locks are to be updated.*/
|
||||
ref<Node> node,
|
||||
/* The path to this node in the lock file graph. */
|
||||
const InputAttrPath & inputAttrPathPrefix,
|
||||
/* The old node, if any, from which locks can be
|
||||
copied. */
|
||||
std::shared_ptr<const Node> oldNode,
|
||||
/* The prefix relative to which 'follows' should be
|
||||
interpreted. When a node is initially locked, it's
|
||||
relative to the node's flake; when it's already locked,
|
||||
it's relative to the root of the lock file. */
|
||||
const InputAttrPath & followsPrefix,
|
||||
/* The source path of this node's flake. */
|
||||
const SourcePath & sourcePath,
|
||||
bool trustLock) {
|
||||
debug("computing lock file node '%s'", printInputAttrPath(inputAttrPathPrefix));
|
||||
|
||||
/* Get the overrides (i.e. attributes of the form
|
||||
'inputs.nixops.inputs.nixpkgs.url = ...'). */
|
||||
std::function<void(const FlakeInput & input, const InputAttrPath & prefix)> addOverrides;
|
||||
addOverrides = [&](const FlakeInput & input, const InputAttrPath & prefix)
|
||||
{
|
||||
addOverrides = [&](const FlakeInput & input, const InputAttrPath & prefix) {
|
||||
for (auto & [idOverride, inputOverride] : input.overrides) {
|
||||
auto inputAttrPath(prefix);
|
||||
inputAttrPath.push_back(idOverride);
|
||||
if (inputOverride.ref || inputOverride.follows)
|
||||
overrides.emplace(inputAttrPath,
|
||||
OverrideTarget {
|
||||
overrides.emplace(
|
||||
inputAttrPath,
|
||||
OverrideTarget{
|
||||
.input = inputOverride,
|
||||
.sourcePath = sourcePath,
|
||||
.parentInputAttrPath = inputAttrPathPrefix
|
||||
});
|
||||
.parentInputAttrPath = inputAttrPathPrefix});
|
||||
addOverrides(inputOverride, inputAttrPath);
|
||||
}
|
||||
};
|
||||
|
|
@ -513,7 +498,8 @@ LockedFlake lockFlake(
|
|||
if (inputAttrPath2 == inputAttrPathPrefix && !flakeInputs.count(follow))
|
||||
warn(
|
||||
"input '%s' has an override for a non-existent input '%s'",
|
||||
printInputAttrPath(inputAttrPathPrefix), follow);
|
||||
printInputAttrPath(inputAttrPathPrefix),
|
||||
follow);
|
||||
}
|
||||
|
||||
/* Go over the flake inputs, resolve/fetch them if
|
||||
|
|
@ -558,36 +544,31 @@ LockedFlake lockFlake(
|
|||
}
|
||||
|
||||
if (!input.ref)
|
||||
input.ref = FlakeRef::fromAttrs(state.fetchSettings, {{"type", "indirect"}, {"id", std::string(id)}});
|
||||
input.ref =
|
||||
FlakeRef::fromAttrs(state.fetchSettings, {{"type", "indirect"}, {"id", std::string(id)}});
|
||||
|
||||
auto overriddenParentPath =
|
||||
input.ref->input.isRelative()
|
||||
? std::optional<InputAttrPath>(hasOverride ? i->second.parentInputAttrPath : inputAttrPathPrefix)
|
||||
: std::nullopt;
|
||||
? std::optional<InputAttrPath>(
|
||||
hasOverride ? i->second.parentInputAttrPath : inputAttrPathPrefix)
|
||||
: std::nullopt;
|
||||
|
||||
auto resolveRelativePath = [&]() -> std::optional<SourcePath>
|
||||
{
|
||||
auto resolveRelativePath = [&]() -> std::optional<SourcePath> {
|
||||
if (auto relativePath = input.ref->input.isRelative()) {
|
||||
return SourcePath {
|
||||
return SourcePath{
|
||||
overriddenSourcePath.accessor,
|
||||
CanonPath(*relativePath, overriddenSourcePath.path.parent().value())
|
||||
};
|
||||
CanonPath(*relativePath, overriddenSourcePath.path.parent().value())};
|
||||
} else
|
||||
return std::nullopt;
|
||||
};
|
||||
|
||||
/* Get the input flake, resolve 'path:./...'
|
||||
flakerefs relative to the parent flake. */
|
||||
auto getInputFlake = [&](const FlakeRef & ref, const fetchers::UseRegistries useRegistries)
|
||||
{
|
||||
auto getInputFlake = [&](const FlakeRef & ref, const fetchers::UseRegistries useRegistries) {
|
||||
if (auto resolvedPath = resolveRelativePath()) {
|
||||
return readFlake(state, ref, ref, ref, *resolvedPath, inputAttrPath);
|
||||
} else {
|
||||
return getFlake(
|
||||
state,
|
||||
ref,
|
||||
useRegistries,
|
||||
inputAttrPath);
|
||||
return getFlake(state, ref, useRegistries, inputAttrPath);
|
||||
}
|
||||
};
|
||||
|
||||
|
|
@ -602,21 +583,15 @@ LockedFlake lockFlake(
|
|||
if (auto oldLock3 = std::get_if<0>(&*oldLock2))
|
||||
oldLock = *oldLock3;
|
||||
|
||||
if (oldLock
|
||||
&& oldLock->originalRef.canonicalize() == input.ref->canonicalize()
|
||||
&& oldLock->parentInputAttrPath == overriddenParentPath
|
||||
&& !hasCliOverride)
|
||||
{
|
||||
if (oldLock && oldLock->originalRef.canonicalize() == input.ref->canonicalize()
|
||||
&& oldLock->parentInputAttrPath == overriddenParentPath && !hasCliOverride) {
|
||||
debug("keeping existing input '%s'", inputAttrPathS);
|
||||
|
||||
/* Copy the input from the old lock since its flakeref
|
||||
didn't change and there is no override from a
|
||||
higher level flake. */
|
||||
auto childNode = make_ref<LockedNode>(
|
||||
oldLock->lockedRef,
|
||||
oldLock->originalRef,
|
||||
oldLock->isFlake,
|
||||
oldLock->parentInputAttrPath);
|
||||
oldLock->lockedRef, oldLock->originalRef, oldLock->isFlake, oldLock->parentInputAttrPath);
|
||||
|
||||
node->inputs.insert_or_assign(id, childNode);
|
||||
|
||||
|
|
@ -624,10 +599,8 @@ LockedFlake lockFlake(
|
|||
must fetch the flake to update it. */
|
||||
auto lb = lockFlags.inputUpdates.lower_bound(inputAttrPath);
|
||||
|
||||
auto mustRefetch =
|
||||
lb != lockFlags.inputUpdates.end()
|
||||
&& lb->size() > inputAttrPath.size()
|
||||
&& std::equal(inputAttrPath.begin(), inputAttrPath.end(), lb->begin());
|
||||
auto mustRefetch = lb != lockFlags.inputUpdates.end() && lb->size() > inputAttrPath.size()
|
||||
&& std::equal(inputAttrPath.begin(), inputAttrPath.end(), lb->begin());
|
||||
|
||||
FlakeInputs fakeInputs;
|
||||
|
||||
|
|
@ -638,14 +611,17 @@ LockedFlake lockFlake(
|
|||
those. */
|
||||
for (auto & i : oldLock->inputs) {
|
||||
if (auto lockedNode = std::get_if<0>(&i.second)) {
|
||||
fakeInputs.emplace(i.first, FlakeInput {
|
||||
.ref = (*lockedNode)->originalRef,
|
||||
.isFlake = (*lockedNode)->isFlake,
|
||||
});
|
||||
fakeInputs.emplace(
|
||||
i.first,
|
||||
FlakeInput{
|
||||
.ref = (*lockedNode)->originalRef,
|
||||
.isFlake = (*lockedNode)->isFlake,
|
||||
});
|
||||
} else if (auto follows = std::get_if<1>(&i.second)) {
|
||||
if (!trustLock) {
|
||||
// It is possible that the flake has changed,
|
||||
// so we must confirm all the follows that are in the lock file are also in the flake.
|
||||
// so we must confirm all the follows that are in the lock file are also in the
|
||||
// flake.
|
||||
auto overridePath(inputAttrPath);
|
||||
overridePath.push_back(i.first);
|
||||
auto o = overrides.find(overridePath);
|
||||
|
|
@ -660,9 +636,11 @@ LockedFlake lockFlake(
|
|||
}
|
||||
auto absoluteFollows(followsPrefix);
|
||||
absoluteFollows.insert(absoluteFollows.end(), follows->begin(), follows->end());
|
||||
fakeInputs.emplace(i.first, FlakeInput {
|
||||
.follows = absoluteFollows,
|
||||
});
|
||||
fakeInputs.emplace(
|
||||
i.first,
|
||||
FlakeInput{
|
||||
.follows = absoluteFollows,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -670,10 +648,17 @@ LockedFlake lockFlake(
|
|||
if (mustRefetch) {
|
||||
auto inputFlake = getInputFlake(oldLock->lockedRef, useRegistriesInputs);
|
||||
nodePaths.emplace(childNode, inputFlake.path.parent());
|
||||
computeLocks(inputFlake.inputs, childNode, inputAttrPath, oldLock, followsPrefix,
|
||||
inputFlake.path, false);
|
||||
computeLocks(
|
||||
inputFlake.inputs,
|
||||
childNode,
|
||||
inputAttrPath,
|
||||
oldLock,
|
||||
followsPrefix,
|
||||
inputFlake.path,
|
||||
false);
|
||||
} else {
|
||||
computeLocks(fakeInputs, childNode, inputAttrPath, oldLock, followsPrefix, sourcePath, true);
|
||||
computeLocks(
|
||||
fakeInputs, childNode, inputAttrPath, oldLock, followsPrefix, sourcePath, true);
|
||||
}
|
||||
|
||||
} else {
|
||||
|
|
@ -681,9 +666,7 @@ LockedFlake lockFlake(
|
|||
this input. */
|
||||
debug("creating new input '%s'", inputAttrPathS);
|
||||
|
||||
if (!lockFlags.allowUnlocked
|
||||
&& !input.ref->input.isLocked()
|
||||
&& !input.ref->input.isRelative())
|
||||
if (!lockFlags.allowUnlocked && !input.ref->input.isLocked() && !input.ref->input.isRelative())
|
||||
throw Error("cannot update unlocked flake input '%s' in pure mode", inputAttrPathS);
|
||||
|
||||
/* Note: in case of an --override-input, we use
|
||||
|
|
@ -697,13 +680,11 @@ LockedFlake lockFlake(
|
|||
auto ref = (input2.ref && inputIsOverride) ? *input2.ref : *input.ref;
|
||||
|
||||
if (input.isFlake) {
|
||||
auto inputFlake = getInputFlake(*input.ref, inputIsOverride ? fetchers::UseRegistries::All : useRegistriesInputs);
|
||||
auto inputFlake = getInputFlake(
|
||||
*input.ref, inputIsOverride ? fetchers::UseRegistries::All : useRegistriesInputs);
|
||||
|
||||
auto childNode = make_ref<LockedNode>(
|
||||
inputFlake.lockedRef,
|
||||
ref,
|
||||
true,
|
||||
overriddenParentPath);
|
||||
auto childNode =
|
||||
make_ref<LockedNode>(inputFlake.lockedRef, ref, true, overriddenParentPath);
|
||||
|
||||
node->inputs.insert_or_assign(id, childNode);
|
||||
|
||||
|
|
@ -718,7 +699,9 @@ LockedFlake lockFlake(
|
|||
flake, using its own lock file. */
|
||||
nodePaths.emplace(childNode, inputFlake.path.parent());
|
||||
computeLocks(
|
||||
inputFlake.inputs, childNode, inputAttrPath,
|
||||
inputFlake.inputs,
|
||||
childNode,
|
||||
inputAttrPath,
|
||||
readLockFile(state.fetchSettings, inputFlake.lockFilePath()).root.get_ptr(),
|
||||
inputAttrPath,
|
||||
inputFlake.path,
|
||||
|
|
@ -726,21 +709,19 @@ LockedFlake lockFlake(
|
|||
}
|
||||
|
||||
else {
|
||||
auto [path, lockedRef] = [&]() -> std::tuple<SourcePath, FlakeRef>
|
||||
{
|
||||
auto [path, lockedRef] = [&]() -> std::tuple<SourcePath, FlakeRef> {
|
||||
// Handle non-flake 'path:./...' inputs.
|
||||
if (auto resolvedPath = resolveRelativePath()) {
|
||||
return {*resolvedPath, *input.ref};
|
||||
} else {
|
||||
auto cachedInput = state.inputCache->getAccessor(
|
||||
state.store,
|
||||
input.ref->input,
|
||||
useRegistriesInputs);
|
||||
state.store, input.ref->input, useRegistriesInputs);
|
||||
|
||||
auto lockedRef = FlakeRef(std::move(cachedInput.lockedInput), input.ref->subdir);
|
||||
|
||||
// FIXME: allow input to be lazy.
|
||||
auto storePath = copyInputToStore(state, lockedRef.input, input.ref->input, cachedInput.accessor);
|
||||
auto storePath = copyInputToStore(
|
||||
state, lockedRef.input, input.ref->input, cachedInput.accessor);
|
||||
|
||||
return {state.storePath(storePath), lockedRef};
|
||||
}
|
||||
|
|
@ -774,8 +755,10 @@ LockedFlake lockFlake(
|
|||
|
||||
for (auto & i : lockFlags.inputOverrides)
|
||||
if (!overridesUsed.count(i.first))
|
||||
warn("the flag '--override-input %s %s' does not match any input",
|
||||
printInputAttrPath(i.first), i.second);
|
||||
warn(
|
||||
"the flag '--override-input %s %s' does not match any input",
|
||||
printInputAttrPath(i.first),
|
||||
i.second);
|
||||
|
||||
for (auto & i : lockFlags.inputUpdates)
|
||||
if (!updatesUsed.count(i))
|
||||
|
|
@ -799,12 +782,19 @@ LockedFlake lockFlake(
|
|||
if (lockFlags.failOnUnlocked)
|
||||
throw Error(
|
||||
"Not writing lock file of flake '%s' because it has an unlocked input ('%s'). "
|
||||
"Use '--allow-dirty-locks' to allow this anyway.", topRef, *unlockedInput);
|
||||
"Use '--allow-dirty-locks' to allow this anyway.",
|
||||
topRef,
|
||||
*unlockedInput);
|
||||
if (state.fetchSettings.warnDirty)
|
||||
warn("not writing lock file of flake '%s' because it has an unlocked input ('%s')", topRef, *unlockedInput);
|
||||
warn(
|
||||
"not writing lock file of flake '%s' because it has an unlocked input ('%s')",
|
||||
topRef,
|
||||
*unlockedInput);
|
||||
} else {
|
||||
if (!lockFlags.updateLockFile)
|
||||
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
|
||||
throw Error(
|
||||
"flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'",
|
||||
topRef);
|
||||
|
||||
auto newLockFileS = fmt("%s\n", newLockFile);
|
||||
|
||||
|
|
@ -845,36 +835,31 @@ LockedFlake lockFlake(
|
|||
|
||||
topRef.input.putFile(
|
||||
CanonPath((topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock"),
|
||||
newLockFileS, commitMessage);
|
||||
newLockFileS,
|
||||
commitMessage);
|
||||
}
|
||||
|
||||
/* Rewriting the lockfile changed the top-level
|
||||
repo, so we should re-read it. FIXME: we could
|
||||
also just clear the 'rev' field... */
|
||||
auto prevLockedRef = flake.lockedRef;
|
||||
flake = getFlake(
|
||||
state,
|
||||
topRef,
|
||||
useRegistriesTop);
|
||||
flake = getFlake(state, topRef, useRegistriesTop);
|
||||
|
||||
if (lockFlags.commitLockFile &&
|
||||
flake.lockedRef.input.getRev() &&
|
||||
prevLockedRef.input.getRev() != flake.lockedRef.input.getRev())
|
||||
if (lockFlags.commitLockFile && flake.lockedRef.input.getRev()
|
||||
&& prevLockedRef.input.getRev() != flake.lockedRef.input.getRev())
|
||||
warn("committed new revision '%s'", flake.lockedRef.input.getRev()->gitRev());
|
||||
}
|
||||
} else
|
||||
throw Error("cannot write modified lock file of flake '%s' (use '--no-write-lock-file' to ignore)", topRef);
|
||||
throw Error(
|
||||
"cannot write modified lock file of flake '%s' (use '--no-write-lock-file' to ignore)", topRef);
|
||||
} else {
|
||||
warn("not writing modified lock file of flake '%s':\n%s", topRef, chomp(diff));
|
||||
flake.forceDirty = true;
|
||||
}
|
||||
}
|
||||
|
||||
return LockedFlake {
|
||||
.flake = std::move(flake),
|
||||
.lockFile = std::move(newLockFile),
|
||||
.nodePaths = std::move(nodePaths)
|
||||
};
|
||||
return LockedFlake{
|
||||
.flake = std::move(flake), .lockFile = std::move(newLockFile), .nodePaths = std::move(nodePaths)};
|
||||
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while updating the lock file of flake '%s'", flake.lockedRef.to_string());
|
||||
|
|
@ -882,28 +867,28 @@ LockedFlake lockFlake(
|
|||
}
|
||||
}
|
||||
|
||||
static ref<SourceAccessor> makeInternalFS() {
|
||||
auto internalFS = make_ref<MemorySourceAccessor>(MemorySourceAccessor {});
|
||||
static ref<SourceAccessor> makeInternalFS()
|
||||
{
|
||||
auto internalFS = make_ref<MemorySourceAccessor>(MemorySourceAccessor{});
|
||||
internalFS->setPathDisplay("«flakes-internal»", "");
|
||||
internalFS->addFile(
|
||||
CanonPath("call-flake.nix"),
|
||||
#include "call-flake.nix.gen.hh"
|
||||
#include "call-flake.nix.gen.hh"
|
||||
);
|
||||
return internalFS;
|
||||
}
|
||||
|
||||
static auto internalFS = makeInternalFS();
|
||||
|
||||
static Value * requireInternalFile(EvalState & state, CanonPath path) {
|
||||
SourcePath p {internalFS, path};
|
||||
static Value * requireInternalFile(EvalState & state, CanonPath path)
|
||||
{
|
||||
SourcePath p{internalFS, path};
|
||||
auto v = state.allocValue();
|
||||
state.evalFile(p, *v); // has caching
|
||||
return v;
|
||||
}
|
||||
|
||||
void callFlake(EvalState & state,
|
||||
const LockedFlake & lockedFlake,
|
||||
Value & vRes)
|
||||
void callFlake(EvalState & state, const LockedFlake & lockedFlake, Value & vRes)
|
||||
{
|
||||
experimentalFeatureSettings.require(Xp::Flakes);
|
||||
|
||||
|
|
@ -931,9 +916,7 @@ void callFlake(EvalState & state,
|
|||
auto key = keyMap.find(node);
|
||||
assert(key != keyMap.end());
|
||||
|
||||
override
|
||||
.alloc(state.symbols.create("dir"))
|
||||
.mkString(CanonPath(subdir).rel());
|
||||
override.alloc(state.symbols.create("dir")).mkString(CanonPath(subdir).rel());
|
||||
|
||||
overrides.alloc(state.symbols.create(key->second)).mkAttrs(override);
|
||||
}
|
||||
|
|
@ -952,16 +935,16 @@ void callFlake(EvalState & state,
|
|||
state.callFunction(*vCallFlake, args, vRes, noPos);
|
||||
}
|
||||
|
||||
}
|
||||
} // namespace flake
|
||||
|
||||
std::optional<Fingerprint> LockedFlake::getFingerprint(
|
||||
ref<Store> store,
|
||||
const fetchers::Settings & fetchSettings) const
|
||||
std::optional<Fingerprint> LockedFlake::getFingerprint(ref<Store> store, const fetchers::Settings & fetchSettings) const
|
||||
{
|
||||
if (lockFile.isUnlocked(fetchSettings)) return std::nullopt;
|
||||
if (lockFile.isUnlocked(fetchSettings))
|
||||
return std::nullopt;
|
||||
|
||||
auto fingerprint = flake.lockedRef.input.getFingerprint(store);
|
||||
if (!fingerprint) return std::nullopt;
|
||||
if (!fingerprint)
|
||||
return std::nullopt;
|
||||
|
||||
*fingerprint += fmt(";%s;%s", flake.lockedRef.subdir, lockFile);
|
||||
|
||||
|
|
@ -979,6 +962,6 @@ std::optional<Fingerprint> LockedFlake::getFingerprint(
|
|||
return hashString(HashAlgorithm::SHA256, *fingerprint);
|
||||
}
|
||||
|
||||
Flake::~Flake() { }
|
||||
Flake::~Flake() {}
|
||||
|
||||
}
|
||||
} // namespace nix
@ -29,15 +29,13 @@ fetchers::Attrs FlakeRef::toAttrs() const
|
|||
return attrs;
|
||||
}
|
||||
|
||||
std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef)
|
||||
std::ostream & operator<<(std::ostream & str, const FlakeRef & flakeRef)
|
||||
{
|
||||
str << flakeRef.to_string();
|
||||
return str;
|
||||
}
|
||||
|
||||
FlakeRef FlakeRef::resolve(
|
||||
ref<Store> store,
|
||||
fetchers::UseRegistries useRegistries) const
|
||||
FlakeRef FlakeRef::resolve(ref<Store> store, fetchers::UseRegistries useRegistries) const
|
||||
{
|
||||
auto [input2, extraAttrs] = lookupInRegistries(store, input, useRegistries);
|
||||
return FlakeRef(std::move(input2), fetchers::maybeGetStrAttr(extraAttrs, "dir").value_or(subdir));
|
||||
|
|
@ -51,16 +49,15 @@ FlakeRef parseFlakeRef(
|
|||
bool isFlake,
|
||||
bool preserveRelativePaths)
|
||||
{
|
||||
auto [flakeRef, fragment] = parseFlakeRefWithFragment(fetchSettings, url, baseDir, allowMissing, isFlake, preserveRelativePaths);
|
||||
auto [flakeRef, fragment] =
|
||||
parseFlakeRefWithFragment(fetchSettings, url, baseDir, allowMissing, isFlake, preserveRelativePaths);
|
||||
if (fragment != "")
|
||||
throw Error("unexpected fragment '%s' in flake reference '%s'", fragment, url);
|
||||
return flakeRef;
|
||||
}
|
||||
|
||||
static std::pair<FlakeRef, std::string> fromParsedURL(
|
||||
const fetchers::Settings & fetchSettings,
|
||||
ParsedURL && parsedURL,
|
||||
bool isFlake)
|
||||
static std::pair<FlakeRef, std::string>
|
||||
fromParsedURL(const fetchers::Settings & fetchSettings, ParsedURL && parsedURL, bool isFlake)
|
||||
{
|
||||
auto dir = getOr(parsedURL.query, "dir", "");
|
||||
parsedURL.query.erase("dir");
|
||||
|
|
@ -79,9 +76,7 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
|
|||
bool isFlake,
|
||||
bool preserveRelativePaths)
|
||||
{
|
||||
static std::regex pathFlakeRegex(
|
||||
R"(([^?#]*)(\?([^#]*))?(#(.*))?)",
|
||||
std::regex::ECMAScript);
|
||||
static std::regex pathFlakeRegex(R"(([^?#]*)(\?([^#]*))?(#(.*))?)", std::regex::ECMAScript);
|
||||
|
||||
std::smatch match;
|
||||
auto succeeds = std::regex_match(url, match, pathFlakeRegex);
|
||||
|
|
@ -104,16 +99,17 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
|
|||
// Be gentle with people who accidentally write `/foo/bar/flake.nix` instead of `/foo/bar`
|
||||
warn(
|
||||
"Path '%s' should point at the directory containing the 'flake.nix' file, not the file itself. "
|
||||
"Pretending that you meant '%s'"
|
||||
, path, dirOf(path));
|
||||
"Pretending that you meant '%s'",
|
||||
path,
|
||||
dirOf(path));
|
||||
path = dirOf(path);
|
||||
} else {
|
||||
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
|
||||
}
|
||||
}
|
||||
|
||||
if (!allowMissing && !pathExists(path + "/flake.nix")){
|
||||
notice("path '%s' does not contain a 'flake.nix', searching up",path);
|
||||
if (!allowMissing && !pathExists(path + "/flake.nix")) {
|
||||
notice("path '%s' does not contain a 'flake.nix', searching up", path);
|
||||
|
||||
// Save device to detect filesystem boundary
|
||||
dev_t device = lstat(path).st_dev;
|
||||
|
|
@ -123,7 +119,9 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
|
|||
found = true;
|
||||
break;
|
||||
} else if (pathExists(path + "/.git"))
|
||||
throw Error("path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", path);
|
||||
throw Error(
|
||||
"path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)",
|
||||
path);
|
||||
else {
|
||||
if (lstat(path).st_dev != device)
|
||||
throw Error("unable to find a flake before encountering filesystem boundary at '%s'", path);
|
||||
|
|
@ -172,29 +170,23 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
|
|||
throw BadURL("flake reference '%s' is not an absolute path", url);
|
||||
}
|
||||
|
||||
return fromParsedURL(fetchSettings, {
|
||||
.scheme = "path",
|
||||
.authority = "",
|
||||
.path = path,
|
||||
.query = query,
|
||||
.fragment = fragment
|
||||
}, isFlake);
|
||||
return fromParsedURL(
|
||||
fetchSettings,
|
||||
{.scheme = "path", .authority = "", .path = path, .query = query, .fragment = fragment},
|
||||
isFlake);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if `url` is a flake ID. This is an abbreviated syntax for
|
||||
* `flake:<flake-id>?ref=<ref>&rev=<rev>`.
|
||||
*/
|
||||
static std::optional<std::pair<FlakeRef, std::string>> parseFlakeIdRef(
|
||||
const fetchers::Settings & fetchSettings,
|
||||
const std::string & url,
|
||||
bool isFlake)
|
||||
static std::optional<std::pair<FlakeRef, std::string>>
|
||||
parseFlakeIdRef(const fetchers::Settings & fetchSettings, const std::string & url, bool isFlake)
|
||||
{
|
||||
std::smatch match;
|
||||
|
||||
static std::regex flakeRegex(
|
||||
"((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)"
|
||||
+ "(?:#(" + fragmentRegex + "))?",
|
||||
"((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)" + "(?:#(" + fragmentRegex + "))?",
|
||||
std::regex::ECMAScript);
|
||||
|
||||
if (std::regex_match(url, match, flakeRegex)) {
|
||||
|
|
@ -205,8 +197,7 @@ static std::optional<std::pair<FlakeRef, std::string>> parseFlakeIdRef(
|
|||
};
|
||||
|
||||
return std::make_pair(
|
||||
FlakeRef(fetchers::Input::fromURL(fetchSettings, parsedURL, isFlake), ""),
|
||||
percentDecode(match.str(6)));
|
||||
FlakeRef(fetchers::Input::fromURL(fetchSettings, parsedURL, isFlake), ""), percentDecode(match.str(6)));
|
||||
}
|
||||
|
||||
return {};
|
||||
|
|
@ -220,9 +211,7 @@ std::optional<std::pair<FlakeRef, std::string>> parseURLFlakeRef(
|
|||
{
|
||||
try {
|
||||
auto parsed = parseURL(url);
|
||||
if (baseDir
|
||||
&& (parsed.scheme == "path" || parsed.scheme == "git+file")
|
||||
&& !isAbsolute(parsed.path))
|
||||
if (baseDir && (parsed.scheme == "path" || parsed.scheme == "git+file") && !isAbsolute(parsed.path))
|
||||
parsed.path = absPath(parsed.path, *baseDir);
|
||||
return fromParsedURL(fetchSettings, std::move(parsed), isFlake);
|
||||
} catch (BadURL &) {
|
||||
|
|
@ -249,9 +238,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
|||
}
|
||||
}
|
||||
|
||||
FlakeRef FlakeRef::fromAttrs(
|
||||
const fetchers::Settings & fetchSettings,
|
||||
const fetchers::Attrs & attrs)
|
||||
FlakeRef FlakeRef::fromAttrs(const fetchers::Settings & fetchSettings, const fetchers::Attrs & attrs)
|
||||
{
|
||||
auto attrs2(attrs);
|
||||
attrs2.erase("dir");
|
||||
|
|
@ -323,12 +310,11 @@ std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragment
|
|||
bool isFlake)
|
||||
{
|
||||
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(url);
|
||||
auto [flakeRef, fragment] = parseFlakeRefWithFragment(
|
||||
fetchSettings,
|
||||
std::string { prefix }, baseDir, allowMissing, isFlake);
|
||||
auto [flakeRef, fragment] =
|
||||
parseFlakeRefWithFragment(fetchSettings, std::string{prefix}, baseDir, allowMissing, isFlake);
|
||||
return {std::move(flakeRef), fragment, std::move(extendedOutputsSpec)};
|
||||
}
|
||||
|
||||
std::regex flakeIdRegex(flakeIdRegexS, std::regex::ECMAScript);
|
||||
|
||||
}
|
||||
} // namespace nix
@@ -13,4 +13,4 @@ nix::PrimOp getFlake(const Settings & settings);
 extern nix::PrimOp parseFlakeRef;
 extern nix::PrimOp flakeRefToString;
 
-} // namespace nix::flake
+} // namespace nix::flake::primops

@@ -134,9 +134,7 @@ struct LockedFlake
      */
     std::map<ref<Node>, SourcePath> nodePaths;
 
-    std::optional<Fingerprint> getFingerprint(
-        ref<Store> store,
-        const fetchers::Settings & fetchSettings) const;
+    std::optional<Fingerprint> getFingerprint(ref<Store> store, const fetchers::Settings & fetchSettings) const;
 };
 
 struct LockFlags

@@ -215,18 +213,12 @@ struct LockFlags
     std::set<InputAttrPath> inputUpdates;
 };
 
-LockedFlake lockFlake(
-    const Settings & settings,
-    EvalState & state,
-    const FlakeRef & flakeRef,
-    const LockFlags & lockFlags);
+LockedFlake
+lockFlake(const Settings & settings, EvalState & state, const FlakeRef & flakeRef, const LockFlags & lockFlags);
 
-void callFlake(
-    EvalState & state,
-    const LockedFlake & lockedFlake,
-    Value & v);
+void callFlake(EvalState & state, const LockedFlake & lockedFlake, Value & v);
 
-}
+} // namespace flake
 
 void emitTreeAttrs(
     EvalState & state,

@@ -241,6 +233,6 @@ void emitTreeAttrs(
  * always treats the input as final (i.e. no attributes can be
  * added/removed/changed).
  */
-void prim_fetchFinalTree(EvalState & state, const PosIdx pos, Value * * args, Value & v);
+void prim_fetchFinalTree(EvalState & state, const PosIdx pos, Value ** args, Value & v);
 
-}
+} // namespace nix

@@ -47,29 +47,27 @@ struct FlakeRef
      */
     Path subdir;
 
-    bool operator ==(const FlakeRef & other) const = default;
+    bool operator==(const FlakeRef & other) const = default;
 
-    bool operator <(const FlakeRef & other) const
+    bool operator<(const FlakeRef & other) const
     {
         return std::tie(input, subdir) < std::tie(other.input, other.subdir);
     }
 
     FlakeRef(fetchers::Input && input, const Path & subdir)
-        : input(std::move(input)), subdir(subdir)
-    { }
+        : input(std::move(input))
+        , subdir(subdir)
+    {
+    }
 
     // FIXME: change to operator <<.
     std::string to_string() const;
 
     fetchers::Attrs toAttrs() const;
 
-    FlakeRef resolve(
-        ref<Store> store,
-        fetchers::UseRegistries useRegistries = fetchers::UseRegistries::All) const;
+    FlakeRef resolve(ref<Store> store, fetchers::UseRegistries useRegistries = fetchers::UseRegistries::All) const;
 
-    static FlakeRef fromAttrs(
-        const fetchers::Settings & fetchSettings,
-        const fetchers::Attrs & attrs);
+    static FlakeRef fromAttrs(const fetchers::Settings & fetchSettings, const fetchers::Attrs & attrs);
 
     std::pair<ref<SourceAccessor>, FlakeRef> lazyFetch(ref<Store> store) const;
 
@@ -80,7 +78,7 @@ struct FlakeRef
     FlakeRef canonicalize() const;
 };
 
-std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef);
+std::ostream & operator<<(std::ostream & str, const FlakeRef & flakeRef);
 
 /**
  * @param baseDir Optional [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory)

@@ -117,4 +115,4 @@ std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragment
 const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*";
 extern std::regex flakeIdRegex;
 
-}
+} // namespace nix

@@ -8,7 +8,7 @@
 namespace nix {
 class Store;
 class StorePath;
-}
+} // namespace nix
 
 namespace nix::flake {
 
@@ -27,7 +27,7 @@ struct Node : std::enable_shared_from_this<Node>
 
     std::map<FlakeId, Edge> inputs;
 
-    virtual ~Node() { }
+    virtual ~Node() {}
 };
 
 /**
@@ -51,11 +51,10 @@ struct LockedNode : Node
         , originalRef(std::move(originalRef))
         , isFlake(isFlake)
         , parentInputAttrPath(std::move(parentInputAttrPath))
-    { }
+    {
+    }
 
-    LockedNode(
-        const fetchers::Settings & fetchSettings,
-        const nlohmann::json & json);
+    LockedNode(const fetchers::Settings & fetchSettings, const nlohmann::json & json);
 
     StorePath computeStorePath(Store & store) const;
 };

@@ -65,9 +64,7 @@ struct LockFile
     ref<Node> root = make_ref<Node>();
 
     LockFile() {};
-    LockFile(
-        const fetchers::Settings & fetchSettings,
-        std::string_view contents, std::string_view path);
+    LockFile(const fetchers::Settings & fetchSettings, std::string_view contents, std::string_view path);
 
     typedef std::map<ref<const Node>, std::string> KeyMap;
 
@@ -81,7 +78,7 @@ struct LockFile
      */
     std::optional<FlakeRef> isUnlocked(const fetchers::Settings & fetchSettings) const;
 
-    bool operator ==(const LockFile & other) const;
+    bool operator==(const LockFile & other) const;
 
     std::shared_ptr<Node> findInput(const InputAttrPath & path);
 
@@ -95,10 +92,10 @@ struct LockFile
     void check();
 };
 
-std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile);
+std::ostream & operator<<(std::ostream & stream, const LockFile & lockFile);
 
 InputAttrPath parseInputAttrPath(std::string_view s);
 
 std::string printInputAttrPath(const InputAttrPath & path);
 
-}
+} // namespace nix::flake

@@ -50,4 +50,4 @@ struct Settings : public Config
         Xp::Flakes};
 };
 
-}
+} // namespace nix::flake

@@ -17,4 +17,4 @@ namespace nix {
  */
 std::optional<std::string> getNameFromURL(const ParsedURL & url);
 
-}
+} // namespace nix

@ -12,14 +12,10 @@
|
|||
#include <iterator>
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
|
||||
namespace nix::flake {
|
||||
|
||||
static FlakeRef getFlakeRef(
|
||||
const fetchers::Settings & fetchSettings,
|
||||
const nlohmann::json & json,
|
||||
const char * attr,
|
||||
const char * info)
|
||||
static FlakeRef
|
||||
getFlakeRef(const fetchers::Settings & fetchSettings, const nlohmann::json & json, const char * attr, const char * info)
|
||||
{
|
||||
auto i = json.find(attr);
|
||||
if (i != json.end()) {
|
||||
|
|
@ -38,13 +34,12 @@ static FlakeRef getFlakeRef(
|
|||
throw Error("attribute '%s' missing in lock file", attr);
|
||||
}
|
||||
|
||||
LockedNode::LockedNode(
|
||||
const fetchers::Settings & fetchSettings,
|
||||
const nlohmann::json & json)
|
||||
LockedNode::LockedNode(const fetchers::Settings & fetchSettings, const nlohmann::json & json)
|
||||
: lockedRef(getFlakeRef(fetchSettings, json, "locked", "info")) // FIXME: remove "info"
|
||||
, originalRef(getFlakeRef(fetchSettings, json, "original", nullptr))
|
||||
, isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
|
||||
, parentInputAttrPath(json.find("parent") != json.end() ? (std::optional<InputAttrPath>) json["parent"] : std::nullopt)
|
||||
, parentInputAttrPath(
|
||||
json.find("parent") != json.end() ? (std::optional<InputAttrPath>) json["parent"] : std::nullopt)
|
||||
{
|
||||
if (!lockedRef.input.isLocked() && !lockedRef.input.isRelative()) {
|
||||
if (lockedRef.input.getNarHash())
|
||||
|
|
@ -53,7 +48,8 @@ LockedNode::LockedNode(
|
|||
"This is deprecated since such inputs are verifiable but may not be reproducible.",
|
||||
lockedRef.to_string());
|
||||
else
|
||||
throw Error("Lock file contains unlocked input '%s'. Use '--allow-dirty-locks' to accept this lock file.",
|
||||
throw Error(
|
||||
"Lock file contains unlocked input '%s'. Use '--allow-dirty-locks' to accept this lock file.",
|
||||
fetchers::attrsToJSON(lockedRef.input.toAttrs()));
|
||||
}
|
||||
|
||||
|
|
@ -67,7 +63,8 @@ StorePath LockedNode::computeStorePath(Store & store) const
|
|||
return lockedRef.input.computeStorePath(store);
|
||||
}
|
||||
|
||||
static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputAttrPath & path, std::vector<InputAttrPath> & visited)
|
||||
static std::shared_ptr<Node>
|
||||
doFind(const ref<Node> & root, const InputAttrPath & path, std::vector<InputAttrPath> & visited)
|
||||
{
|
||||
auto pos = root;
|
||||
|
||||
|
|
@ -104,9 +101,7 @@ std::shared_ptr<Node> LockFile::findInput(const InputAttrPath & path)
|
|||
return doFind(root, path, visited);
|
||||
}
|
||||
|
||||
LockFile::LockFile(
|
||||
const fetchers::Settings & fetchSettings,
|
||||
std::string_view contents, std::string_view path)
|
||||
LockFile::LockFile(const fetchers::Settings & fetchSettings, std::string_view contents, std::string_view path)
|
||||
{
|
||||
auto json = [=] {
|
||||
try {
|
||||
|
|
@ -123,9 +118,9 @@ LockFile::LockFile(
|
|||
|
||||
std::function<void(Node & node, const nlohmann::json & jsonNode)> getInputs;
|
||||
|
||||
getInputs = [&](Node & node, const nlohmann::json & jsonNode)
|
||||
{
|
||||
if (jsonNode.find("inputs") == jsonNode.end()) return;
|
||||
getInputs = [&](Node & node, const nlohmann::json & jsonNode) {
|
||||
if (jsonNode.find("inputs") == jsonNode.end())
|
||||
return;
|
||||
for (auto & i : jsonNode["inputs"].items()) {
|
||||
if (i.value().is_array()) { // FIXME: remove, obsolete
|
||||
InputAttrPath path;
|
||||
|
|
@ -171,14 +166,13 @@ std::pair<nlohmann::json, LockFile::KeyMap> LockFile::toJSON() const
|
|||
|
||||
std::function<std::string(const std::string & key, ref<const Node> node)> dumpNode;
|
||||
|
||||
dumpNode = [&](std::string key, ref<const Node> node) -> std::string
|
||||
{
|
||||
dumpNode = [&](std::string key, ref<const Node> node) -> std::string {
|
||||
auto k = nodeKeys.find(node);
|
||||
if (k != nodeKeys.end())
|
||||
return k->second;
|
||||
|
||||
if (!keys.insert(key).second) {
|
||||
for (int n = 2; ; ++n) {
|
||||
for (int n = 2;; ++n) {
|
||||
auto k = fmt("%s_%d", key, n);
|
||||
if (keys.insert(k).second) {
|
||||
key = k;
|
||||
|
|
@ -239,7 +233,7 @@ std::pair<std::string, LockFile::KeyMap> LockFile::to_string() const
|
|||
return {json.dump(2), std::move(nodeKeys)};
|
||||
}
|
||||
|
||||
std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile)
|
||||
std::ostream & operator<<(std::ostream & stream, const LockFile & lockFile)
|
||||
{
|
||||
stream << lockFile.toJSON().first.dump(2);
|
||||
return stream;
|
||||
|
|
@ -251,9 +245,9 @@ std::optional<FlakeRef> LockFile::isUnlocked(const fetchers::Settings & fetchSet
|
|||
|
||||
std::function<void(ref<const Node> node)> visit;
|
||||
|
||||
visit = [&](ref<const Node> node)
|
||||
{
|
||||
if (!nodes.insert(node).second) return;
|
||||
visit = [&](ref<const Node> node) {
|
||||
if (!nodes.insert(node).second)
|
||||
return;
|
||||
for (auto & i : node->inputs)
|
||||
if (auto child = std::get_if<0>(&i.second))
|
||||
visit(*child);
|
||||
|
|
@ -265,17 +259,15 @@ std::optional<FlakeRef> LockFile::isUnlocked(const fetchers::Settings & fetchSet
|
|||
`allow-dirty-locks` is enabled, it has a NAR hash. In the
|
||||
latter case, we can verify the input but we may not be able to
|
||||
fetch it from anywhere. */
|
||||
auto isConsideredLocked = [&](const fetchers::Input & input)
|
||||
{
|
||||
auto isConsideredLocked = [&](const fetchers::Input & input) {
|
||||
return input.isLocked() || (fetchSettings.allowDirtyLocks && input.getNarHash());
|
||||
};
|
||||
|
||||
for (auto & i : nodes) {
|
||||
if (i == ref<const Node>(root)) continue;
|
||||
if (i == ref<const Node>(root))
|
||||
continue;
|
||||
auto node = i.dynamic_pointer_cast<const LockedNode>();
|
||||
if (node
|
||||
&& (!isConsideredLocked(node->lockedRef.input)
|
||||
|| !node->lockedRef.input.isFinal())
|
||||
if (node && (!isConsideredLocked(node->lockedRef.input) || !node->lockedRef.input.isFinal())
|
||||
&& !node->lockedRef.input.isRelative())
|
||||
return node->lockedRef;
|
||||
}
|
||||
|
|
@ -283,7 +275,7 @@ std::optional<FlakeRef> LockFile::isUnlocked(const fetchers::Settings & fetchSet
|
|||
return {};
|
||||
}
|
||||
|
||||
bool LockFile::operator ==(const LockFile & other) const
|
||||
bool LockFile::operator==(const LockFile & other) const
|
||||
{
|
||||
// FIXME: slow
|
||||
return toJSON().first == other.toJSON().first;
|
||||
|
|
@ -309,11 +301,11 @@ std::map<InputAttrPath, Node::Edge> LockFile::getAllInputs() const
|
|||
|
||||
std::function<void(const InputAttrPath & prefix, ref<Node> node)> recurse;
|
||||
|
||||
recurse = [&](const InputAttrPath & prefix, ref<Node> node)
|
||||
{
|
||||
if (!done.insert(node).second) return;
|
||||
recurse = [&](const InputAttrPath & prefix, ref<Node> node) {
|
||||
if (!done.insert(node).second)
|
||||
return;
|
||||
|
||||
for (auto &[id, input] : node->inputs) {
|
||||
for (auto & [id, input] : node->inputs) {
|
||||
auto inputAttrPath(prefix);
|
||||
inputAttrPath.push_back(id);
|
||||
res.emplace(inputAttrPath, input);
|
||||
|
|
@ -337,7 +329,7 @@ static std::string describe(const FlakeRef & flakeRef)
|
|||
return s;
|
||||
}
|
||||
|
||||
std::ostream & operator <<(std::ostream & stream, const Node::Edge & edge)
|
||||
std::ostream & operator<<(std::ostream & stream, const Node::Edge & edge)
|
||||
{
|
||||
if (auto node = std::get_if<0>(&edge))
|
||||
stream << describe((*node)->lockedRef);
|
||||
|
|
@ -368,18 +360,19 @@ std::string LockFile::diff(const LockFile & oldLocks, const LockFile & newLocks)
|
|||
|
||||
while (i != oldFlat.end() || j != newFlat.end()) {
|
||||
if (j != newFlat.end() && (i == oldFlat.end() || i->first > j->first)) {
|
||||
res += fmt("• " ANSI_GREEN "Added input '%s':" ANSI_NORMAL "\n %s\n",
|
||||
printInputAttrPath(j->first), j->second);
|
||||
res += fmt(
|
||||
"• " ANSI_GREEN "Added input '%s':" ANSI_NORMAL "\n %s\n", printInputAttrPath(j->first), j->second);
|
||||
++j;
|
||||
} else if (i != oldFlat.end() && (j == newFlat.end() || i->first < j->first)) {
|
||||
res += fmt("• " ANSI_RED "Removed input '%s'" ANSI_NORMAL "\n", printInputAttrPath(i->first));
|
||||
++i;
|
||||
} else {
|
||||
if (!equals(i->second, j->second)) {
|
||||
res += fmt("• " ANSI_BOLD "Updated input '%s':" ANSI_NORMAL "\n %s\n → %s\n",
|
||||
printInputAttrPath(i->first),
|
||||
i->second,
|
||||
j->second);
|
||||
res +=
|
||||
fmt("• " ANSI_BOLD "Updated input '%s':" ANSI_NORMAL "\n %s\n → %s\n",
|
||||
printInputAttrPath(i->first),
|
||||
i->second,
|
||||
j->second);
|
||||
}
|
||||
++i;
|
||||
++j;
|
||||
|
|
@ -396,7 +389,8 @@ void LockFile::check()
|
|||
for (auto & [inputAttrPath, input] : inputs) {
|
||||
if (auto follows = std::get_if<1>(&input)) {
|
||||
if (!follows->empty() && !findInput(*follows))
|
||||
throw Error("input '%s' follows a non-existent input '%s'",
|
||||
throw Error(
|
||||
"input '%s' follows a non-existent input '%s'",
|
||||
printInputAttrPath(inputAttrPath),
|
||||
printInputAttrPath(*follows));
|
||||
}
|
||||
|
|
@ -410,4 +404,4 @@ std::string printInputAttrPath(const InputAttrPath & path)
|
|||
return concatStringsSep("/", path);
|
||||
}
|
||||
|
||||
}
|
||||
} // namespace nix::flake
@@ -12,4 +12,4 @@ void Settings::configureEvalSettings(nix::EvalSettings & evalSettings) const
     evalSettings.extraPrimOps.emplace_back(primops::flakeRefToString);
 }
 
-} // namespace nix
+} // namespace nix::flake

@@ -5,10 +5,11 @@
 namespace nix {
 
 static const std::string attributeNamePattern("[a-zA-Z0-9_-]+");
-static const std::regex lastAttributeRegex("^((?:" + attributeNamePattern + "\\.)*)(" + attributeNamePattern +")(\\^.*)?$");
+static const std::regex
+    lastAttributeRegex("^((?:" + attributeNamePattern + "\\.)*)(" + attributeNamePattern + ")(\\^.*)?$");
 static const std::string pathSegmentPattern("[a-zA-Z0-9_-]+");
-static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern +")");
-static const std::regex secondPathSegmentRegex("(?:" + pathSegmentPattern + ")/(" + pathSegmentPattern +")(?:/.*)?");
+static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern + ")");
+static const std::regex secondPathSegmentRegex("(?:" + pathSegmentPattern + ")/(" + pathSegmentPattern + ")(?:/.*)?");
 static const std::regex gitProviderRegex("github|gitlab|sourcehut");
 static const std::regex gitSchemeRegex("git($|\\+.*)");
 
@@ -21,8 +22,7 @@ std::optional<std::string> getNameFromURL(const ParsedURL & url)
         return url.query.at("dir");
 
     /* If the fragment isn't a "default" and contains two attribute elements, use the last one */
-    if (std::regex_match(url.fragment, match, lastAttributeRegex)
-        && match.str(1) != "defaultPackage."
+    if (std::regex_match(url.fragment, match, lastAttributeRegex) && match.str(1) != "defaultPackage."
         && match.str(2) != "default") {
         return match.str(2);
     }

@@ -43,4 +43,4 @@ std::optional<std::string> getNameFromURL(const ParsedURL & url)
     return {};
 }
 
-}
+} // namespace nix