NixOS/nix (mirror of https://github.com/NixOS/nix.git)

Commit 6d0043902a: Merge remote-tracking branch 'origin/master' into lfs

251 changed files with 1001 additions and 5288 deletions
@@ -41,7 +41,7 @@ INPUT = \
     @src@/src/libutil-c \
     @src@/src/libexpr-c \
     @src@/src/libstore-c \
-    @src@/doc/external-api/README.md
+    @src@/src/external-api-docs/README.md

 FILE_PATTERNS = nix_api_*.h *.md

@@ -55,6 +55,8 @@ EXCLUDE_PATTERNS = *_internal.h
 GENERATE_TREEVIEW = YES
 OPTIMIZE_OUTPUT_FOR_C = YES

-USE_MDFILE_AS_MAINPAGE = doc/external-api/README.md
+USE_MDFILE_AS_MAINPAGE = @src@/src/external-api-docs/README.md

+WARN_IF_UNDOCUMENTED = NO
+WARN_IF_INCOMPLETE_DOC = NO
 QUIET = YES
@@ -43,8 +43,8 @@ INPUT = \
     @src@/libexpr/flake \
     @src@/libexpr-tests \
     @src@/libexpr-tests/value \
-    @src@/libexpr-test-support/test \
-    @src@/libexpr-test-support/test/value \
+    @src@/libexpr-test-support/tests \
+    @src@/libexpr-test-support/tests/value \
     @src@/libexpr/value \
     @src@/libfetchers \
     @src@/libmain \

@@ -52,10 +52,11 @@ INPUT = \
     @src@/libstore/build \
     @src@/libstore/builtins \
     @src@/libstore-tests \
-    @src@/libstore-test-support/test \
+    @src@/libstore-test-support/tests \
     @src@/libutil \
     @src@/libutil/args \
     @src@/libutil-tests \
-    @src@/libutil-test-support/test \
+    @src@/libutil-test-support/tests \
     @src@/nix \
     @src@/nix-env \
     @src@/nix-store

@@ -83,7 +84,9 @@ EXPAND_ONLY_PREDEF = YES
 # RECURSIVE has no effect here.
 # This tag requires that the tag SEARCH_INCLUDES is set to YES.

-INCLUDE_PATH =
+INCLUDE_PATH = \
+    @BUILD_ROOT@/src/libexpr/libnixexpr.so.p \
+    @BUILD_ROOT@/src/nix/nix.p \

 # If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
 # tag can be used to specify a list of macro names that should be expanded. The

@@ -96,7 +99,18 @@ EXPAND_AS_DEFINED = \
     DECLARE_COMMON_SERIALISER \
     DECLARE_WORKER_SERIALISER \
     DECLARE_SERVE_SERIALISER \
-    LENGTH_PREFIXED_PROTO_HELPER
+    LENGTH_PREFIXED_PROTO_HELPER \
+    LENGTH_PREFIXED_PROTO_HELPER_X \
+    WORKER_USE_LENGTH_PREFIX_SERIALISER \
+    WORKER_USE_LENGTH_PREFIX_SERIALISER_COMMA \
+    SERVE_USE_LENGTH_PREFIX_SERIALISER \
+    SERVE_USE_LENGTH_PREFIX_SERIALISER_COMMA \
+    COMMON_METHODS \
+    JSON_IMPL \
+    MakeBinOp

 PREDEFINED = DOXYGEN_SKIP

+WARN_IF_UNDOCUMENTED = NO
+WARN_IF_INCOMPLETE_DOC = NO
+QUIET = YES
@@ -12,6 +12,7 @@ doxygen_cfg = configure_file(
   configuration : {
     'PROJECT_NUMBER': meson.project_version(),
     'OUTPUT_DIRECTORY' : meson.current_build_dir(),
+    'BUILD_ROOT' : meson.build_root(),
     'src' : fs.parent(fs.parent(meson.project_source_root())) / 'src',
   },
 )
@@ -32,16 +32,6 @@ InstallableDerivedPath InstallableDerivedPath::parse(
             // store path.
             [&](const ExtendedOutputsSpec::Default &) -> DerivedPath {
                 auto storePath = store->followLinksToStorePath(prefix);
-                // Remove this prior to stabilizing the new CLI.
-                if (storePath.isDerivation()) {
-                    auto oldDerivedPath = DerivedPath::Built {
-                        .drvPath = makeConstantStorePathRef(storePath),
-                        .outputs = OutputsSpec::All { },
-                    };
-                    warn(
-                        "The interpretation of store paths arguments ending in `.drv` recently changed. If this command is now failing try again with '%s'",
-                        oldDerivedPath.to_string(*store));
-                };
                 return DerivedPath::Opaque {
                     .path = std::move(storePath),
                 };
@@ -26,7 +26,7 @@ struct ExtraPathInfoFlake : ExtraPathInfoValue
     Flake flake;

     ExtraPathInfoFlake(Value && v, Flake && f)
-        : ExtraPathInfoValue(std::move(v)), flake(f)
+        : ExtraPathInfoValue(std::move(v)), flake(std::move(f))
     { }
 };

@@ -59,7 +59,7 @@ struct ExtraPathInfoValue : ExtraPathInfo
     Value value;

     ExtraPathInfoValue(Value && v)
-        : value(v)
+        : value(std::move(v))
     { }

     virtual ~ExtraPathInfoValue() = default;
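The two constructor fixes above are the same idiom: a parameter declared `Value && v` (or `Flake && f`) is a named variable and therefore an lvalue inside the constructor, so initialising a member with `value(v)` silently copies. The following is a minimal, self-contained sketch of that rule; `Holder` and `Payload` are made-up names, not types from this commit.

    #include <string>
    #include <utility>

    struct Payload {
        std::string data;
    };

    struct Holder {
        Payload payload;

        // `p` is an rvalue *reference*, but as a named parameter it is an lvalue,
        // so `payload(p)` would invoke the copy constructor. std::move restores
        // the rvalue category and lets the member steal the buffer instead.
        explicit Holder(Payload && p)
            : payload(std::move(p))
        { }
    };

    int main()
    {
        Holder h(Payload{std::string(1024, 'x')});  // payload is moved, not copied
        return h.payload.data.size() == 1024 ? 0 : 1;
    }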
@@ -857,6 +857,7 @@ std::vector<FlakeRef> RawInstallablesCommand::getFlakeRefsForCompletion()
 {
     applyDefaultInstallables(rawInstallables);
     std::vector<FlakeRef> res;
+    res.reserve(rawInstallables.size());
     for (auto i : rawInstallables)
         res.push_back(parseFlakeRefWithFragment(
             fetchSettings,
@@ -1,15 +0,0 @@
-libraries += libcmd
-
-libcmd_NAME = libnixcmd
-
-libcmd_DIR := $(d)
-
-libcmd_SOURCES := $(wildcard $(d)/*.cc)
-
-libcmd_CXXFLAGS += $(INCLUDE_libutil) $(INCLUDE_libstore) $(INCLUDE_libfetchers) $(INCLUDE_libexpr) $(INCLUDE_libflake) $(INCLUDE_libmain)
-
-libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) $(THREAD_LDFLAGS)
-
-libcmd_LIBS = libutil libstore libfetchers libflake libexpr libmain
-
-$(eval $(call install-file-in, $(buildprefix)$(d)/nix-cmd.pc, $(libdir)/pkgconfig, 0644))
@@ -1,9 +0,0 @@
-prefix=@prefix@
-libdir=@libdir@
-includedir=@includedir@
-
-Name: Nix
-Description: Nix Package Manager
-Version: @PACKAGE_VERSION@
-Libs: -L${libdir} -lnixcmd
-Cflags: -I${includedir}/nix -std=c++2a
@@ -1,25 +0,0 @@
-libraries += libexprc
-
-libexprc_NAME = libnixexprc
-
-libexprc_DIR := $(d)
-
-libexprc_SOURCES := \
-  $(wildcard $(d)/*.cc) \
-
-# Not just for this library itself, but also for downstream libraries using this library
-
-INCLUDE_libexprc := -I $(d)
-libexprc_CXXFLAGS += $(INCLUDE_libutil) $(INCLUDE_libutilc) \
-  $(INCLUDE_libfetchers) \
-  $(INCLUDE_libstore) $(INCLUDE_libstorec) \
-  $(INCLUDE_libexpr) $(INCLUDE_libexprc)
-
-libexprc_LIBS = libutil libutilc libstore libstorec libfetchers libexpr
-
-libexprc_LDFLAGS += $(THREAD_LDFLAGS)
-
-$(eval $(call install-file-in, $(d)/nix-expr-c.pc, $(libdir)/pkgconfig, 0644))
-
-libexprc_FORCE_INSTALL := 1
-
@@ -1,10 +0,0 @@
-prefix=@prefix@
-libdir=@libdir@
-includedir=@includedir@
-
-Name: Nix
-Description: Nix Language Evaluator - C API
-Version: @PACKAGE_VERSION@
-Requires: nix-store-c
-Libs: -L${libdir} -lnixexprc
-Cflags: -I${includedir}/nix
@@ -129,7 +129,7 @@ nix_err nix_value_call_multi(
  * @param[in] state The state of the evaluation.
  * @param[out] value The result of the function call.
  * @param[in] fn The Nix function to call.
- * @param[in] args The arguments to pass to the function.
+ * @param[in] ... The arguments to pass to the function.
  *
  * @see nix_value_call_multi
  */
@@ -77,8 +77,7 @@ typedef struct ExternalValue ExternalValue;
  */
 typedef struct nix_realised_string nix_realised_string;

-/** @defgroup primops
- * @brief Create your own primops
+/** @defgroup primops Adding primops
  * @{
  */
 /** @brief Function pointer for primops

@@ -252,7 +251,7 @@ int64_t nix_get_int(nix_c_context * context, const nix_value * value);
  * @param[in] value Nix value to inspect
  * @return reference to external, NULL in case of error
  */
-ExternalValue * nix_get_external(nix_c_context * context, nix_value *);
+ExternalValue * nix_get_external(nix_c_context * context, nix_value * value);

 /** @brief Get the ix'th element of a list
  *

@@ -423,7 +422,7 @@ nix_list_builder_insert(nix_c_context * context, ListBuilder * list_builder, uns
 /** @brief Free a list builder
  *
  * Does not fail.
- * @param[in] builder the builder to free
+ * @param[in] list_builder The builder to free.
  */
 void nix_list_builder_free(ListBuilder * list_builder);

@@ -1,23 +0,0 @@
-libraries += libexpr-test-support
-
-libexpr-test-support_NAME = libnixexpr-test-support
-
-libexpr-test-support_DIR := $(d)
-
-ifeq ($(INSTALL_UNIT_TESTS), yes)
-  libexpr-test-support_INSTALL_DIR := $(checklibdir)
-else
-  libexpr-test-support_INSTALL_DIR :=
-endif
-
-libexpr-test-support_SOURCES := \
-  $(wildcard $(d)/tests/*.cc) \
-  $(wildcard $(d)/tests/value/*.cc)
-
-libexpr-test-support_CXXFLAGS += $(libexpr-tests_EXTRA_INCLUDES)
-
-libexpr-test-support_LIBS = \
-  libstore-test-support libutil-test-support \
-  libexpr libstore libutil
-
-libexpr-test-support_LDFLAGS := $(THREAD_LDFLAGS) -lrapidcheck
@@ -1,45 +0,0 @@
-check: libexpr-tests_RUN
-
-programs += libexpr-tests
-
-libexpr-tests_NAME := libnixexpr-tests
-
-libexpr-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data GTEST_OUTPUT=xml:$$testresults/libexpr-tests.xml
-
-libexpr-tests_DIR := $(d)
-
-ifeq ($(INSTALL_UNIT_TESTS), yes)
-  libexpr-tests_INSTALL_DIR := $(checkbindir)
-else
-  libexpr-tests_INSTALL_DIR :=
-endif
-
-libexpr-tests_SOURCES := \
-  $(wildcard $(d)/*.cc) \
-  $(wildcard $(d)/value/*.cc) \
-  $(wildcard $(d)/flake/*.cc)
-
-libexpr-tests_EXTRA_INCLUDES = \
-  -I src/libexpr-test-support \
-  -I src/libstore-test-support \
-  -I src/libutil-test-support \
-  $(INCLUDE_libexpr) \
-  $(INCLUDE_libexprc) \
-  $(INCLUDE_libfetchers) \
-  $(INCLUDE_libstore) \
-  $(INCLUDE_libstorec) \
-  $(INCLUDE_libutil) \
-  $(INCLUDE_libutilc)
-
-libexpr-tests_CXXFLAGS += $(libexpr-tests_EXTRA_INCLUDES)
-
-libexpr-tests_LIBS = \
-  libexpr-test-support libstore-test-support libutil-test-support \
-  libexpr libexprc libfetchers libstore libstorec libutil libutilc
-
-libexpr-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) -lgmock
-
-ifdef HOST_WINDOWS
-  # Increase the default reserved stack size to 65 MB so Nix doesn't run out of space
-  libexpr-tests_LDFLAGS += -Wl,--stack,$(shell echo $$((65 * 1024 * 1024)))
-endif
@@ -10,6 +10,9 @@ lockFileStr:
 # unlocked trees.
 overrides:

+# This is `prim_fetchFinalTree`.
+fetchTreeFinal:
+
 let

   lockFile = builtins.fromJSON lockFileStr;

@@ -44,7 +47,8 @@ let
           overrides.${key}.sourceInfo
         else
           # FIXME: remove obsolete node.info.
-          fetchTree (node.info or {} // removeAttrs node.locked ["dir"]);
+          # Note: lock file entries are always final.
+          fetchTreeFinal (node.info or {} // removeAttrs node.locked ["dir"]);

       subdir = overrides.${key}.dir or node.locked.dir or "";

@@ -510,9 +510,15 @@ Value * EvalState::addPrimOp(PrimOp && primOp)

     Value * v = allocValue();
     v->mkPrimOp(new PrimOp(primOp));
-    staticBaseEnv->vars.emplace_back(envName, baseEnvDispl);
-    baseEnv.values[baseEnvDispl++] = v;
-    baseEnv.values[0]->payload.attrs->push_back(Attr(symbols.create(primOp.name), v));
+
+    if (primOp.internal)
+        internalPrimOps.emplace(primOp.name, v);
+    else {
+        staticBaseEnv->vars.emplace_back(envName, baseEnvDispl);
+        baseEnv.values[baseEnvDispl++] = v;
+        baseEnv.values[0]->payload.attrs->push_back(Attr(symbols.create(primOp.name), v));
+    }

     return v;
 }

@@ -1730,7 +1736,7 @@ void EvalState::incrFunctionCall(ExprLambda * fun)
 }


-void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
+void EvalState::autoCallFunction(const Bindings & args, Value & fun, Value & res)
 {
     auto pos = fun.determinePos(noPos);

@@ -91,7 +91,7 @@ struct PrimOp
     const char * doc = nullptr;

     /**
-     * Add a trace item, `while calling the '<name>' builtin`
+     * Add a trace item, while calling the `<name>` builtin.
      *
      * This is used to remove the redundant item for `builtins.addErrorContext`.
      */

@@ -107,6 +107,11 @@ struct PrimOp
      */
     std::optional<ExperimentalFeature> experimentalFeature;

+    /**
+     * If true, this primop is not exposed to the user.
+     */
+    bool internal = false;
+
     /**
      * Validity check to be performed by functions that introduce primops,
      * such as RegisterPrimOp() and Value::mkPrimOp().

@@ -591,6 +596,11 @@ public:
      */
     std::shared_ptr<StaticEnv> staticBaseEnv; // !!! should be private

+    /**
+     * Internal primops not exposed to the user.
+     */
+    std::unordered_map<std::string, Value *, std::hash<std::string>, std::equal_to<std::string>, traceable_allocator<std::pair<const std::string, Value *>>> internalPrimOps;
+
     /**
      * Name and documentation about every constant.
      *

@@ -693,7 +703,7 @@ public:
      * Automatically call a function for which each argument has a
      * default value or has a binding in the `args` map.
      */
-    void autoCallFunction(Bindings & args, Value & fun, Value & res);
+    void autoCallFunction(const Bindings & args, Value & fun, Value & res);

     /**
      * Allocation primitives.
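The `internal` flag and the `internalPrimOps` map added above split primop registration into two paths: internal primops never enter the static base environment (so Nix code cannot reach them through `builtins`), but C++ callers can still fetch them by name, as `callFlake()` does later in this commit with `fetchFinalTree`. The following is a self-contained toy model of that split, not the real `EvalState` API.

    #include <cassert>
    #include <map>
    #include <string>
    #include <utility>

    // Toy stand-ins for Value* and the two registries in EvalState.
    using Value = std::string;

    struct ToyEvalState {
        std::map<std::string, Value> baseEnv;          // what Nix code sees as `builtins`
        std::map<std::string, Value> internalPrimOps;  // reachable from C++ only

        void addPrimOp(const std::string & name, Value v, bool internal)
        {
            if (internal)
                internalPrimOps.emplace(name, std::move(v));
            else
                baseEnv.emplace(name, std::move(v));
        }
    };

    int main()
    {
        ToyEvalState state;
        state.addPrimOp("fetchTree", "public primop", false);
        state.addPrimOp("fetchFinalTree", "internal primop", true);

        // The internal primop is not exposed to the language...
        assert(!state.baseEnv.count("fetchFinalTree"));
        // ...but native code can still look it up by name.
        assert(state.internalPrimOps.count("fetchFinalTree"));
        return 0;
    }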
@@ -1,50 +0,0 @@
-libraries += libexpr
-
-libexpr_NAME = libnixexpr
-
-libexpr_DIR := $(d)
-
-libexpr_SOURCES := \
-  $(wildcard $(d)/*.cc) \
-  $(wildcard $(d)/value/*.cc) \
-  $(wildcard $(d)/primops/*.cc) \
-  $(d)/lexer-tab.cc \
-  $(d)/parser-tab.cc
-# Not just for this library itself, but also for downstream libraries using this library
-
-INCLUDE_libexpr := -I $(d)
-
-libexpr_CXXFLAGS += \
-  $(INCLUDE_libutil) $(INCLUDE_libstore) $(INCLUDE_libfetchers) $(INCLUDE_libexpr) \
-  -DGC_THREADS
-
-libexpr_LIBS = libutil libstore libfetchers
-
-libexpr_LDFLAGS += -lboost_context $(THREAD_LDFLAGS)
-ifdef HOST_LINUX
-  libexpr_LDFLAGS += -ldl
-endif
-
-# The dependency on libgc must be propagated (i.e. meaning that
-# programs/libraries that use libexpr must explicitly pass -lgc),
-# because inline functions in libexpr's header files call libgc.
-libexpr_LDFLAGS_PROPAGATED = $(BDW_GC_LIBS)
-
-libexpr_ORDER_AFTER := $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh
-
-$(d)/parser-tab.cc $(d)/parser-tab.hh: $(d)/parser.y
-	$(trace-gen) bison -v -o $(libexpr_DIR)/parser-tab.cc $< -d
-
-$(d)/lexer-tab.cc $(d)/lexer-tab.hh: $(d)/lexer.l
-	$(trace-gen) flex --outfile $(libexpr_DIR)/lexer-tab.cc --header-file=$(libexpr_DIR)/lexer-tab.hh $<
-
-clean-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh
-
-$(eval $(call install-file-in, $(buildprefix)$(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))
-
-$(foreach i, $(wildcard src/libexpr/value/*.hh), \
-  $(eval $(call install-file-in, $(i), $(includedir)/nix/value, 0644)))
-
-$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh
-
-$(d)/eval.cc: $(d)/primops/derivation.nix.gen.hh $(d)/fetchurl.nix.gen.hh $(d)/call-flake.nix.gen.hh
@@ -1,10 +0,0 @@
-prefix=@prefix@
-libdir=@libdir@
-includedir=@includedir@
-
-Name: Nix
-Description: Nix Package Manager
-Version: @PACKAGE_VERSION@
-Requires: nix-store bdw-gc
-Libs: -L${libdir} -lnixexpr
-Cflags: -I${includedir}/nix -std=c++2a
@@ -206,7 +206,7 @@ struct ExprSelect : Expr
     /**
      * Evaluate the `a.b.c` part of `a.b.c.d`. This exists mostly for the purpose of :doc in the repl.
      *
-     * @param[out] v The attribute set that should contain the last attribute name (if it exists).
+     * @param[out] attrs The attribute set that should contain the last attribute name (if it exists).
      * @return The last attribute name in `attrPath`
      *
      * @note This does *not* evaluate the final attribute, and does not fail if that's the only attribute that does not exist.
@@ -71,6 +71,10 @@ mkMesonLibrary (finalAttrs: {
     nix-util
     nix-store
     nix-fetchers
+  ] ++ finalAttrs.passthru.externalPropagatedBuildInputs;
+
+  # Hack for sake of the dev shell
+  passthru.externalPropagatedBuildInputs = [
     boost
     nlohmann_json
   ] ++ lib.optional enableGC boehmgc;
@@ -91,6 +91,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context, StorePathS

     /* Build/substitute the context. */
     std::vector<DerivedPath> buildReqs;
+    buildReqs.reserve(drvs.size());
     for (auto & d : drvs) buildReqs.emplace_back(DerivedPath { d });
     buildStore->buildPaths(buildReqs, bmNormal, store);

@@ -78,6 +78,7 @@ struct FetchTreeParams {
     bool emptyRevFallback = false;
     bool allowNameArgument = false;
     bool isFetchGit = false;
+    bool isFinal = false;
 };

 static void fetchTree(

@@ -195,6 +196,13 @@ static void fetchTree(

     state.checkURI(input.toURLString());

+    if (params.isFinal) {
+        input.attrs.insert_or_assign("__final", Explicit<bool>(true));
+    } else {
+        if (input.isFinal())
+            throw Error("input '%s' is not allowed to use the '__final' attribute", input.to_string());
+    }
+
     auto [storePath, input2] = input.fetchToStore(state.store);

     state.allowPath(storePath);

@@ -431,6 +439,18 @@ static RegisterPrimOp primop_fetchTree({
     .experimentalFeature = Xp::FetchTree,
 });

+void prim_fetchFinalTree(EvalState & state, const PosIdx pos, Value * * args, Value & v)
+{
+    fetchTree(state, pos, args, v, {.isFinal = true});
+}
+
+static RegisterPrimOp primop_fetchFinalTree({
+    .name = "fetchFinalTree",
+    .args = {"input"},
+    .fun = prim_fetchFinalTree,
+    .internal = true,
+});
+
 static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v,
     const std::string & who, bool unpack, std::string name)
 {
@@ -141,7 +141,9 @@ public:
     Value * * elems;
     ListBuilder(EvalState & state, size_t size);

-    ListBuilder(ListBuilder && x)
+    // NOTE: Can be noexcept because we are just copying integral values and
+    // raw pointers.
+    ListBuilder(ListBuilder && x) noexcept
         : size(x.size)
         , inlineElems{x.inlineElems[0], x.inlineElems[1]}
         , elems(size <= 2 ? inlineElems : x.elems)
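The `noexcept` added above matters beyond documentation: standard containers only move elements during reallocation when the move constructor cannot throw; otherwise `std::move_if_noexcept` falls back to copying to preserve the strong exception guarantee. A self-contained demonstration with hypothetical types (not the real `ListBuilder`):

    #include <type_traits>
    #include <vector>

    struct ThrowingMove {
        ThrowingMove() = default;
        ThrowingMove(const ThrowingMove &) = default;
        ThrowingMove(ThrowingMove &&) { }            // not noexcept
    };

    struct NoexceptMove {
        NoexceptMove() = default;
        NoexceptMove(const NoexceptMove &) = default;
        NoexceptMove(NoexceptMove &&) noexcept { }
    };

    static_assert(!std::is_nothrow_move_constructible_v<ThrowingMove>);
    static_assert(std::is_nothrow_move_constructible_v<NoexceptMove>);

    int main()
    {
        // During reallocation, std::vector uses std::move_if_noexcept: elements
        // of NoexceptMove are moved, while elements of ThrowingMove are copied
        // so the container can keep its strong exception guarantee.
        std::vector<NoexceptMove> a(1);
        a.resize(100);
        std::vector<ThrowingMove> b(1);
        b.resize(100);
        return 0;
    }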
@@ -28,7 +28,7 @@ struct NixStringContextElem {
     /**
      * Plain opaque path to some store object.
      *
-     * Encoded as just the path: ‘<path>’.
+     * Encoded as just the path: `<path>`.
      */
     using Opaque = SingleDerivedPath::Opaque;

@@ -39,7 +39,7 @@ struct NixStringContextElem {
      * also all outputs of all derivations in that closure (including the
      * root derivation).
      *
-     * Encoded in the form ‘=<drvPath>’.
+     * Encoded in the form `=<drvPath>`.
      */
     struct DrvDeep {
         StorePath drvPath;

@@ -50,7 +50,7 @@ struct NixStringContextElem {
     /**
      * Derivation output.
      *
-     * Encoded in the form ‘!<output>!<drvPath>’.
+     * Encoded in the form `!<output>!<drvPath>`.
      */
     using Built = SingleDerivedPath::Built;

@@ -68,9 +68,9 @@ struct NixStringContextElem {

     /**
      * Decode a context string, one of:
-     * - ‘<path>’
-     * - ‘=<path>’
-     * - ‘!<name>!<path>’
+     * - `<path>`
+     * - `=<path>`
+     * - `!<name>!<path>`
      *
      * @param xpSettings Stop-gap to avoid globals during unit tests.
      */
@@ -1,37 +0,0 @@
-check: libfetchers-tests_RUN
-
-programs += libfetchers-tests
-
-libfetchers-tests_NAME = libnixfetchers-tests
-
-libfetchers-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data GTEST_OUTPUT=xml:$$testresults/libfetchers-tests.xml
-
-libfetchers-tests_DIR := $(d)
-
-ifeq ($(INSTALL_UNIT_TESTS), yes)
-  libfetchers-tests_INSTALL_DIR := $(checkbindir)
-else
-  libfetchers-tests_INSTALL_DIR :=
-endif
-
-libfetchers-tests_SOURCES := $(wildcard $(d)/*.cc)
-
-libfetchers-tests_EXTRA_INCLUDES = \
-  -I src/libstore-test-support \
-  -I src/libutil-test-support \
-  $(INCLUDE_libfetchers) \
-  $(INCLUDE_libstore) \
-  $(INCLUDE_libutil)
-
-libfetchers-tests_CXXFLAGS += $(libfetchers-tests_EXTRA_INCLUDES)
-
-libfetchers-tests_LIBS = \
-  libstore-test-support libutil-test-support \
-  libfetchers libstore libutil
-
-libfetchers-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) $(LIBGIT2_LIBS) $(LIBCURL_LIBS)
-
-ifdef HOST_WINDOWS
-  # Increase the default reserved stack size to 65 MB so Nix doesn't run out of space
-  libfetchers-tests_LDFLAGS += -Wl,--stack,$(shell echo $$((65 * 1024 * 1024)))
-endif
@@ -3,6 +3,7 @@
 #include "source-path.hh"
 #include "fetch-to-store.hh"
+#include "json-utils.hh"
 #include "store-path-accessor.hh"

 #include <nlohmann/json.hpp>

@@ -100,7 +101,7 @@ Input Input::fromAttrs(const Settings & settings, Attrs && attrs)
     auto allowedAttrs = inputScheme->allowedAttrs();

     for (auto & [name, _] : attrs)
-        if (name != "type" && allowedAttrs.count(name) == 0)
+        if (name != "type" && name != "__final" && allowedAttrs.count(name) == 0)
             throw Error("input attribute '%s' not supported by scheme '%s'", name, schemeName);

     auto res = inputScheme->inputFromAttrs(settings, attrs);

@@ -145,6 +146,11 @@ bool Input::isLocked() const
     return scheme && scheme->isLocked(*this);
 }

+bool Input::isFinal() const
+{
+    return maybeGetBoolAttr(attrs, "__final").value_or(false);
+}
+
 Attrs Input::toAttrs() const
 {
     return attrs;

@@ -172,16 +178,24 @@ std::pair<StorePath, Input> Input::fetchToStore(ref<Store> store) const

     auto [storePath, input] = [&]() -> std::pair<StorePath, Input> {
         try {
-            auto [accessor, final] = getAccessorUnchecked(store);
+            auto [accessor, result] = getAccessorUnchecked(store);

-            auto storePath = nix::fetchToStore(*store, SourcePath(accessor), FetchMode::Copy, final.getName());
+            auto storePath = nix::fetchToStore(*store, SourcePath(accessor), FetchMode::Copy, result.getName());

             auto narHash = store->queryPathInfo(storePath)->narHash;
-            final.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
+            result.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));

-            scheme->checkLocks(*this, final);
+            // FIXME: we would like to mark inputs as final in
+            // getAccessorUnchecked(), but then we can't add
+            // narHash. Or maybe narHash should be excluded from the
+            // concept of "final" inputs?
+            result.attrs.insert_or_assign("__final", Explicit<bool>(true));

-            return {storePath, final};
+            assert(result.isFinal());
+
+            checkLocks(*this, result);
+
+            return {storePath, result};
         } catch (Error & e) {
             e.addTrace({}, "while fetching the input '%s'", to_string());
             throw;
@@ -191,13 +205,40 @@ std::pair<StorePath, Input> Input::fetchToStore(ref<Store> store) const
     return {std::move(storePath), input};
 }

-void InputScheme::checkLocks(const Input & specified, const Input & final) const
+void Input::checkLocks(Input specified, Input & result)
 {
+    /* If the original input is final, then we just return the
+       original attributes, dropping any new fields returned by the
+       fetcher. However, any fields that are in both the specified and
+       result input must be identical. */
+    if (specified.isFinal()) {
+
+        /* Backwards compatibility hack: we had some lock files in the
+           past that 'narHash' fields with incorrect base-64
+           formatting (lacking the trailing '=', e.g. 'sha256-ri...Mw'
+           instead of ''sha256-ri...Mw='). So fix that. */
+        if (auto prevNarHash = specified.getNarHash())
+            specified.attrs.insert_or_assign("narHash", prevNarHash->to_string(HashFormat::SRI, true));
+
+        for (auto & field : specified.attrs) {
+            auto field2 = result.attrs.find(field.first);
+            if (field2 != result.attrs.end() && field.second != field2->second)
+                throw Error("mismatch in field '%s' of input '%s', got '%s'",
+                    field.first,
+                    attrsToJSON(specified.attrs),
+                    attrsToJSON(result.attrs));
+        }
+
+        result.attrs = specified.attrs;
+
+        return;
+    }
+
     if (auto prevNarHash = specified.getNarHash()) {
-        if (final.getNarHash() != prevNarHash) {
-            if (final.getNarHash())
+        if (result.getNarHash() != prevNarHash) {
+            if (result.getNarHash())
                 throw Error((unsigned int) 102, "NAR hash mismatch in input '%s', expected '%s' but got '%s'",
-                    specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true), final.getNarHash()->to_string(HashFormat::SRI, true));
+                    specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true), result.getNarHash()->to_string(HashFormat::SRI, true));
             else
                 throw Error((unsigned int) 102, "NAR hash mismatch in input '%s', expected '%s' but got none",
                     specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true));
@@ -205,32 +246,32 @@ void InputScheme::checkLocks(const Input & specified, const Input & final) const
     }

     if (auto prevLastModified = specified.getLastModified()) {
-        if (final.getLastModified() != prevLastModified)
-            throw Error("'lastModified' attribute mismatch in input '%s', expected %d",
-                final.to_string(), *prevLastModified);
+        if (result.getLastModified() != prevLastModified)
+            throw Error("'lastModified' attribute mismatch in input '%s', expected %d, got %d",
+                result.to_string(), *prevLastModified, result.getLastModified().value_or(-1));
     }

     if (auto prevRev = specified.getRev()) {
-        if (final.getRev() != prevRev)
+        if (result.getRev() != prevRev)
             throw Error("'rev' attribute mismatch in input '%s', expected %s",
-                final.to_string(), prevRev->gitRev());
+                result.to_string(), prevRev->gitRev());
     }

     if (auto prevRevCount = specified.getRevCount()) {
-        if (final.getRevCount() != prevRevCount)
+        if (result.getRevCount() != prevRevCount)
             throw Error("'revCount' attribute mismatch in input '%s', expected %d",
-                final.to_string(), *prevRevCount);
+                result.to_string(), *prevRevCount);
     }
 }

 std::pair<ref<SourceAccessor>, Input> Input::getAccessor(ref<Store> store) const
 {
     try {
-        auto [accessor, final] = getAccessorUnchecked(store);
+        auto [accessor, result] = getAccessorUnchecked(store);

-        scheme->checkLocks(*this, final);
+        checkLocks(*this, result);

-        return {accessor, std::move(final)};
+        return {accessor, std::move(result)};
     } catch (Error & e) {
         e.addTrace({}, "while fetching the input '%s'", to_string());
         throw;
@@ -244,12 +285,42 @@ std::pair<ref<SourceAccessor>, Input> Input::getAccessorUnchecked(ref<Store> sto
     if (!scheme)
         throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs()));

-    auto [accessor, final] = scheme->getAccessor(store, *this);
+    /* The tree may already be in the Nix store, or it could be
+       substituted (which is often faster than fetching from the
+       original source). So check that. We only do this for final
+       inputs, otherwise there is a risk that we don't return the
+       same attributes (like `lastModified`) that the "real" fetcher
+       would return.
+
+       FIXME: add a setting to disable this.
+       FIXME: substituting may be slower than fetching normally,
+       e.g. for fetchers like Git that are incremental!
+    */
+    if (isFinal() && getNarHash()) {
+        try {
+            auto storePath = computeStorePath(*store);
+
+            store->ensurePath(storePath);
+
+            debug("using substituted/cached input '%s' in '%s'",
+                to_string(), store->printStorePath(storePath));
+
+            auto accessor = makeStorePathAccessor(store, storePath);
+
+            accessor->fingerprint = scheme->getFingerprint(store, *this);
+
+            return {accessor, *this};
+        } catch (Error & e) {
+            debug("substitution of input '%s' failed: %s", to_string(), e.what());
+        }
+    }
+
+    auto [accessor, result] = scheme->getAccessor(store, *this);

     assert(!accessor->fingerprint);
-    accessor->fingerprint = scheme->getFingerprint(store, final);
+    accessor->fingerprint = scheme->getFingerprint(store, result);

-    return {accessor, std::move(final)};
+    return {accessor, std::move(result)};
 }

 Input Input::applyOverrides(
@@ -404,7 +475,10 @@ namespace nlohmann {

 using namespace nix;

-fetchers::PublicKey adl_serializer<fetchers::PublicKey>::from_json(const json & json) {
+#ifndef DOXYGEN_SKIP
+
+fetchers::PublicKey adl_serializer<fetchers::PublicKey>::from_json(const json & json)
+{
     fetchers::PublicKey res = { };
     if (auto type = optionalValueAt(json, "type"))
         res.type = getString(*type);
@@ -414,9 +488,12 @@ fetchers::PublicKey adl_serializer<fetchers::PublicKey>::from_json(const json &
     return res;
 }

-void adl_serializer<fetchers::PublicKey>::to_json(json & json, fetchers::PublicKey p) {
+void adl_serializer<fetchers::PublicKey>::to_json(json & json, fetchers::PublicKey p)
+{
     json["type"] = p.type;
     json["key"] = p.key;
 }

+#endif
+
 }
@@ -78,17 +78,30 @@ public:
     Attrs toAttrs() const;

     /**
-     * Check whether this is a "direct" input, that is, not
+     * Return whether this is a "direct" input, that is, not
      * one that goes through a registry.
      */
     bool isDirect() const;

     /**
-     * Check whether this is a "locked" input, that is,
-     * one that contains a commit hash or content hash.
+     * Return whether this is a "locked" input, that is, it has
+     * attributes like a Git revision or NAR hash that uniquely
+     * identify its contents.
      */
     bool isLocked() const;

+    /**
+     * Return whether this is a "final" input, meaning that fetching
+     * it will not add, remove or change any attributes. (See
+     * `checkLocks()` for the semantics.) Only "final" inputs can be
+     * substituted from a binary cache.
+     *
+     * The "final" state is denoted by the presence of an attribute
+     * `__final = true`. This attribute is currently undocumented and
+     * for internal use only.
+     */
+    bool isFinal() const;
+
     bool operator ==(const Input & other) const noexcept;

     bool contains(const Input & other) const;
@@ -99,6 +112,19 @@ public:
      */
     std::pair<StorePath, Input> fetchToStore(ref<Store> store) const;

+    /**
+     * Check the locking attributes in `result` against
+     * `specified`. E.g. if `specified` has a `rev` attribute, then
+     * `result` must have the same `rev` attribute. Throw an exception
+     * if there is a mismatch.
+     *
+     * If `specified` is marked final (i.e. has the `__final`
+     * attribute), then the intersection of attributes in `specified`
+     * and `result` must be equal, and `final.attrs` is set to
+     * `specified.attrs` (i.e. we discard any new attributes).
+     */
+    static void checkLocks(Input specified, Input & result);
+
     /**
      * Return a `SourceAccessor` that allows access to files in the
      * input without copying it to the store. Also return a possibly
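The doc comment above compresses two behaviours into a few sentences, so here is a self-contained toy model of the final-input branch it describes (plain `std::map` attributes instead of the real `Input`/`Attrs` types): attributes present in both inputs must agree, and the result's attributes are then reset to exactly the specified set.

    #include <map>
    #include <stdexcept>
    #include <string>

    using Attrs = std::map<std::string, std::string>;

    // Toy version of the `specified.isFinal()` branch of Input::checkLocks().
    void checkLocksFinalToy(const Attrs & specified, Attrs & result)
    {
        for (auto & [name, value] : specified) {
            auto it = result.find(name);
            if (it != result.end() && it->second != value)
                throw std::runtime_error("mismatch in field '" + name + "'");
        }
        result = specified;   // discard anything the fetcher added on top
    }

    int main()
    {
        Attrs specified{{"rev", "abc123"}, {"narHash", "sha256-..."}};
        Attrs result{{"rev", "abc123"}, {"narHash", "sha256-..."}, {"lastModified", "42"}};
        checkLocksFinalToy(specified, result);
        return result.count("lastModified") == 0 ? 0 : 1;   // extra attribute was dropped
    }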
@@ -144,6 +170,10 @@ public:
     /**
      * For locked inputs, return a string that uniquely specifies the
      * content of the input (typically a commit hash or content hash).
+     *
+     * Only known-equivalent inputs should return the same fingerprint.
+     *
+     * This is not a stable identifier between Nix versions, but not guaranteed to change either.
      */
     std::optional<std::string> getFingerprint(ref<Store> store) const;
 };
@@ -215,31 +245,11 @@ struct InputScheme
     virtual bool isDirect(const Input & input) const
     { return true; }

-    /**
-     * A sufficiently unique string that can be used as a cache key to identify the `input`.
-     *
-     * Only known-equivalent inputs should return the same fingerprint.
-     *
-     * This is not a stable identifier between Nix versions, but not guaranteed to change either.
-     */
     virtual std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const
     { return std::nullopt; }

-    /**
-     * Return `true` if this input is considered "locked", i.e. it has
-     * attributes like a Git revision or NAR hash that uniquely
-     * identify its contents.
-     */
     virtual bool isLocked(const Input & input) const
     { return false; }
-
-    /**
-     * Check the locking attributes in `final` against
-     * `specified`. E.g. if `specified` has a `rev` attribute, then
-     * `final` must have the same `rev` attribute. Throw an exception
-     * if there is a mismatch.
-     */
-    virtual void checkLocks(const Input & specified, const Input & final) const;
 };

 void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);
@@ -218,8 +218,12 @@ static void initRepoAtomically(std::filesystem::path &path, bool bare) {
     try {
         std::filesystem::rename(tmpDir, path);
     } catch (std::filesystem::filesystem_error & e) {
-        if (e.code() == std::errc::file_exists) // Someone might race us to create the repository.
+        // Someone may race us to create the repository.
+        if (e.code() == std::errc::file_exists
+            // `path` may be attempted to be deleted by s::f::rename, in which case the code is:
+            || e.code() == std::errc::directory_not_empty) {
             return;
+        }
         else
             throw SysError("moving temporary git repository from %s to %s", tmpDir, path);
     }
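The widened catch above tolerates both outcomes of losing the rename race: on POSIX, renaming onto an existing non-empty directory reports `directory_not_empty` (ENOTEMPTY) rather than `file_exists`. Below is a self-contained sketch of the same create-in-temp-then-rename pattern, with made-up paths and no libgit2 dependency.

    #include <filesystem>
    #include <iostream>

    namespace fs = std::filesystem;

    // Initialise `target` atomically: build it under a temporary name, then
    // publish it with a single rename. If another process wins the race, the
    // rename fails and we simply keep their result.
    void initDirAtomically(const fs::path & target, const fs::path & tmpDir)
    {
        fs::create_directories(tmpDir);
        // ... populate tmpDir here ...
        try {
            fs::rename(tmpDir, target);
        } catch (const fs::filesystem_error & e) {
            if (e.code() == std::errc::file_exists
                || e.code() == std::errc::directory_not_empty) {
                fs::remove_all(tmpDir);   // someone else created `target` first
                return;
            }
            throw;
        }
    }

    int main()
    {
        auto base = fs::temp_directory_path();
        initDirAtomically(base / "example-repo", base / "example-repo.tmp-12345");
        std::cout << "initialised\n";
        return 0;
    }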
@@ -1,17 +0,0 @@
-libraries += libfetchers
-
-libfetchers_NAME = libnixfetchers
-
-libfetchers_DIR := $(d)
-
-libfetchers_SOURCES := $(wildcard $(d)/*.cc)
-
-# Not just for this library itself, but also for downstream libraries using this library
-
-INCLUDE_libfetchers := -I $(d)
-
-libfetchers_CXXFLAGS += $(INCLUDE_libutil) $(INCLUDE_libstore) $(INCLUDE_libfetchers)
-
-libfetchers_LDFLAGS += $(THREAD_LDFLAGS) $(LIBGIT2_LIBS) $(LIBCURL_LIBS) -larchive
-
-libfetchers_LIBS = libutil libstore
@@ -72,6 +72,7 @@ struct PathInputScheme : InputScheme
         auto query = attrsToQuery(input.attrs);
         query.erase("path");
         query.erase("type");
+        query.erase("__final");
         return ParsedURL {
             .scheme = "path",
             .path = getStrAttr(input.attrs, "path"),
@@ -1,43 +0,0 @@
-check: libflake-tests_RUN
-
-programs += libflake-tests
-
-libflake-tests_NAME := libnixflake-tests
-
-libflake-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data GTEST_OUTPUT=xml:$$testresults/libflake-tests.xml
-
-libflake-tests_DIR := $(d)
-
-ifeq ($(INSTALL_UNIT_TESTS), yes)
-  libflake-tests_INSTALL_DIR := $(checkbindir)
-else
-  libflake-tests_INSTALL_DIR :=
-endif
-
-libflake-tests_SOURCES := \
-  $(wildcard $(d)/*.cc) \
-  $(wildcard $(d)/value/*.cc) \
-  $(wildcard $(d)/flake/*.cc)
-
-libflake-tests_EXTRA_INCLUDES = \
-  -I src/libflake-test-support \
-  -I src/libstore-test-support \
-  -I src/libutil-test-support \
-  $(INCLUDE_libflake) \
-  $(INCLUDE_libexpr) \
-  $(INCLUDE_libfetchers) \
-  $(INCLUDE_libstore) \
-  $(INCLUDE_libutil) \
-
-libflake-tests_CXXFLAGS += $(libflake-tests_EXTRA_INCLUDES)
-
-libflake-tests_LIBS = \
-  libexpr-test-support libstore-test-support libutil-test-support \
-  libflake libexpr libfetchers libstore libutil
-
-libflake-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) -lgmock
-
-ifdef HOST_WINDOWS
-  # Increase the default reserved stack size to 65 MB so Nix doesn't run out of space
-  libflake-tests_LDFLAGS += -Wl,--stack,$(shell echo $$((65 * 1024 * 1024)))
-endif
@@ -85,7 +85,6 @@ static void forceTrivialValue(EvalState & state, Value & value, const PosIdx pos
     state.forceValue(value, pos);
 }

-
 static void expectType(EvalState & state, ValueType type,
     Value & value, const PosIdx pos)
 {

@@ -810,12 +809,14 @@ void callFlake(EvalState & state,
     auto vCallFlake = state.allocValue();
     state.evalFile(state.callFlakeInternal, *vCallFlake);

-    auto vTmp1 = state.allocValue();
     auto vLocks = state.allocValue();
     vLocks->mkString(lockFileStr);
-    state.callFunction(*vCallFlake, *vLocks, *vTmp1, noPos);

-    state.callFunction(*vTmp1, vOverrides, vRes, noPos);
+    auto vFetchFinalTree = get(state.internalPrimOps, "fetchFinalTree");
+    assert(vFetchFinalTree);
+
+    Value * args[] = {vLocks, &vOverrides, *vFetchFinalTree};
+    state.callFunction(*vCallFlake, 3, args, vRes, noPos);
 }

 void initLib(const Settings & settings)
@@ -234,4 +234,11 @@ void emitTreeAttrs(
     bool emptyRevFallback = false,
     bool forceDirty = false);

+/**
+ * An internal builtin similar to `fetchTree`, except that it
+ * always treats the input as final (i.e. no attributes can be
+ * added/removed/changed).
+ */
+void prim_fetchFinalTree(EvalState & state, const PosIdx pos, Value * * args, Value & v);
+
 }
@@ -46,6 +46,10 @@ LockedNode::LockedNode(
     if (!lockedRef.input.isLocked())
         throw Error("lock file contains unlocked input '%s'",
             fetchers::attrsToJSON(lockedRef.input.toAttrs()));
+
+    // For backward compatibility, lock file entries are implicitly final.
+    assert(!lockedRef.input.attrs.contains("__final"));
+    lockedRef.input.attrs.insert_or_assign("__final", Explicit<bool>(true));
 }

 StorePath LockedNode::computeStorePath(Store & store) const

@@ -53,7 +57,6 @@ StorePath LockedNode::computeStorePath(Store & store) const
     return lockedRef.input.computeStorePath(store);
 }

-
 static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputPath & path, std::vector<InputPath> & visited)
 {
     auto pos = root;

@@ -191,6 +194,11 @@ std::pair<nlohmann::json, LockFile::KeyMap> LockFile::toJSON() const
         if (auto lockedNode = node.dynamic_pointer_cast<const LockedNode>()) {
             n["original"] = fetchers::attrsToJSON(lockedNode->originalRef.toAttrs());
             n["locked"] = fetchers::attrsToJSON(lockedNode->lockedRef.toAttrs());
+            /* For backward compatibility, omit the "__final"
+               attribute. We never allow non-final inputs in lock files
+               anyway. */
+            assert(lockedNode->lockedRef.input.isFinal());
+            n["locked"].erase("__final");
             if (!lockedNode->isFlake)
                 n["flake"] = false;
         }

@@ -239,7 +247,7 @@ std::optional<FlakeRef> LockFile::isUnlocked() const
     for (auto & i : nodes) {
         if (i == ref<const Node>(root)) continue;
         auto node = i.dynamic_pointer_cast<const LockedNode>();
-        if (node && !node->lockedRef.input.isLocked())
+        if (node && (!node->lockedRef.input.isLocked() || !node->lockedRef.input.isFinal()))
             return node->lockedRef;
     }

@@ -68,8 +68,8 @@ struct LockFile
     std::pair<std::string, KeyMap> to_string() const;

     /**
-     * Check whether this lock file has any unlocked inputs. If so,
-     * return one.
+     * Check whether this lock file has any unlocked or non-final
+     * inputs. If so, return one.
      */
     std::optional<FlakeRef> isUnlocked() const;

@@ -1,10 +0,0 @@
-prefix=@prefix@
-libdir=@libdir@
-includedir=@includedir@
-
-Name: Nix
-Description: Nix Package Manager
-Version: @PACKAGE_VERSION@
-Requires: nix-util nix-store nix-expr
-Libs: -L${libdir} -lnixflake
-Cflags: -I${includedir}/nix -std=c++2a
@@ -1,22 +0,0 @@
-libraries += libflake
-
-libflake_NAME = libnixflake
-
-libflake_DIR := $(d)
-
-libflake_SOURCES := $(wildcard $(d)/*.cc $(d)/flake/*.cc)
-
-# Not just for this library itself, but also for downstream libraries using this library
-
-INCLUDE_libflake := -I $(d)
-
-libflake_CXXFLAGS += $(INCLUDE_libutil) $(INCLUDE_libstore) $(INCLUDE_libfetchers) $(INCLUDE_libexpr) $(INCLUDE_libflake)
-
-libflake_LDFLAGS += $(THREAD_LDFLAGS)
-
-libflake_LIBS = libutil libstore libfetchers libexpr
-
-$(eval $(call install-file-in, $(buildprefix)$(d)/flake/nix-flake.pc, $(libdir)/pkgconfig, 0644))
-
-$(foreach i, $(wildcard src/libflake/flake/*.hh), \
-  $(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
@@ -17,7 +17,9 @@ MixCommonArgs::MixCommonArgs(const std::string & programName)
         .shortName = 'v',
         .description = "Increase the logging verbosity level.",
         .category = loggingCategory,
-        .handler = {[]() { verbosity = (Verbosity) (verbosity + 1); }},
+        .handler = {[]() {
+            verbosity = (Verbosity) std::min<std::underlying_type_t<Verbosity>>(verbosity + 1, lvlVomit);
+        }},
     });

     addFlag({
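The new handler clamps the counter instead of incrementing it without bound; going through the enum's underlying type keeps the comparison and the cast back well defined. A self-contained version of the same idiom with a made-up enum:

    #include <algorithm>
    #include <cassert>
    #include <type_traits>

    enum Level { lvlError, lvlWarn, lvlInfo, lvlDebug, lvlTrace };  // hypothetical levels

    Level bump(Level cur)
    {
        using U = std::underlying_type_t<Level>;
        // std::min over the underlying type caps the incremented value at the
        // last enumerator before converting back to the enum.
        return static_cast<Level>(std::min<U>(static_cast<U>(cur) + 1, lvlTrace));
    }

    int main()
    {
        Level v = lvlInfo;
        for (int i = 0; i < 10; ++i) v = bump(v);   // repeated -v flags
        assert(v == lvlTrace);                      // never runs past the maximum
        return 0;
    }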
@@ -1,22 +0,0 @@
-libraries += libmain
-
-libmain_NAME = libnixmain
-
-libmain_DIR := $(d)
-
-libmain_SOURCES := $(wildcard $(d)/*.cc)
-ifdef HOST_UNIX
-  libmain_SOURCES += $(wildcard $(d)/unix/*.cc)
-endif
-
-INCLUDE_libmain := -I $(d)
-
-libmain_CXXFLAGS += $(INCLUDE_libutil) $(INCLUDE_libstore) $(INCLUDE_libmain)
-
-libmain_LDFLAGS += $(OPENSSL_LIBS)
-
-libmain_LIBS = libstore libutil
-
-libmain_ALLOW_UNDEFINED = 1
-
-$(eval $(call install-file-in, $(buildprefix)$(d)/nix-main.pc, $(libdir)/pkgconfig, 0644))
@@ -1,9 +0,0 @@
-prefix=@prefix@
-libdir=@libdir@
-includedir=@includedir@
-
-Name: Nix
-Description: Nix Package Manager
-Version: @PACKAGE_VERSION@
-Libs: -L${libdir} -lnixmain
-Cflags: -I${includedir}/nix -std=c++2a
@@ -543,7 +543,7 @@ public:
         auto state(state_.lock());
         if (!state->active) return {};
         std::cerr << fmt("\r\e[K%s ", msg);
-        auto s = trim(readLine(STDIN_FILENO, true));
+        auto s = trim(readLine(getStandardInput(), true));
         if (s.size() != 1) return {};
         draw(*state);
         return s[0];
@@ -1,21 +0,0 @@
-libraries += libstorec
-
-libstorec_NAME = libnixstorec
-
-libstorec_DIR := $(d)
-
-libstorec_SOURCES := $(wildcard $(d)/*.cc)
-
-libstorec_LIBS = libutil libstore libutilc
-
-libstorec_LDFLAGS += $(THREAD_LDFLAGS)
-
-# Not just for this library itself, but also for downstream libraries using this library
-
-INCLUDE_libstorec := -I $(d)
-libstorec_CXXFLAGS += $(INCLUDE_libutil) $(INCLUDE_libutilc) \
-  $(INCLUDE_libstore) $(INCLUDE_libstorec)
-
-$(eval $(call install-file-in, $(d)/nix-store-c.pc, $(libdir)/pkgconfig, 0644))
-
-libstorec_FORCE_INSTALL := 1
@@ -1,9 +0,0 @@
-prefix=@prefix@
-libdir=@libdir@
-includedir=@includedir@
-
-Name: Nix
-Description: Nix Store - C API
-Version: @PACKAGE_VERSION@
-Libs: -L${libdir} -lnixstorec -lnixutilc
-Cflags: -I${includedir}/nix
@@ -166,7 +166,7 @@ nix_store_get_version(nix_c_context * context, Store * store, nix_get_string_cal
  *
  * @param[out] context Optional, stores error information
  * @param[in] srcStore nix source store reference
- * @param[in] srcStore nix destination store reference
+ * @param[in] dstStore nix destination store reference
  * @param[in] path Path to copy
  */
 nix_err nix_store_copy_closure(nix_c_context * context, Store * srcStore, Store * dstStore, StorePath * path);
@@ -1,21 +0,0 @@
-libraries += libstore-test-support
-
-libstore-test-support_NAME = libnixstore-test-support
-
-libstore-test-support_DIR := $(d)
-
-ifeq ($(INSTALL_UNIT_TESTS), yes)
-  libstore-test-support_INSTALL_DIR := $(checklibdir)
-else
-  libstore-test-support_INSTALL_DIR :=
-endif
-
-libstore-test-support_SOURCES := $(wildcard $(d)/tests/*.cc)
-
-libstore-test-support_CXXFLAGS += $(libstore-tests_EXTRA_INCLUDES)
-
-libstore-test-support_LIBS = \
-  libutil-test-support \
-  libstore libutil
-
-libstore-test-support_LDFLAGS := $(THREAD_LDFLAGS) -lrapidcheck
@@ -7,7 +7,7 @@
 namespace rc {
 using namespace nix;

-Gen<DerivedPath::Opaque> Arbitrary<DerivedPath::Opaque>::arbitrary()
+Gen<SingleDerivedPath::Opaque> Arbitrary<SingleDerivedPath::Opaque>::arbitrary()
 {
     return gen::just(DerivedPath::Opaque {
         .path = *gen::arbitrary<StorePath>(),
@@ -1,38 +0,0 @@
-check: libstore-tests_RUN
-
-programs += libstore-tests
-
-libstore-tests_NAME = libnixstore-tests
-
-libstore-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data GTEST_OUTPUT=xml:$$testresults/libstore-tests.xml
-
-libstore-tests_DIR := $(d)
-
-ifeq ($(INSTALL_UNIT_TESTS), yes)
-  libstore-tests_INSTALL_DIR := $(checkbindir)
-else
-  libstore-tests_INSTALL_DIR :=
-endif
-
-libstore-tests_SOURCES := $(wildcard $(d)/*.cc)
-
-libstore-tests_EXTRA_INCLUDES = \
-  -I src/libstore-test-support \
-  -I src/libutil-test-support \
-  $(INCLUDE_libstore) \
-  $(INCLUDE_libstorec) \
-  $(INCLUDE_libutil) \
-  $(INCLUDE_libutilc)
-
-libstore-tests_CXXFLAGS += $(libstore-tests_EXTRA_INCLUDES)
-
-libstore-tests_LIBS = \
-  libstore-test-support libutil-test-support \
-  libstore libstorec libutil libutilc
-
-libstore-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS)
-
-ifdef HOST_WINDOWS
-  # Increase the default reserved stack size to 65 MB so Nix doesn't run out of space
-  libstore-tests_LDFLAGS += -Wl,--stack,$(shell echo $$((65 * 1024 * 1024)))
-endif
@@ -38,6 +38,8 @@ VERSIONED_CHARACTERIZATION_TEST(
         "oh no \0\0\0 what was that!",
     }))

+#ifndef DOXYGEN_SKIP
+
 VERSIONED_CHARACTERIZATION_TEST(
     ServeProtoTest,
     storePath,

@@ -84,6 +86,8 @@ VERSIONED_CHARACTERIZATION_TEST(
         },
     }))

+#endif
+
 VERSIONED_CHARACTERIZATION_TEST(
     ServeProtoTest,
     realisation,
@@ -39,6 +39,8 @@ VERSIONED_CHARACTERIZATION_TEST(
         "oh no \0\0\0 what was that!",
     }))

+#ifndef DOXYGEN_SKIP
+
 VERSIONED_CHARACTERIZATION_TEST(
     WorkerProtoTest,
     storePath,

@@ -69,6 +71,8 @@ VERSIONED_CHARACTERIZATION_TEST(
         },
     }))

+#endif
+
 VERSIONED_CHARACTERIZATION_TEST(
     WorkerProtoTest,
     derivedPath_1_29,
@@ -66,6 +66,7 @@ std::vector<KeyedBuildResult> Store::buildPathsWithResults(
     worker.run(goals);

     std::vector<KeyedBuildResult> results;
+    results.reserve(state.size());

     for (auto & [req, goalPtr] : state)
         results.emplace_back(KeyedBuildResult {
@@ -107,7 +107,7 @@ protected:
 public:

     /**
-     * Suspend our goal and wait until we get @ref work()-ed again.
+     * Suspend our goal and wait until we get `work`-ed again.
      * `co_await`-able by @ref Co.
      */
     struct Suspend {};

@@ -192,7 +192,7 @@ public:

         bool await_ready() { return false; };
         /**
-         * When we `co_await` another @ref Co-returning coroutine,
+         * When we `co_await` another `Co`-returning coroutine,
          * we tell the caller of `caller_coroutine.resume()` to switch to our coroutine (@ref handle).
          * To make sure we return to the original coroutine, we set it as the continuation of our
          * coroutine. In @ref promise_type::final_awaiter we check if it's set and if so we return to it.

@@ -208,7 +208,7 @@ public:
         };

         /**
-         * Used on initial suspend, does the same as @ref std::suspend_always,
+         * Used on initial suspend, does the same as `std::suspend_always`,
          * but asserts that everything has been set correctly.
          */
         struct InitialSuspend {

@@ -269,8 +269,8 @@ public:
         };

         /**
-         * Called by compiler generated code to construct the @ref Co
-         * that is returned from a @ref Co-returning coroutine.
+         * Called by compiler generated code to construct the `Co`
+         * that is returned from a `Co`-returning coroutine.
          */
         Co get_return_object();

@@ -208,7 +208,7 @@ public:
         const OutputsSpec & wantedOutputs, BuildMode buildMode = bmNormal);

     /**
-     * @ref SubstitutionGoal "substitution goal"
+     * @ref PathSubstitutionGoal "substitution goal"
      */
     std::shared_ptr<PathSubstitutionGoal> makePathSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
     std::shared_ptr<DrvOutputSubstitutionGoal> makeDrvOutputSubstitutionGoal(const DrvOutput & id, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
@@ -97,8 +97,9 @@ struct ContentAddressMethod
      * were ingested, with the fixed output case not prefixed for back
      * compat.
      *
-     * @param [in] m A string that should begin with the prefix.
-     * @param [out] m The remainder of the string after the prefix.
+     * @param m A string that should begin with the
+     * prefix. On return, the remainder of the string after the
+     * prefix.
      */
     static ContentAddressMethod parsePrefix(std::string_view & m);

@@ -139,14 +140,14 @@ struct ContentAddressMethod
 /**
  * We've accumulated several types of content-addressed paths over the
  * years; fixed-output derivations support multiple hash algorithms and
- * serialisation methods (flat file vs NAR). Thus, ‘ca’ has one of the
+ * serialisation methods (flat file vs NAR). Thus, `ca` has one of the
  * following forms:
  *
  * - `TextIngestionMethod`:
- *   ‘text:sha256:<sha256 hash of file contents>’
+ *   `text:sha256:<sha256 hash of file contents>`
  *
  * - `FixedIngestionMethod`:
- *   ‘fixed:<r?>:<hash algorithm>:<hash of file contents>’
+ *   `fixed:<r?>:<hash algorithm>:<hash of file contents>`
  */
 struct ContentAddress
 {
@@ -298,6 +298,10 @@ struct BasicDerivation
     std::string name;

     BasicDerivation() = default;
+    BasicDerivation(BasicDerivation &&) = default;
+    BasicDerivation(const BasicDerivation &) = default;
+    BasicDerivation& operator=(BasicDerivation &&) = default;
+    BasicDerivation& operator=(const BasicDerivation &) = default;
     virtual ~BasicDerivation() { };

     bool isBuiltin() const;
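The four defaulted members above are needed because `BasicDerivation` already declares a (virtual) destructor: a user-declared destructor suppresses the implicit move constructor and move assignment, so `std::move` on such a type silently degrades to a copy. A minimal self-contained illustration with hypothetical types:

    #include <string>
    #include <type_traits>
    #include <utility>

    struct OnlyDtor {
        std::string name;
        virtual ~OnlyDtor() { }
        // No move operations are implicitly generated: constructing from an
        // rvalue falls back to the (throwing) copy constructor.
    };

    struct RuleOfFive {
        std::string name;
        RuleOfFive() = default;
        RuleOfFive(RuleOfFive &&) = default;
        RuleOfFive(const RuleOfFive &) = default;
        RuleOfFive & operator=(RuleOfFive &&) = default;
        RuleOfFive & operator=(const RuleOfFive &) = default;
        virtual ~RuleOfFive() { }
    };

    // A nothrow move constructor only exists where it was explicitly defaulted.
    static_assert(!std::is_nothrow_move_constructible_v<OnlyDtor>);
    static_assert(std::is_nothrow_move_constructible_v<RuleOfFive>);

    int main()
    {
        RuleOfFive a;
        a.name = std::string(1024, 'x');
        RuleOfFive b = std::move(a);   // genuinely moves the string
        return b.name.size() == 1024 ? 0 : 1;
    }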
@@ -153,7 +153,7 @@ struct curlFileTransfer : public FileTransfer
     template<class T>
     void fail(T && e)
     {
-        failEx(std::make_exception_ptr(std::move(e)));
+        failEx(std::make_exception_ptr(std::forward<T>(e)));
     }

     LambdaSink finalSink;
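`fail()` takes a forwarding reference, so `T` can deduce to an lvalue reference; `std::move` there would unconditionally turn the caller's object into an rvalue and let `make_exception_ptr` gut it, whereas `std::forward<T>` only moves when the caller actually passed an rvalue. A self-contained sketch of the difference (all names made up):

    #include <iostream>
    #include <string>
    #include <utility>

    struct Tracked {
        std::string payload = "still here";
        Tracked() = default;
        Tracked(const Tracked & other) : payload(other.payload) { }
        Tracked(Tracked && other) noexcept : payload(std::move(other.payload)) { }
    };

    Tracked copyOut(Tracked t) { return t; }   // stand-in for make_exception_ptr

    template<class T>
    Tracked forwarding(T && e) { return copyOut(std::forward<T>(e)); }  // moves only from rvalues

    template<class T>
    Tracked moving(T && e) { return copyOut(std::move(e)); }            // always moves

    int main()
    {
        Tracked a, b;
        forwarding(a);   // a passed as lvalue: copied, a.payload intact
        moving(b);       // b silently moved-from despite being an lvalue
        std::cout << "a: \"" << a.payload << "\", b: \"" << b.payload << "\"\n";
        return 0;
    }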
@@ -1,6 +1,8 @@
 #pragma once
 /**
- * @file Reusable serialisers for serialization container types in a
+ * @file
+ *
+ * Reusable serialisers for serialization container types in a
  * length-prefixed manner.
  *
  * Used by both the Worker and Serve protocols.

@@ -28,25 +30,22 @@ struct StoreDirConfig;
 template<class Inner, typename T>
 struct LengthPrefixedProtoHelper;

-/*!
- * \typedef LengthPrefixedProtoHelper::S
- *
- * Read this as simply `using S = Inner::Serialise;`.
- *
- * It would be nice to use that directly, but C++ doesn't seem to allow
- * it. The `typename` keyword needed to refer to `Inner` seems to greedy
- * (low precedence), and then C++ complains that `Serialise` is not a
- * type parameter but a real type.
- *
- * Making this `S` alias seems to be the only way to avoid these issues.
- */
-
 #define LENGTH_PREFIXED_PROTO_HELPER(Inner, T) \
     struct LengthPrefixedProtoHelper< Inner, T > \
     { \
         static T read(const StoreDirConfig & store, typename Inner::ReadConn conn); \
         static void write(const StoreDirConfig & store, typename Inner::WriteConn conn, const T & str); \
     private: \
+        /*! \
+         * Read this as simply `using S = Inner::Serialise;`. \
+         * \
+         * It would be nice to use that directly, but C++ doesn't seem to allow \
+         * it. The `typename` keyword needed to refer to `Inner` seems to greedy \
+         * (low precedence), and then C++ complains that `Serialise` is not a \
+         * type parameter but a real type. \
+         * \
+         * Making this `S` alias seems to be the only way to avoid these issues. \
+         */ \
         template<typename U> using S = typename Inner::template Serialise<U>; \
     }

@@ -60,9 +59,8 @@ template<class Inner, typename... Ts>
 LENGTH_PREFIXED_PROTO_HELPER(Inner, std::tuple<Ts...>);

 template<class Inner, typename K, typename V>
-#define _X std::map<K, V>
-LENGTH_PREFIXED_PROTO_HELPER(Inner, _X);
-#undef _X
+#define LENGTH_PREFIXED_PROTO_HELPER_X std::map<K, V>
+LENGTH_PREFIXED_PROTO_HELPER(Inner, LENGTH_PREFIXED_PROTO_HELPER_X);

 template<class Inner, typename T>
 std::vector<T>
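The `_X`/`LENGTH_PREFIXED_PROTO_HELPER_X` indirection exists because the preprocessor splits macro arguments on commas that are not protected by parentheses; angle brackets do not protect, so `std::map<K, V>` arrives as two arguments. A self-contained illustration of the problem and the workaround (the macro and names here are hypothetical):

    #include <map>
    #include <string>

    // A macro with one parameter, analogous to LENGTH_PREFIXED_PROTO_HELPER.
    #define DECLARE_HOLDER(T) struct Holder { T value; }

    // DECLARE_HOLDER(std::map<std::string, int>);
    //   ^ does not compile: the comma inside <...> is not protected by
    //     parentheses, so the preprocessor sees *two* arguments
    //     ("std::map<std::string" and "int>") for a one-parameter macro.

    // Workaround used above: bind the comma-containing type to a macro name
    // first, then pass that single token through.
    #define HOLDER_ARG std::map<std::string, int>
    DECLARE_HOLDER(HOLDER_ARG);
    #undef HOLDER_ARG

    int main()
    {
        Holder h;
        h.value["answer"] = 42;
        return h.value.at("answer") == 42 ? 0 : 1;
    }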
@@ -1,103 +0,0 @@
-libraries += libstore
-
-libstore_NAME = libnixstore
-
-libstore_DIR := $(d)
-
-libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/build/*.cc)
-ifdef HOST_UNIX
-  libstore_SOURCES += $(wildcard $(d)/unix/*.cc $(d)/unix/build/*.cc)
-endif
-ifdef HOST_LINUX
-  libstore_SOURCES += $(wildcard $(d)/linux/*.cc)
-endif
-ifdef HOST_WINDOWS
-  libstore_SOURCES += $(wildcard $(d)/windows/*.cc)
-endif
-
-libstore_LIBS = libutil
-
-libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(THREAD_LDFLAGS)
-ifdef HOST_LINUX
-  libstore_LDFLAGS += -ldl
-endif
-ifdef HOST_WINDOWS
-  libstore_LDFLAGS += -lws2_32
-endif
-
-$(foreach file,$(libstore_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/sandbox)))
-
-ifeq ($(ENABLE_S3), 1)
-  libstore_LDFLAGS += -laws-cpp-sdk-transfer -laws-cpp-sdk-s3 -laws-cpp-sdk-core -laws-crt-cpp
-endif
-
-ifdef HOST_SOLARIS
-  libstore_LDFLAGS += -lsocket
-endif
-
-ifeq ($(HAVE_SECCOMP), 1)
-  libstore_LDFLAGS += $(LIBSECCOMP_LIBS)
-endif
-
-# Not just for this library itself, but also for downstream libraries using this library
-
-INCLUDE_libstore := -I $(d) -I $(d)/build
-ifdef HOST_UNIX
-  INCLUDE_libstore += -I $(d)/unix -I $(d)/unix/build
-endif
-ifdef HOST_LINUX
-  INCLUDE_libstore += -I $(d)/linux
-endif
-ifdef HOST_WINDOWS
-  INCLUDE_libstore += -I $(d)/windows
-endif
-
-ifdef HOST_WINDOWS
-  NIX_ROOT = N:\\\\
-else
-  NIX_ROOT =
-endif
-
-# Prefix all but `NIX_STORE_DIR`, since we aren't doing a local store
-# yet so a "logical" store dir that is the same as unix is preferred.
-#
-# Also, it keeps the unit tests working.
-
-libstore_CXXFLAGS += \
-  $(INCLUDE_libutil) $(INCLUDE_libstore) $(INCLUDE_libstore) \
-  -DNIX_PREFIX=\"$(NIX_ROOT)$(prefix)\" \
-  -DNIX_STORE_DIR=\"$(storedir)\" \
-  -DNIX_DATA_DIR=\"$(NIX_ROOT)$(datadir)\" \
-  -DNIX_STATE_DIR=\"$(NIX_ROOT)$(localstatedir)/nix\" \
-  -DNIX_LOG_DIR=\"$(NIX_ROOT)$(localstatedir)/log/nix\" \
-  -DNIX_CONF_DIR=\"$(NIX_ROOT)$(sysconfdir)/nix\" \
-  -DNIX_MAN_DIR=\"$(NIX_ROOT)$(mandir)\" \
-  -DLSOF=\"$(NIX_ROOT)$(lsof)\"
-
-ifeq ($(embedded_sandbox_shell),yes)
-  libstore_CXXFLAGS += -DSANDBOX_SHELL=\"__embedded_sandbox_shell__\"
-
-$(d)/unix/build/local-derivation-goal.cc: $(d)/unix/embedded-sandbox-shell.gen.hh
-
-$(d)/unix/embedded-sandbox-shell.gen.hh: $(sandbox_shell)
-	$(trace-gen) hexdump -v -e '1/1 "0x%x," "\n"' < $< > $@.tmp
-	@mv $@.tmp $@
-else
-  ifneq ($(sandbox_shell),)
-    libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\""
-  endif
-endif
-
-$(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
-
-$(d)/unix/build.cc:
-
-clean-files += $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
-
-$(eval $(call install-file-in, $(buildprefix)$(d)/nix-store.pc, $(libdir)/pkgconfig, 0644))
-
-$(foreach i, $(wildcard src/libstore/builtins/*.hh), \
-  $(eval $(call install-file-in, $(i), $(includedir)/nix/builtins, 0644)))
-
-$(foreach i, $(wildcard src/libstore/build/*.hh), \
-  $(eval $(call install-file-in, $(i), $(includedir)/nix/build, 0644)))
@ -33,7 +33,7 @@ Machine::Machine(
|
|||
systemTypes(systemTypes),
|
||||
sshKey(sshKey),
|
||||
maxJobs(maxJobs),
|
||||
speedFactor(speedFactor == 0.0f ? 1.0f : std::move(speedFactor)),
|
||||
speedFactor(speedFactor == 0.0f ? 1.0f : speedFactor),
|
||||
supportedFeatures(supportedFeatures),
|
||||
mandatoryFeatures(mandatoryFeatures),
|
||||
sshPublicHostKey(sshPublicHostKey)
|
||||
|
|
|
|||
|
|
@ -32,12 +32,12 @@ struct Machine {

    /**
     * @return Whether `features` is a subset of the union of `supportedFeatures` and
     * `mandatoryFeatures`
     * `mandatoryFeatures`.
     */
    bool allSupported(const std::set<std::string> & features) const;

    /**
     * @return @Whether `mandatoryFeatures` is a subset of `features`
     * @return Whether `mandatoryFeatures` is a subset of `features`.
     */
    bool mandatoryMet(const std::set<std::string> & features) const;
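Both declarations document simple subset relations: `allSupported` asks whether every requested feature appears in `supportedFeatures` or `mandatoryFeatures`, and `mandatoryMet` asks whether every mandatory feature was requested. A minimal sketch of those two checks as free functions (illustrative only; the real member functions are defined elsewhere in libstore and are not shown in this hunk):

#include <algorithm>
#include <set>
#include <string>

using StringSet = std::set<std::string>;

// features is a subset of (supported union mandatory)
bool allSupported(const StringSet & supported, const StringSet & mandatory, const StringSet & features)
{
    return std::all_of(features.begin(), features.end(), [&](const std::string & f) {
        return supported.count(f) || mandatory.count(f);
    });
}

// mandatory is a subset of features
bool mandatoryMet(const StringSet & mandatory, const StringSet & features)
{
    return std::all_of(mandatory.begin(), mandatory.end(), [&](const std::string & f) {
        return features.count(f) > 0;
    });
}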
@ -81,6 +81,7 @@ if host_machine.system() == 'windows'
  deps_other += [wsock32]
endif

subdir('build-utils-meson/libatomic')
subdir('build-utils-meson/threads')

boost = dependency(

@ -1,10 +0,0 @@
prefix=@prefix@
libdir=@libdir@
includedir=@includedir@

Name: Nix
Description: Nix Package Manager
Version: @PACKAGE_VERSION@
Requires: nix-util
Libs: -L${libdir} -lnixstore
Cflags: -I${includedir}/nix -std=c++2a

@ -153,7 +153,10 @@ namespace nlohmann {

using namespace nix;

OutputsSpec adl_serializer<OutputsSpec>::from_json(const json & json) {
#ifndef DOXYGEN_SKIP

OutputsSpec adl_serializer<OutputsSpec>::from_json(const json & json)
{
    auto names = json.get<StringSet>();
    if (names == StringSet({"*"}))
        return OutputsSpec::All {};

@ -161,7 +164,8 @@ OutputsSpec adl_serializer<OutputsSpec>::from_json(const json & json) {
        return OutputsSpec::Names { std::move(names) };
}

void adl_serializer<OutputsSpec>::to_json(json & json, OutputsSpec t) {
void adl_serializer<OutputsSpec>::to_json(json & json, OutputsSpec t)
{
    std::visit(overloaded {
        [&](const OutputsSpec::All &) {
            json = std::vector<std::string>({"*"});

@ -172,8 +176,8 @@ void adl_serializer<OutputsSpec>::to_json(json & json, OutputsSpec t) {
    }, t.raw);
}

ExtendedOutputsSpec adl_serializer<ExtendedOutputsSpec>::from_json(const json & json) {
ExtendedOutputsSpec adl_serializer<ExtendedOutputsSpec>::from_json(const json & json)
{
    if (json.is_null())
        return ExtendedOutputsSpec::Default {};
    else {

@ -181,7 +185,8 @@ ExtendedOutputsSpec adl_serializer<ExtendedOutputsSpec>::from_json(const json &
    }
}

void adl_serializer<ExtendedOutputsSpec>::to_json(json & json, ExtendedOutputsSpec t) {
void adl_serializer<ExtendedOutputsSpec>::to_json(json & json, ExtendedOutputsSpec t)
{
    std::visit(overloaded {
        [&](const ExtendedOutputsSpec::Default &) {
            json = nullptr;

@ -192,4 +197,6 @@ void adl_serializer<ExtendedOutputsSpec>::to_json(json & json, ExtendedOutputsSp
    }, t.raw);
}

#endif

}
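Two things are going on in these hunks: the new `#ifndef DOXYGEN_SKIP` guard hides the `adl_serializer` definitions from the documentation generator (assuming the Doxygen configuration predefines `DOXYGEN_SKIP` during its preprocessing), and the bodies keep using the overloaded-lambda idiom with `std::visit` to dispatch on the variant held in `t.raw`. A self-contained sketch of that idiom (the `overloaded` helper below is local to the example; Nix ships its own):

#include <iostream>
#include <string>
#include <variant>
#include <vector>

// Aggregate that inherits the call operators of all the lambdas it is built from.
template<class... Fs> struct overloaded : Fs... { using Fs::operator()...; };
template<class... Fs> overloaded(Fs...) -> overloaded<Fs...>;

int main()
{
    // std::monostate stands in for "all outputs", a vector for named outputs.
    std::variant<std::monostate, std::vector<std::string>> spec =
        std::vector<std::string>{"out", "dev"};

    std::visit(overloaded {
        [](const std::monostate &) { std::cout << "*\n"; },
        [](const std::vector<std::string> & names) { std::cout << names.size() << " named outputs\n"; },
    }, spec);
}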
@ -176,17 +176,18 @@ struct ValidPathInfo : UnkeyedValidPathInfo {
     */
    Strings shortRefs() const;

    ValidPathInfo(const ValidPathInfo & other) = default;

    ValidPathInfo(StorePath && path, UnkeyedValidPathInfo info) : UnkeyedValidPathInfo(info), path(std::move(path)) { };
    ValidPathInfo(const StorePath & path, UnkeyedValidPathInfo info) : UnkeyedValidPathInfo(info), path(path) { };

    ValidPathInfo(const Store & store,
        std::string_view name, ContentAddressWithReferences && ca, Hash narHash);

    virtual ~ValidPathInfo() { }
};

static_assert(std::is_move_assignable_v<ValidPathInfo>);
static_assert(std::is_copy_assignable_v<ValidPathInfo>);
static_assert(std::is_copy_constructible_v<ValidPathInfo>);
static_assert(std::is_move_constructible_v<ValidPathInfo>);

using ValidPathInfos = std::map<StorePath, ValidPathInfo>;

}

@ -37,6 +37,7 @@ DerivedPath StorePathWithOutputs::toDerivedPath() const
std::vector<DerivedPath> toDerivedPaths(const std::vector<StorePathWithOutputs> ss)
{
    std::vector<DerivedPath> reqs;
    reqs.reserve(ss.size());
    for (auto & s : ss) reqs.push_back(s.toDerivedPath());
    return reqs;
}

@ -81,7 +81,7 @@ typedef std::set<StorePath> StorePathSet;
typedef std::vector<StorePath> StorePaths;

/**
 * The file extension of \ref Derivation derivations when serialized
 * The file extension of \ref nix::Derivation derivations when serialized
 * into store objects.
 */
constexpr std::string_view drvExtension = ".drv";

@ -1,6 +1,8 @@
#pragma once
/**
 * @file Implementation of Profiles.
 * @file
 *
 * Implementation of Profiles.
 *
 * See the manual for additional information.
 */

@ -40,7 +40,7 @@ struct RemoteStore::ConnectionHandle
        : handle(std::move(handle))
    { }

    ConnectionHandle(ConnectionHandle && h)
    ConnectionHandle(ConnectionHandle && h) noexcept
        : handle(std::move(h.handle))
    { }

@ -29,11 +29,10 @@ SERVE_USE_LENGTH_PREFIX_SERIALISER(template<typename T>, std::vector<T>)
SERVE_USE_LENGTH_PREFIX_SERIALISER(template<typename T>, std::set<T>)
SERVE_USE_LENGTH_PREFIX_SERIALISER(template<typename... Ts>, std::tuple<Ts...>)

#define COMMA_ ,
#define SERVE_USE_LENGTH_PREFIX_SERIALISER_COMMA ,
SERVE_USE_LENGTH_PREFIX_SERIALISER(
    template<typename K COMMA_ typename V>,
    std::map<K COMMA_ V>)
#undef COMMA_
    template<typename K SERVE_USE_LENGTH_PREFIX_SERIALISER_COMMA typename V>,
    std::map<K SERVE_USE_LENGTH_PREFIX_SERIALISER_COMMA V>)

/**
 * Use `CommonProto` where possible.

@ -42,7 +42,8 @@ struct SQLite
    SQLite(const Path & path, SQLiteOpenMode mode = SQLiteOpenMode::Normal);
    SQLite(const SQLite & from) = delete;
    SQLite& operator = (const SQLite & from) = delete;
    SQLite& operator = (SQLite && from) { db = from.db; from.db = 0; return *this; }
    // NOTE: This is noexcept since we are only copying and assigning raw pointers.
    SQLite& operator = (SQLite && from) noexcept { db = from.db; from.db = 0; return *this; }
    ~SQLite();
    operator sqlite3 * () { return db; }
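This is one of several hunks in the change that add `noexcept` to move operations that only shuffle raw pointers or plain values (the same is done for `ConnectionHandle` above and for `AutoCloseFD`, `Callback`, and `Finally` further down). The practical payoff is that standard containers and `std::move_if_noexcept` will actually move such objects instead of copying them to preserve their exception guarantees. A small illustrative sketch, not Nix code:

#include <type_traits>
#include <vector>

struct Handle {
    void * db = nullptr;
    Handle() = default;
    Handle(const Handle &) = default;
    Handle & operator=(const Handle &) = default;
    // Only moves a raw pointer, so it can safely be noexcept.
    Handle(Handle && other) noexcept : db(other.db) { other.db = nullptr; }
    Handle & operator=(Handle && other) noexcept
    {
        db = other.db;
        other.db = nullptr;
        return *this;
    }
};

// Without noexcept these would fail, and std::vector would copy on reallocation.
static_assert(std::is_nothrow_move_constructible_v<Handle>);
static_assert(std::is_nothrow_move_assignable_v<Handle>);

int main()
{
    std::vector<Handle> v(4);
    v.emplace_back();  // reallocation moves the existing elements
}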
@ -59,7 +59,7 @@ public:
    /**
     * @param command The command (arg vector) to execute.
     *
     * @param extraSShArgs Extra args to pass to SSH (not the command to
     * @param extraSshArgs Extra arguments to pass to SSH (not the command to
     * execute). Will not be used when "fake SSHing" to the local
     * machine.
     */

@ -260,11 +260,11 @@ public:

    /**
     * Query the set of all valid paths. Note that for some store
     * backends, the name part of store paths may be replaced by 'x'
     * (i.e. you'll get /nix/store/<hash>-x rather than
     * /nix/store/<hash>-<name>). Use queryPathInfo() to obtain the
     * backends, the name part of store paths may be replaced by `x`
     * (i.e. you'll get `/nix/store/<hash>-x` rather than
     * `/nix/store/<hash>-<name>`). Use queryPathInfo() to obtain the
     * full store path. FIXME: should return a set of
     * std::variant<StorePath, HashPart> to get rid of this hack.
     * `std::variant<StorePath, HashPart>` to get rid of this hack.
     */
    virtual StorePathSet queryAllValidPaths()
    { unsupported("queryAllValidPaths"); }

@ -59,20 +59,20 @@ struct StoreDirConfig : public Config
    std::string showPaths(const StorePathSet & paths);

    /**
     * @return true if ‘path’ is in the Nix store (but not the Nix
     * @return true if *path* is in the Nix store (but not the Nix
     * store itself).
     */
    bool isInStore(PathView path) const;

    /**
     * @return true if ‘path’ is a store path, i.e. a direct child of the
     * @return true if *path* is a store path, i.e. a direct child of the
     * Nix store.
     */
    bool isStorePath(std::string_view path) const;

    /**
     * Split a path like /nix/store/<hash>-<name>/<bla> into
     * /nix/store/<hash>-<name> and /<bla>.
     * Split a path like `/nix/store/<hash>-<name>/<bla>` into
     * `/nix/store/<hash>-<name>` and `/<bla>`.
     */
    std::pair<StorePath, Path> toStorePath(PathView path) const;

@ -13,31 +13,31 @@ namespace nix {
 *
 * Supported values are:
 *
 * - ‘local’: The Nix store in /nix/store and database in
 * - `local`: The Nix store in /nix/store and database in
 *   /nix/var/nix/db, accessed directly.
 *
 * - ‘daemon’: The Nix store accessed via a Unix domain socket
 * - `daemon`: The Nix store accessed via a Unix domain socket
 *   connection to nix-daemon.
 *
 * - ‘unix://<path>’: The Nix store accessed via a Unix domain socket
 *   connection to nix-daemon, with the socket located at <path>.
 * - `unix://<path>`: The Nix store accessed via a Unix domain socket
 *   connection to nix-daemon, with the socket located at `<path>`.
 *
 * - ‘auto’ or ‘’: Equivalent to ‘local’ or ‘daemon’ depending on
 * - `auto` or ``: Equivalent to `local` or `daemon` depending on
 *   whether the user has write access to the local Nix
 *   store/database.
 *
 * - ‘file://<path>’: A binary cache stored in <path>.
 * - `file://<path>`: A binary cache stored in `<path>`.
 *
 * - ‘https://<path>’: A binary cache accessed via HTTP.
 * - `https://<path>`: A binary cache accessed via HTTP.
 *
 * - ‘s3://<path>’: A writable binary cache stored on Amazon's Simple
 * - `s3://<path>`: A writable binary cache stored on Amazon's Simple
 *   Storage Service.
 *
 * - ‘ssh://[user@]<host>’: A remote Nix store accessed by running
 *   ‘nix-store --serve’ via SSH.
 * - `ssh://[user@]<host>`: A remote Nix store accessed by running
 *   `nix-store --serve` via SSH.
 *
 * You can pass parameters to the store type by appending
 * ‘?key=value&key=value&...’ to the URI.
 * `?key=value&key=value&...` to the URI.
 */
struct StoreReference
{
|
|||
/**
|
||||
* Establishes connection, negotiating version.
|
||||
*
|
||||
* @return the minimum version supported by both sides and the set
|
||||
* @return The minimum version supported by both sides and the set
|
||||
* of protocol features supported by both sides.
|
||||
*
|
||||
* @param to Taken by reference to allow for various error handling
|
||||
|
|
@ -87,9 +87,9 @@ struct WorkerProto::BasicClientConnection : WorkerProto::BasicConnection
|
|||
* @param from Taken by reference to allow for various error
|
||||
* handling mechanisms.
|
||||
*
|
||||
* @param localVersion Our version which is sent over
|
||||
* @param localVersion Our version which is sent over.
|
||||
*
|
||||
* @param features The protocol features that we support
|
||||
* @param supportedFeatures The protocol features that we support.
|
||||
*/
|
||||
// FIXME: this should probably be a constructor.
|
||||
static std::tuple<Version, std::set<Feature>> handshake(
|
||||
|
|
@ -141,7 +141,7 @@ struct WorkerProto::BasicServerConnection : WorkerProto::BasicConnection
|
|||
/**
|
||||
* Establishes connection, negotiating version.
|
||||
*
|
||||
* @return the version provided by the other side of the
|
||||
* @return The version provided by the other side of the
|
||||
* connection.
|
||||
*
|
||||
* @param to Taken by reference to allow for various error handling
|
||||
|
|
@ -150,9 +150,9 @@ struct WorkerProto::BasicServerConnection : WorkerProto::BasicConnection
|
|||
* @param from Taken by reference to allow for various error
|
||||
* handling mechanisms.
|
||||
*
|
||||
* @param localVersion Our version which is sent over
|
||||
* @param localVersion Our version which is sent over.
|
||||
*
|
||||
* @param features The protocol features that we support
|
||||
* @param supportedFeatures The protocol features that we support.
|
||||
*/
|
||||
// FIXME: this should probably be a constructor.
|
||||
static std::tuple<Version, std::set<Feature>> handshake(
|
||||
|
|
|
|||
|
|
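The documentation above pins down the negotiation rule: each side sends its own protocol version and feature set, and the connection then uses the smallest version and the features common to both. A reduced sketch of that rule (illustrative types; the real handshake also exchanges magic numbers and runs over the `to`/`from` streams):

#include <algorithm>
#include <cstdint>
#include <iterator>
#include <set>
#include <string>
#include <tuple>

using Version = uint64_t;
using Feature = std::string;

std::tuple<Version, std::set<Feature>> negotiate(
    Version localVersion, const std::set<Feature> & localFeatures,
    Version remoteVersion, const std::set<Feature> & remoteFeatures)
{
    // Speak the newest version both peers understand...
    Version version = std::min(localVersion, remoteVersion);

    // ...and only the features both peers advertised.
    std::set<Feature> features;
    std::set_intersection(
        localFeatures.begin(), localFeatures.end(),
        remoteFeatures.begin(), remoteFeatures.end(),
        std::inserter(features, features.end()));

    return {version, features};
}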
@ -29,11 +29,10 @@ WORKER_USE_LENGTH_PREFIX_SERIALISER(template<typename T>, std::vector<T>)
WORKER_USE_LENGTH_PREFIX_SERIALISER(template<typename T>, std::set<T>)
WORKER_USE_LENGTH_PREFIX_SERIALISER(template<typename... Ts>, std::tuple<Ts...>)

#define COMMA_ ,
#define WORKER_USE_LENGTH_PREFIX_SERIALISER_COMMA ,
WORKER_USE_LENGTH_PREFIX_SERIALISER(
    template<typename K COMMA_ typename V>,
    std::map<K COMMA_ V>)
#undef COMMA_
    template<typename K WORKER_USE_LENGTH_PREFIX_SERIALISER_COMMA typename V>,
    std::map<K WORKER_USE_LENGTH_PREFIX_SERIALISER_COMMA V>)

/**
 * Use `CommonProto` where possible.

@ -1,18 +0,0 @@
libraries += libutilc

libutilc_NAME = libnixutilc

libutilc_DIR := $(d)

libutilc_SOURCES := $(wildcard $(d)/*.cc)

# Not just for this library itself, but also for downstream libraries using this library

INCLUDE_libutilc := -I $(d)
libutilc_CXXFLAGS += $(INCLUDE_libutil) $(INCLUDE_libutilc)

libutilc_LIBS = libutil

libutilc_LDFLAGS += $(THREAD_LDFLAGS)

libutilc_FORCE_INSTALL := 1

@ -1,9 +0,0 @@
prefix=@prefix@
libdir=@libdir@
includedir=@includedir@

Name: Nix libutil C API
Description: Common functions for the Nix C API, such as error handling
Version: @PACKAGE_VERSION@
Libs: -L${libdir} -lnixutil
Cflags: -I${includedir}/nix -std=c++2a

@ -162,7 +162,7 @@ void nix_c_context_free(nix_c_context * context);
 */
nix_err nix_libutil_init(nix_c_context * context);

/** @defgroup settings
/** @defgroup settings Nix configuration settings
 * @{
 */
/**

@ -1,19 +0,0 @@
libraries += libutil-test-support

libutil-test-support_NAME = libnixutil-test-support

libutil-test-support_DIR := $(d)

ifeq ($(INSTALL_UNIT_TESTS), yes)
  libutil-test-support_INSTALL_DIR := $(checklibdir)
else
  libutil-test-support_INSTALL_DIR :=
endif

libutil-test-support_SOURCES := $(wildcard $(d)/tests/*.cc)

libutil-test-support_CXXFLAGS += $(libutil-tests_EXTRA_INCLUDES)

libutil-test-support_LIBS = libutil

libutil-test-support_LDFLAGS := $(THREAD_LDFLAGS) -lrapidcheck

@ -6,7 +6,9 @@
// The lion's share of this code is copy pasted directly out of RapidCheck
// headers, so the copyright is set accordingly.
/**
 * @file Implements the ability to run a RapidCheck test under gtest with changed
 * @file
 *
 * Implements the ability to run a RapidCheck test under gtest with changed
 * test parameters such as the number of tests to run. This is useful for
 * running very large numbers of the extremely cheap property tests.
 */

@ -1,37 +0,0 @@
check: libutil-tests_RUN

programs += libutil-tests

libutil-tests_NAME = libnixutil-tests

libutil-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data GTEST_OUTPUT=xml:$$testresults/libutil-tests.xml

libutil-tests_DIR := $(d)

ifeq ($(INSTALL_UNIT_TESTS), yes)
  libutil-tests_INSTALL_DIR := $(checkbindir)
else
  libutil-tests_INSTALL_DIR :=
endif

libutil-tests_SOURCES := $(wildcard $(d)/*.cc)

libutil-tests_EXTRA_INCLUDES = \
  -I src/libutil-test-support \
  $(INCLUDE_libutil) \
  $(INCLUDE_libutilc)

libutil-tests_CXXFLAGS += $(libutil-tests_EXTRA_INCLUDES)

libutil-tests_LIBS = libutil-test-support libutil libutilc

libutil-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS)

ifdef HOST_WINDOWS
  # Increase the default reserved stack size to 65 MB so Nix doesn't run out of space
  libutil-tests_LDFLAGS += -Wl,--stack,$(shell echo $$((65 * 1024 * 1024)))
endif

check: $(d)/data/git/check-data.sh.test

$(eval $(call run-test,$(d)/data/git/check-data.sh))
@ -57,4 +57,9 @@ TEST(filterANSIEscapes, utf8)
    ASSERT_EQ(filterANSIEscapes("f𐍈𐍈bär", true, 4), "f𐍈𐍈b");
}

TEST(filterANSIEscapes, osc8)
{
    ASSERT_EQ(filterANSIEscapes("\e]8;;http://example.com\e\\This is a link\e]8;;\e\\"), "This is a link");
}

} // namespace nix
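For reference, the escape sequence exercised by the new `osc8` test is the de-facto terminal hyperlink convention: an OSC 8 sequence with a URI opens a link, the same sequence with an empty URI closes it, and only the text in between is meant to be visible. A small sketch of how the test string above decomposes (illustrative; `filterANSIEscapes` itself is the function under test, not redefined here):

#include <string>

int main()
{
    const std::string openLink  = "\x1b]8;;http://example.com\x1b\\";  // OSC 8 with a URI
    const std::string closeLink = "\x1b]8;;\x1b\\";                    // OSC 8 with an empty URI
    const std::string rendered  = openLink + "This is a link" + closeLink;
    // Stripping the escapes, as the test asserts, should leave "This is a link".
    (void) rendered;
}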
@ -91,9 +91,6 @@ struct Parser {

    /**
     * @brief Parse the next character(s)
     *
     * @param r
     * @return std::shared_ptr<Parser>
     */
    virtual void operator()(std::shared_ptr<Parser> & state, Strings & r) = 0;

@ -371,7 +371,7 @@ using Commands = std::map<std::string, std::function<ref<Command>()>>;

/**
 * An argument parser that supports multiple subcommands,
 * i.e. ‘<command> <subcommand>’.
 * i.e. `<command> <subcommand>`.
 */
class MultiCommand : virtual public Args
{

@ -21,7 +21,9 @@ public:

    Callback(std::function<void(std::future<T>)> fun) : fun(fun) { }

    Callback(Callback && callback) : fun(std::move(callback.fun))
    // NOTE: std::function is noexcept move-constructible since C++20.
    Callback(Callback && callback) noexcept(std::is_nothrow_move_constructible_v<decltype(fun)>)
        : fun(std::move(callback.fun))
    {
        auto prev = callback.done.test_and_set();
        if (prev) done.test_and_set();

@ -1,6 +1,8 @@
#pragma once
/**
 * @file Checked arithmetic with classes that make it hard to accidentally make something an unchecked operation.
 * @file
 *
 * Checked arithmetic with classes that make it hard to accidentally make something an unchecked operation.
 */

#include <compare>

@ -35,7 +35,7 @@ ExecutablePath ExecutablePath::parse(const OsString & path)
        std::make_move_iterator(strings.begin()),
        std::make_move_iterator(strings.end()),
        std::back_inserter(ret),
        [](auto && str) {
        [](OsString && str) {
            return fs::path{
                str.empty()
                    // "A zero-length prefix is a legacy feature that

@ -56,6 +56,7 @@ ExecutablePath ExecutablePath::parse(const OsString & path)
OsString ExecutablePath::render() const
{
    std::vector<PathViewNG> path2;
    path2.reserve(directories.size());
    for (auto & p : directories)
        path2.push_back(p.native());
    return basicConcatStringsSep(path_var_separator, path2);
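The `parse` hunk above combines `std::make_move_iterator` with `std::transform`, so each PATH entry is moved, not copied, into the lambda; the second hunk simply reserves the output vector before filling it. A compact standalone sketch of the move-iterator pattern (the empty-entry handling here is illustrative, echoing the quoted comment about zero-length prefixes):

#include <algorithm>
#include <filesystem>
#include <iterator>
#include <string>
#include <vector>

int main()
{
    std::vector<std::string> entries = {"/usr/bin", "/bin", ""};

    std::vector<std::filesystem::path> dirs;
    dirs.reserve(entries.size());

    std::transform(
        std::make_move_iterator(entries.begin()),
        std::make_move_iterator(entries.end()),
        std::back_inserter(dirs),
        [](std::string && entry) {
            // A zero-length PATH prefix traditionally means the current directory.
            return entry.empty() ? std::filesystem::path{"."}
                                 : std::filesystem::path{std::move(entry)};
        });
}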
@ -65,7 +65,7 @@ void dumpPath(
/**
 * Restore a serialisation of the given file system object.
 *
 * @TODO use an arbitrary `FileSystemObjectSink`.
 * \todo use an arbitrary `FileSystemObjectSink`.
 */
void restorePath(
    const Path & path,

@ -45,8 +45,9 @@ AutoCloseFD::AutoCloseFD() : fd{INVALID_DESCRIPTOR} {}

AutoCloseFD::AutoCloseFD(Descriptor fd) : fd{fd} {}

AutoCloseFD::AutoCloseFD(AutoCloseFD && that) : fd{that.fd}
// NOTE: This can be noexcept since we are just copying a value and resetting
// the file descriptor in the rhs.
AutoCloseFD::AutoCloseFD(AutoCloseFD && that) noexcept : fd{that.fd}
{
    that.fd = INVALID_DESCRIPTOR;
}

@ -106,8 +106,25 @@ void drainFD(
#endif
    );

/**
 * Get [Standard Input](https://en.wikipedia.org/wiki/Standard_streams#Standard_input_(stdin))
 */
[[gnu::always_inline]]
inline Descriptor getStandardOut() {
inline Descriptor getStandardInput()
{
#ifndef _WIN32
    return STDIN_FILENO;
#else
    return GetStdHandle(STD_INPUT_HANDLE);
#endif
}

/**
 * Get [Standard Output](https://en.wikipedia.org/wiki/Standard_streams#Standard_output_(stdout))
 */
[[gnu::always_inline]]
inline Descriptor getStandardOutput()
{
#ifndef _WIN32
    return STDOUT_FILENO;
#else

@ -115,6 +132,19 @@ inline Descriptor getStandardOut() {
#endif
}

/**
 * Get [Standard Error](https://en.wikipedia.org/wiki/Standard_streams#Standard_error_(stderr))
 */
[[gnu::always_inline]]
inline Descriptor getStandardError()
{
#ifndef _WIN32
    return STDERR_FILENO;
#else
    return GetStdHandle(STD_ERROR_HANDLE);
#endif
}

/**
 * Automatic cleanup of resources.
 */

@ -125,7 +155,7 @@ public:
    AutoCloseFD();
    AutoCloseFD(Descriptor fd);
    AutoCloseFD(const AutoCloseFD & fd) = delete;
    AutoCloseFD(AutoCloseFD&& fd);
    AutoCloseFD(AutoCloseFD&& fd) noexcept;
    ~AutoCloseFD();
    AutoCloseFD& operator =(const AutoCloseFD & fd) = delete;
    AutoCloseFD& operator =(AutoCloseFD&& fd);

@ -501,7 +501,7 @@ void deletePath(const fs::path & path, uint64_t & bytesFreed)

AutoDelete::AutoDelete() : del{false} {}

AutoDelete::AutoDelete(const fs::path & p, bool recursive) : _path(p)
AutoDelete::AutoDelete(const std::filesystem::path & p, bool recursive) : _path(p)
{
    del = true;
    this->recursive = recursive;

@ -227,7 +227,7 @@ void createDir(const Path & path, mode_t mode = 0755);
 * Set the access and modification times of the given path, not
 * following symlinks.
 *
 * @param accessTime Specified in seconds.
 * @param accessedTime Specified in seconds.
 *
 * @param modificationTime Specified in seconds.
 *

@ -20,7 +20,11 @@ public:
    // Copying Finallys is definitely not a good idea and will cause them to be
    // called twice.
    Finally(Finally &other) = delete;
    Finally(Finally &&other) : fun(std::move(other.fun)) {
    // NOTE: Move constructor can be nothrow if the callable type is itself nothrow
    // move-constructible.
    Finally(Finally && other) noexcept(std::is_nothrow_move_constructible_v<Fn>)
        : fun(std::move(other.fun))
    {
        other.movedFrom = true;
    }
    ~Finally() noexcept(false)

@ -104,7 +104,7 @@ void parseTree(
/**
 * Helper putting the previous three `parse*` functions together.
 *
 * @rootModeIfBlob How to interpret a root blob, for which there is no
 * @param rootModeIfBlob How to interpret a root blob, for which there is no
 * disambiguating dir entry to answer that questino. If the root it not
 * a blob, this is ignored.
 */

@ -91,12 +91,14 @@ namespace nlohmann {
 * round trip. We do that with a static assert.
 */
template<typename T>
struct adl_serializer<std::optional<T>> {
struct adl_serializer<std::optional<T>>
{
    /**
     * @brief Convert a JSON type to an `optional<T>` treating
     * `null` as `std::nullopt`.
     */
    static void from_json(const json & json, std::optional<T> & t) {
    static void from_json(const json & json, std::optional<T> & t)
    {
        static_assert(
            nix::json_avoids_null<T>::value,
            "null is already in use for underlying type's JSON");

@ -109,7 +111,8 @@ struct adl_serializer<std::optional<T>> {
     * @brief Convert an optional type to a JSON type treating `std::nullopt`
     * as `null`.
     */
    static void to_json(json & json, const std::optional<T> & t) {
    static void to_json(json & json, const std::optional<T> & t)
    {
        static_assert(
            nix::json_avoids_null<T>::value,
            "null is already in use for underlying type's JSON");
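With a specialization like the one above in scope, `std::optional<T>` round-trips through JSON with `std::nullopt` mapped to `null`, which is exactly what the static_asserts guard (the inner type must not itself use `null`). A hedged usage sketch with a simplified local stand-in for the specialization, so the example is self-contained (it assumes the nlohmann library version in use does not already provide one):

#include <nlohmann/json.hpp>
#include <optional>

// Local stand-in for the specialization shown above (simplified: no
// json_avoids_null guard).
namespace nlohmann {
template<typename T>
struct adl_serializer<std::optional<T>> {
    static void from_json(const json & j, std::optional<T> & t)
    {
        if (j.is_null()) t = std::nullopt;
        else t = j.get<T>();
    }
    static void to_json(json & j, const std::optional<T> & t)
    {
        if (t) j = *t;
        else j = nullptr;
    }
};
}

int main()
{
    nlohmann::json j = std::optional<int>{};    // serializes to null
    nlohmann::json k = std::optional<int>{42};  // serializes to 42

    auto a = j.get<std::optional<int>>();       // std::nullopt
    auto b = k.get<std::optional<int>>();       // 42
    (void) a; (void) b;
}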
@ -1,44 +0,0 @@
libraries += libutil

libutil_NAME = libnixutil

libutil_DIR := $(d)

libutil_SOURCES := $(wildcard $(d)/*.cc $(d)/signature/*.cc)
ifdef HOST_UNIX
  libutil_SOURCES += $(wildcard $(d)/unix/*.cc)
endif
ifdef HOST_LINUX
  libutil_SOURCES += $(wildcard $(d)/linux/*.cc)
endif
ifdef HOST_WINDOWS
  libutil_SOURCES += $(wildcard $(d)/windows/*.cc)
endif

# Not just for this library itself, but also for downstream libraries using this library

INCLUDE_libutil := -I $(d)
ifdef HOST_UNIX
  INCLUDE_libutil += -I $(d)/unix
endif
ifdef HOST_LINUX
  INCLUDE_libutil += -I $(d)/linux
endif
ifdef HOST_WINDOWS
  INCLUDE_libutil += -I $(d)/windows
endif
libutil_CXXFLAGS += $(INCLUDE_libutil)

libutil_LDFLAGS += $(THREAD_LDFLAGS) $(LIBCURL_LIBS) $(SODIUM_LIBS) $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context

$(foreach i, $(wildcard $(d)/args/*.hh), \
  $(eval $(call install-file-in, $(i), $(includedir)/nix/args, 0644)))
$(foreach i, $(wildcard $(d)/signature/*.hh), \
  $(eval $(call install-file-in, $(i), $(includedir)/nix/signature, 0644)))


ifeq ($(HAVE_LIBCPUID), 1)
  libutil_LDFLAGS += -lcpuid
endif

$(eval $(call install-file-in, $(buildprefix)$(d)/nix-util.pc, $(libdir)/pkgconfig, 0644))

@ -38,7 +38,7 @@ void Logger::warn(const std::string & msg)

void Logger::writeToStdout(std::string_view s)
{
    Descriptor standard_out = getStandardOut();
    Descriptor standard_out = getStandardOutput();
    writeFull(standard_out, s);
    writeFull(standard_out, "\n");
}

@ -118,11 +118,7 @@ void writeToStderr(std::string_view s)
{
    try {
        writeFull(
#ifdef _WIN32
            GetStdHandle(STD_ERROR_HANDLE),
#else
            STDERR_FILENO,
#endif
            getStandardError(),
            s, false);
    } catch (SystemError & e) {
        /* Ignore failing writes to stderr. We need to ignore write

@ -53,16 +53,9 @@ endforeach

configdata.set('HAVE_DECL_AT_SYMLINK_NOFOLLOW', cxx.has_header_symbol('fcntl.h', 'AT_SYMLINK_NOFOLLOW').to_int())

subdir('build-utils-meson/libatomic')
subdir('build-utils-meson/threads')

# Check if -latomic is needed
# This is needed for std::atomic on some platforms
# We did not manage to test this reliably on all platforms, so we hardcode
# it for now.
if host_machine.cpu_family() == 'arm'
  deps_other += cxx.find_library('atomic')
endif

if host_machine.system() == 'windows'
  socket = cxx.find_library('ws2_32')
  deps_other += socket
Some files were not shown because too many files have changed in this diff.